public class ExpandedTree extends Object implements org.apache.hadoop.io.Writable
| Constructor and Description |
|---|
| ExpandedTree() |
| Modifier and Type | Method and Description |
|---|---|
| boolean | atomEquals(int atom, byte[] value) |
| String | atomString(int i) |
| boolean | containLinks() |
| String[] | getCollections() |
| String | getDocumentURI() |
| long | getFragmentOrdinal() |
| Map&lt;String,String&gt; | getMetadata() |
| org.apache.hadoop.fs.Path | getPathToBinary() |
| int | getQuality() |
| String | getText(int index) |
| Node | node(int i) |
| void | putNode(int index, StringBuilder sb) |
| void | readFields(DataInput in) |
| byte | rootNodeKind() |
| void | setFragmentOrdinal(long fragmentOrdinal) |
| void | setQuality(int quality) |
| String | toString() |
| void | write(DataOutput out) |
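A minimal usage sketch of the read-only accessors in the summary above. The `tree` parameter, the surrounding helper class, and the assumption that the tree was produced by one of the connector's input formats are illustrative only; every call made is a method listed on this page, and the import for ExpandedTree itself is omitted because its package is not shown here.

```java
import java.util.Arrays;
import java.util.Map;

// Illustrative helper only; ExpandedTree import omitted (package not shown on this page).
public class ExpandedTreeSummary {

    // Print the document-level properties exposed by ExpandedTree.
    public static void describe(ExpandedTree tree) {
        String uri = tree.getDocumentURI();
        byte rootKind = tree.rootNodeKind();          // node-kind byte of the root node
        String[] collections = tree.getCollections();
        int quality = tree.getQuality();
        long ordinal = tree.getFragmentOrdinal();
        Map<String, String> metadata = tree.getMetadata();

        System.out.println(uri
                + " rootKind=" + rootKind
                + " quality=" + quality
                + " fragmentOrdinal=" + ordinal
                + " collections=" + Arrays.toString(collections)
                + " metadata=" + metadata);
    }
}
```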
public static final org.apache.commons.logging.Log LOG
public long ordinal
public long uriKey
public long uniqKey
public long linkKey
public long[] keys
public byte[] atomData
public String[] atomString
public int[] atomIndex
public long[] nodeOrdinal
public byte[] nodeKind
public int[] nodeRepID
public int[] nodeParentNodeRepID
public int[] docNodeTextRepID
public int[] docNodeChildNodeRepID
public int[] docNodeNumChildren
public int[] elemNodeNodeNameRepID
public int[] elemNodeAttrNodeRepID
public int[] elemNodeChildNodeRepID
public int[] elemNodeElemDeclRepID
public int[] elemNodeNumAttributes
public int[] elemNodeNumDefaultAttrs
public int[] elemNodeNumChildren
public int[] elemNodeFlags
public int[] attrNodeNodeNameRepID
public int[] attrNodeTextRepID
public int[] attrNodeAttrDeclRepID
public int[] piNodeTargetAtom
public int[] piNodeTextRepID
public long[] linkNodeKey
public long[] linkNodeNodeCount
public int[] linkNodeNodeNameRepID
public int[] linkNodeNodeRepID
public int[] nodeNameNameAtom
public int[] nodeNameNamespaceAtom
public long[] nsNodeOrdinal
public int[] nsNodePrevNSNodeRepID
public int[] nsNodePrefixAtom
public int[] nsNodeUriAtom
public long[] permNodeOrdinal
public int[] permNodePrevPermNodeRepID
public Capability[] permNodeCapability
public long[] permNodeRoleId
public int[] arrayNodeTextRepID
public int[] arrayNodeChildNodeRepID
public int[] arrayNodeNumChildren
public double[] doubles
public long binaryKey
public long binaryOffset
public long binarySize
public long binaryOrigLen
public int binaryPathAtom
public int numTextReps
public int[] textReps
public int[] binaryData
public int atomLimit
public int numKeys
public int numNodeReps
public int numNSNodeReps
public int numPermNodeReps
public int numLinkNodeReps
public int uriTextRepID
public int colsTextRepID
public int[] metaKeys
public int[] metaVals
public int schemaRepUID
public long schemaTimestamp
public int numMetadata
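A sketch that walks the atom table using the public atomLimit field and the atomString(int) accessor declared on this page. Treating indices 0 through atomLimit - 1 as valid atom indices, and reading atomString(i) as "resolve atom i to its string form", are assumptions taken from the names, not documented behavior.

```java
// Illustrative helper only; ExpandedTree import omitted (package not shown on this page).
public class AtomTableDump {

    // Assumes atom indices run from 0 to atomLimit - 1 (interpretation of the field name).
    public static void dumpAtoms(ExpandedTree tree) {
        for (int i = 0; i < tree.atomLimit; i++) {
            System.out.println(i + ": " + tree.atomString(i));
        }
    }
}
```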
public boolean atomEquals(int atom, byte[] value)
public String atomString(int i)
public String getText(int index)
public String[] getCollections()
public byte rootNodeKind()
public Node node(int i)
public String getDocumentURI()
public org.apache.hadoop.fs.Path getPathToBinary()
public boolean containLinks()
public long getFragmentOrdinal()
public void setFragmentOrdinal(long fragmentOrdinal)
public int getQuality()
public void setQuality(int quality)
public void readFields(DataInput in) throws IOException
Specified by: readFields in interface org.apache.hadoop.io.Writable
Throws: IOException
public void write(DataOutput out) throws IOException
Specified by: write in interface org.apache.hadoop.io.Writable
Throws: IOException
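Because ExpandedTree implements org.apache.hadoop.io.Writable, it can be round-tripped through any DataOutput/DataInput pair. The sketch below uses plain JDK streams; the populated `source` tree is assumed to come from elsewhere, and the ExpandedTree import is again omitted since its package is not shown on this page.

```java
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

// Illustrative round trip through the Writable methods shown above.
public class ExpandedTreeRoundTrip {

    public static ExpandedTree roundTrip(ExpandedTree source) throws IOException {
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        try (DataOutputStream out = new DataOutputStream(buffer)) {
            source.write(out);                     // Writable.write(DataOutput)
        }
        ExpandedTree copy = new ExpandedTree();    // public no-arg constructor from the summary
        try (DataInputStream in = new DataInputStream(
                new ByteArrayInputStream(buffer.toByteArray()))) {
            copy.readFields(in);                   // Writable.readFields(DataInput)
        }
        return copy;
    }
}
```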
public void putNode(int index, StringBuilder sb)
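The exact output of putNode(int index, StringBuilder sb) is not documented on this page; the sketch below simply assumes it appends some rendering of the node at `index` to the supplied StringBuilder, which is a guess from the signature rather than confirmed behavior.

```java
// Illustrative only; semantics of putNode are assumed from its signature.
public class NodeRenderSketch {

    public static String renderNode(ExpandedTree tree, int index) {
        StringBuilder sb = new StringBuilder();
        tree.putNode(index, sb);   // assumed to append a rendering of node `index` to sb
        return sb.toString();
    }
}
```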