idx
int64
0
165k
question
stringlengths
73
4.15k
target
stringlengths
5
918
len_question
int64
21
890
len_target
int64
3
255
23,300
public ItemList getItemList ( ) { if ( ! atomics . containsKey ( mRTX ) ) { atomics . put ( mRTX , new ItemList ( ) ) ; } return atomics . get ( mRTX ) ; }
Getting the ItemList .
53
5
23,301
public static int addAtomicToItemList ( final INodeReadTrx pRtx , final AtomicValue pVal ) { if ( ! atomics . containsKey ( pRtx ) ) { atomics . put ( pRtx , new ItemList ( ) ) ; } return atomics . get ( pRtx ) . addItem ( pVal ) ; }
Adding any AtomicVal to any ItemList staticly .
78
11
23,302
public DirRecord nextRecord ( ) throws NamingException { if ( inp == null ) { if ( in == null ) { throw new NamingException ( "No ldif input stream" ) ; } inp = new LdifRecord . Input ( ) ; inp . init ( new InputStreamReader ( in ) ) ; } else if ( inp . eof ) { return null ; } LdifRecord ldr = new LdifRecord ( ) ; if ( ! ldr . read ( inp ) ) { return null ; } return ldr ; }
Return the next record in the input stream .
123
9
23,303
public static void dumpLdif ( LdifOut lo , DirRecord rec ) throws NamingException { if ( rec == null ) { throw new NamingException ( "dumpLdif: No record supplied" ) ; } String dn = rec . getDn ( ) ; if ( dn == null ) { throw new NamingException ( "Unable to get dn" ) ; } lo . out ( "dn: " + dn ) ; int ctype = rec . getChangeType ( ) ; if ( ! rec . getIsContent ( ) ) { // Emit a changetype attribute lo . out ( "changeType: " + LdifRecord . changeTypes [ ctype ] ) ; } if ( ( rec . getIsContent ( ) ) || ( ctype == DirRecord . changeTypeAdd ) ) { Attributes as = rec . getAttributes ( ) ; if ( as == null ) throw new NamingException ( "No attributes" ) ; Enumeration e = as . getAll ( ) ; while ( e . hasMoreElements ( ) ) { dumpAttr ( lo , ( Attribute ) e . nextElement ( ) ) ; } // while } else if ( ctype == DirRecord . changeTypeDelete ) { lo . out ( "changetype: delete" ) ; } else { lo . out ( "changetype: modify" ) ; // Dump changes ModificationItem [ ] mods = rec . getMods ( ) ; if ( mods == null ) { lo . out ( "# Invalid record - no mods" ) ; } else { for ( int i = 0 ; i < mods . length ; i ++ ) { ModificationItem m = mods [ i ] ; int op = m . getModificationOp ( ) ; Attribute a = m . getAttribute ( ) ; String aid = a . getID ( ) ; if ( op == DirContext . ADD_ATTRIBUTE ) { lo . out ( "add: " + aid ) ; } else if ( op == DirContext . REPLACE_ATTRIBUTE ) { lo . out ( "replace: " + aid ) ; } else if ( op == DirContext . REMOVE_ATTRIBUTE ) { lo . out ( "delete: " + aid ) ; } else { lo . out ( "# Invalid record - bad mod op " + op ) ; } dumpAttr ( lo , a ) ; } } lo . out ( "-" ) ; } lo . out ( "" ) ; // null terminator }
dumpLdif write the entire record as ldif .
540
13
23,304
public static String makeLocale ( String lang , String country ) { if ( ( lang == null ) || ( lang . length ( ) == 0 ) ) { return localeInfoDefaultDefault ; } if ( ( country == null ) || ( country . length ( ) == 0 ) ) { return localeInfoDefaultDefault ; } return lang + "_" + country ; }
If either the lang or country is null we provide a default value for the whole locale . Otherwise we construct one .
75
23
23,305
private StreamingOutput createOutput ( final JaxRx impl , final ResourcePath path ) { // check for command parameter String qu = path . getValue ( QueryParameter . COMMAND ) ; if ( qu != null ) { return impl . command ( qu , path ) ; } // check for run parameter qu = path . getValue ( QueryParameter . RUN ) ; if ( qu != null ) { return impl . run ( qu , path ) ; } // check for query parameter qu = path . getValue ( QueryParameter . QUERY ) ; if ( qu != null ) { return impl . query ( qu , path ) ; } // no parameter found return impl . get ( path ) ; }
Returns a stream output depending on the query parameters .
143
10
23,306
Response createResponse ( final JaxRx impl , final ResourcePath path ) { final StreamingOutput out = createOutput ( impl , path ) ; // change media type, dependent on WRAP value final boolean wrap = path . getValue ( QueryParameter . WRAP ) == null || path . getValue ( QueryParameter . WRAP ) . equals ( "yes" ) ; String type = wrap ? MediaType . APPLICATION_XML : MediaType . TEXT_PLAIN ; // overwrite type if METHOD or MEDIA-TYPE parameters are specified final String op = path . getValue ( QueryParameter . OUTPUT ) ; if ( op != null ) { final Scanner sc = new Scanner ( op ) ; sc . useDelimiter ( "," ) ; while ( sc . hasNext ( ) ) { final String [ ] sp = sc . next ( ) . split ( "=" , 2 ) ; if ( sp . length == 1 ) continue ; if ( sp [ 0 ] . equals ( METHOD ) ) { for ( final String [ ] m : METHODS ) if ( sp [ 1 ] . equals ( m [ 0 ] ) ) type = m [ 1 ] ; } else if ( sp [ 0 ] . equals ( MEDIATYPE ) ) { type = sp [ 1 ] ; } } } // check validity of media type MediaType mt = null ; try { mt = MediaType . valueOf ( type ) ; } catch ( final IllegalArgumentException ex ) { throw new JaxRxException ( 400 , ex . getMessage ( ) ) ; } return Response . ok ( out , mt ) . build ( ) ; }
Returns a result depending on the query parameters .
346
9
23,307
protected Map < QueryParameter , String > getParameters ( final UriInfo uri , final JaxRx jaxrx ) { final MultivaluedMap < String , String > params = uri . getQueryParameters ( ) ; final Map < QueryParameter , String > newParam = createMap ( ) ; final Set < QueryParameter > impl = jaxrx . getParameters ( ) ; for ( final String key : params . keySet ( ) ) { for ( final String s : params . get ( key ) ) { addParameter ( key , s , newParam , impl ) ; } } return newParam ; }
Extracts and returns query parameters from the specified map . If a parameter is specified multiple times its values will be separated with tab characters .
129
28
23,308
private Map < QueryParameter , String > createMap ( ) { final Map < QueryParameter , String > params = new HashMap < QueryParameter , String > ( ) ; final Properties props = System . getProperties ( ) ; for ( final Map . Entry < Object , Object > set : props . entrySet ( ) ) { final String key = set . getKey ( ) . toString ( ) ; final String up = key . replace ( "org.jaxrx.parameter." , "" ) ; if ( key . equals ( up ) ) continue ; try { params . put ( QueryParameter . valueOf ( up . toUpperCase ( ) ) , set . getValue ( ) . toString ( ) ) ; } catch ( final IllegalArgumentException ex ) { /* ignore */ } } return params ; }
Returns a fresh parameter map . This map contains all parameters as defaults which have been specified by the user via system properties with the pattern org . jaxrx . parameter . KEY as key .
172
38
23,309
private void prefetch ( long storageIndex ) { long startIndex = storageIndex / BYTES_IN_DATA ; // Inc to next bucket startIndex += 128 ; if ( mPrefetchedBuckets . contains ( startIndex ) ) { mPrefetchedBuckets . remove ( startIndex ) ; return ; } for ( int i = 0 ; i < BUCKETS_TO_PREFETCH ; i ++ ) { if ( ( startIndex + i ) > ( mNodeNumbers / 128 ) ) { return ; } mRtx . moveTo ( startIndex ) ; startIndex += 128 ; } }
Prefetch buckets if necessary
132
5
23,310
public Document check ( final InputStream input ) { Document document ; try { final DocumentBuilder docBuilder = DocumentBuilderFactory . newInstance ( ) . newDocumentBuilder ( ) ; document = docBuilder . parse ( input ) ; final InputStream is = getClass ( ) . getResourceAsStream ( xslSchema ) ; final Source source = new SAXSource ( new InputSource ( is ) ) ; checkIsValid ( document , source ) ; } catch ( final SAXException exce ) { throw new JaxRxException ( 400 , exce . getMessage ( ) ) ; } catch ( final ParserConfigurationException exce ) { throw new JaxRxException ( exce ) ; } catch ( final IOException exce ) { throw new JaxRxException ( exce ) ; } return document ; }
This method parses an XML input with a W3C DOM implementation and validates it then with the available XML schema .
168
25
23,311
private void checkIsValid ( final Document document , final Source source ) throws SAXException , IOException { final SchemaFactory schemaFactory = SchemaFactory . newInstance ( XMLConstants . W3C_XML_SCHEMA_NS_URI ) ; final Schema schema = schemaFactory . newSchema ( source ) ; final Validator validator = schema . newValidator ( ) ; validator . validate ( new DOMSource ( document ) ) ; }
This method checks the parsed document if it is valid to a given XML schema . If not an exception is thrown
99
22
23,312
public static String getUid ( ) { /* Unique down to millisecond */ short hiTime = ( short ) ( System . currentTimeMillis ( ) >>> 32 ) ; int loTime = ( int ) System . currentTimeMillis ( ) ; int ct ; synchronized ( Uid . class ) { if ( counter < 0 ) { counter = 0 ; } ct = counter ++ ; } return new StringBuilder ( 36 ) . append ( format ( IP ) ) . append ( sep ) . append ( format ( JVM ) ) . append ( sep ) . append ( format ( hiTime ) ) . append ( sep ) . append ( format ( loTime ) ) . append ( sep ) . append ( format ( ct ) ) . toString ( ) ; }
Code copied and modified from hibernate UUIDHexGenerator . Generates a unique 36 character key of hex + separators .
161
28
23,313
public static int toInt ( byte [ ] bytes ) { int result = 0 ; for ( int i = 0 ; i < 4 ; i ++ ) { result = ( result << 8 ) - Byte . MIN_VALUE + ( int ) bytes [ i ] ; } return result ; }
From hibernate . util
59
6
23,314
public boolean search ( String base , String filter ) throws NamingException { return search ( base , filter , scopeSub ) ; }
Carry out a subtree search
27
7
23,315
public boolean searchBase ( String base , String filter ) throws NamingException { return search ( base , filter , scopeBase ) ; }
Carry out a base level search . This should be the default if the scope is not specified .
28
20
23,316
public boolean searchOne ( String base , String filter ) throws NamingException { return search ( base , filter , scopeOne ) ; }
Carry out a one level search
28
7
23,317
public DirRecord newRecord ( String entryDn ) throws NamingException { DirRecord rec = new BasicDirRecord ( ) ; rec . setDn ( entryDn ) ; return rec ; }
newRecord - Return a record which can have attribute values added . create should be called to create the directory entry .
42
23
23,318
public LogIterator getIterator ( ) { Set < Entry < LogKey , LogValue > > entries = mCache . asMap ( ) . entrySet ( ) ; for ( Entry < LogKey , LogValue > entry : entries ) { insertIntoBDB ( entry . getKey ( ) , entry . getValue ( ) ) ; } return new LogIterator ( ) ; }
Returning all elements as Iterator .
79
8
23,319
public boolean read ( Input in ) throws NamingException { clear ( ) ; this . in = in ; // somedata = false; haveControls = false ; crec = null ; for ( ; ; ) { int alen = - 1 ; try { crec = in . readFullLine ( ) ; } catch ( Exception e ) { throwmsg ( e . getMessage ( ) ) ; } int inLen = 0 ; if ( crec != null ) { inLen = crec . length ( ) ; } /* System.out.println("ldifrec len=" + inLen + " data=" + crec + " state=" + state); */ if ( crec != null ) { ldifData . addElement ( crec ) ; alen = crec . indexOf ( ' ' ) ; } // Null terminator means we're done if ( inLen == 0 ) { // There are some state we should not be in here if ( state == stateModSpec ) { // Any others? invalid ( ) ; } break ; } if ( ( inLen > 0 ) && ( crec . startsWith ( "#" ) ) ) { // Comment line. Ignore it. } else if ( alen > 0 ) { /** We have something of the form <name> : <val> or <name> :: <encoded-val> or for base-64 encoded data <name> :< <url> for the url of an inclusion */ String attr = null ; StringBuffer val = null ; boolean encoded = false ; boolean url = false ; int valStart ; valStart = alen + 1 ; if ( valStart == inLen ) { throw new NamingException ( "Bad input value \"" + crec + "\"" ) ; } else if ( ( alen < inLen ) && ( crec . charAt ( valStart ) == ' ' ) ) { valStart ++ ; encoded = true ; } else if ( ( alen < inLen ) && ( crec . charAt ( valStart ) == ' ' ) ) { valStart ++ ; url = true ; } while ( ( valStart < inLen ) && ( crec . charAt ( valStart ) == ' ' ) ) { valStart ++ ; } attr = crec . substring ( 0 , alen ) . toLowerCase ( ) ; val = new StringBuffer ( crec . substring ( valStart ) ) ; addAttrVal ( attr , val . toString ( ) , encoded , url ) ; } else if ( ( state == stateModSpec ) && ( inLen == 1 ) && ( crec . equals ( "-" ) ) ) { // We have a current change to add to the change vector. if ( changes == null ) { changes = new Vector ( ) ; } changes . addElement ( curChange ) ; curChange = null ; state = stateModify ; } else if ( inLen > 0 ) { invalid ( ) ; } } return somedata ; }
Read an entire ldif record from an input stream
635
11
23,320
public boolean writeInputData ( Writer wtr ) throws Throwable { if ( ( ldifData == null ) || ( ldifData . size ( ) == 0 ) ) { return false ; } synchronized ( wtr ) { for ( int i = 0 ; i < ldifData . size ( ) ; i ++ ) { String str = ( String ) ldifData . elementAt ( i ) ; wtr . write ( str ) ; wtr . write ( ' ' ) ; } // terminate with null wtr . write ( ' ' ) ; wtr . flush ( ) ; } return true ; }
Write the data we built this from
131
7
23,321
public void write ( Writer wtr ) throws Throwable { // First we need the dn wtr . write ( getDn ( ) ) ; wtr . write ( ' ' ) ; throw new Exception ( "Incomplete" ) ; }
Write an ldif record representing this object
51
9
23,322
public void add ( final AbsAxis mAx ) { AbsAxis axis = mAx ; if ( isDupOrd ( axis ) ) { axis = new DupFilterAxis ( mRtx , axis ) ; DupState . nodup = true ; } switch ( mNumber ) { case 0 : mFirstAxis = axis ; mNumber ++ ; break ; case 1 : mExpr = new NestedAxis ( mFirstAxis , axis , mRtx ) ; mNumber ++ ; break ; default : final AbsAxis cache = mExpr ; mExpr = new NestedAxis ( cache , axis , mRtx ) ; } }
Adds a new Axis to the expression chain . The first axis that is added has to be stored till a second axis is added . When the second axis is added it is nested with the first one and builds the execution chain .
141
45
23,323
private static int decodePercent ( final CharSequence s , final int length , final int i ) { if ( i + 2 >= length ) { return INVALID ; } final char n1 = s . charAt ( i + 1 ) ; final char n2 = s . charAt ( i + 2 ) ; return decodeNibbles ( n1 , n2 ) ; }
Decode a percent encoded byte . E . g . %3F - > 63 .
80
18
23,324
private static int decodeNibbles ( final char c1 , final char c2 ) { final int n1 = decodeHex ( c1 ) ; if ( n1 == INVALID ) { return INVALID ; } final int n2 = decodeHex ( c2 ) ; if ( n2 == INVALID ) { return INVALID ; } return ( ( ( n1 & 0xf ) << 4 ) | ( n2 & 0xf ) ) ; }
Decode two hex nibbles to a byte . E . g . 3 and F - > 63 .
101
21
23,325
private static int decodeHex ( final char c ) { if ( c < ' ' ) { return INVALID ; } if ( c <= ' ' ) { return c - ' ' ; } if ( c < ' ' ) { return INVALID ; } if ( c <= ' ' ) { return c - ' ' + 10 ; } if ( c < ' ' ) { return INVALID ; } if ( c <= ' ' ) { return c - ' ' + 10 ; } return INVALID ; }
Decode a hex nibble . E . g . 3 - > 3 and F - > 15 .
109
21
23,326
private static int utf8Read3 ( int cu1 , int cu2 , int cu3 ) { if ( ( cu2 & 0xC0 ) != 0x80 ) { return INVALID ; } if ( cu1 == 0xE0 && cu2 < 0xA0 ) { // overlong return INVALID ; } if ( ( cu3 & 0xC0 ) != 0x80 ) { return INVALID ; } return ( cu1 << 12 ) + ( cu2 << 6 ) + cu3 - 0xE2080 ; }
Read a 3 byte UTF8 sequence .
121
8
23,327
private static int utf8Read4 ( int cu1 , int cu2 , int cu3 , int cu4 ) { if ( ( cu2 & 0xC0 ) != 0x80 ) { return INVALID ; } if ( cu1 == 0xF0 && cu2 < 0x90 ) { return INVALID ; // overlong } if ( cu1 == 0xF4 && cu2 >= 0x90 ) { return INVALID ; // > U+10FFFF } if ( ( cu3 & 0xC0 ) != 0x80 ) { return INVALID ; } if ( ( cu4 & 0xC0 ) != 0x80 ) { return INVALID ; } return ( cu1 << 18 ) + ( cu2 << 12 ) + ( cu3 << 6 ) + cu4 - 0x3C82080 ; }
Read a 4 byte UTF8 sequence .
185
8
23,328
public void close ( ) { if ( consumer != null ) { try { consumer . close ( ) ; } catch ( Throwable t ) { warn ( t . getMessage ( ) ) ; } } conn . close ( ) ; }
Close and release resources .
48
5
23,329
public void process ( final boolean asynch ) throws NotificationException { if ( asynch ) { try { consumer . setMessageListener ( this ) ; return ; } catch ( final JMSException je ) { throw new NotificationException ( je ) ; } } while ( running ) { final Message m = conn . receive ( ) ; if ( m == null ) { running = false ; return ; } onMessage ( m ) ; } }
For asynch we do the onMessage listener style . Otherwise we wait synchronously for incoming messages .
91
21
23,330
public static boolean getURL ( final String sUrl , final AtomicInteger gCount , long start ) { int count = gCount . incrementAndGet ( ) ; if ( count % 100 == 0 ) { long diff = ( System . currentTimeMillis ( ) - start ) / 1000 ; logger . info ( "Count: " + count + " IPS: " + count / diff ) ; } final URL url ; try { url = new URL ( sUrl ) ; } catch ( MalformedURLException e ) { logger . info ( "URL-Failed(" + count + "): " + e . toString ( ) ) ; return false ; } logger . log ( Level . FINE , "Testing(" + count + "): " + url ) ; final URLConnection con ; try { con = url . openConnection ( ) ; con . setConnectTimeout ( 10000 ) ; con . setReadTimeout ( 10000 ) ; if ( - 1 == con . getInputStream ( ) . read ( ) ) { return false ; } } catch ( IOException e ) { // don't print out time out as it is expected here if ( Utils . isIgnorableException ( e ) ) { return false ; } logger . log ( Level . WARNING , "Failed (" + url + ")(" + count + ")" , e ) ; return false ; } //logger.info(con); // logger.info("Date : " + new Date(con.getDate())); logger . info ( "Last Modified (" + url + ")(" + count + "): " + new Date ( con . getLastModified ( ) ) ) ; // logger.info( "Content encoding: " + con.getContentEncoding() // ); // logger.info( "Content type : " + con.getContentType() ); // logger.info( "Content length : " + con.getContentLength() ); return true ; }
Test URL and report if it can be read .
406
10
23,331
private void fillDataStructures ( ) { final ITreeData treeData = mRtx . getNode ( ) ; mInOrder . put ( treeData , true ) ; mDescendants . put ( treeData , 1L ) ; }
Fill data structures .
51
4
23,332
@ SuppressForbidden ( reason = "We want to bind to any address here when checking for free ports" ) public static int getNextFreePort ( int portRangeStart , int portRangeEnd ) throws IOException { for ( int port = portRangeStart ; port <= portRangeEnd ; port ++ ) { try ( ServerSocket sock = new ServerSocket ( ) ) { sock . setReuseAddress ( true ) ; sock . bind ( new InetSocketAddress ( port ) ) ; return port ; } catch ( IOException e ) { // seems to be taken, try next one log . warning ( "Port " + port + " seems to be used already, trying next one..." ) ; } } throw new IOException ( "No free port found in the range of [" + portRangeStart + " - " + portRangeEnd + "]" ) ; }
Method that is used to find the next available port . It used the two constants PORT_RANGE_START and PORT_RANGE_END defined above to limit the range of ports that are tried .
180
44
23,333
protected TimeZone fetchTimeZone ( final String id ) throws TimezonesException { final TaggedTimeZone ttz = fetchTimeZone ( id , null ) ; if ( ttz == null ) { return null ; } register ( id , ttz . tz ) ; return ttz . tz ; }
Fetch a timezone object from the server given the id .
65
13
23,334
void diffMovement ( ) throws TTException { assert mHashKind != null ; assert mNewRtx != null ; assert mOldRtx != null ; assert mDiff != null ; assert mDiffKind != null ; // Check first nodes. if ( mNewRtx . getNode ( ) . getKind ( ) != ROOT ) { if ( mHashKind == HashKind . None || mDiffKind == EDiffOptimized . NO ) { mDiff = diff ( mNewRtx , mOldRtx , mDepth , EFireDiff . TRUE ) ; } else { mDiff = optimizedDiff ( mNewRtx , mOldRtx , mDepth , EFireDiff . TRUE ) ; } } // Iterate over new revision. while ( ( mOldRtx . getNode ( ) . getKind ( ) != ROOT && mDiff == EDiff . DELETED ) || moveCursor ( mNewRtx , ERevision . NEW ) ) { if ( mDiff != EDiff . INSERTED ) { moveCursor ( mOldRtx , ERevision . OLD ) ; } if ( mNewRtx . getNode ( ) . getKind ( ) != ROOT || mOldRtx . getNode ( ) . getKind ( ) != ROOT ) { if ( mHashKind == HashKind . None || mDiffKind == EDiffOptimized . NO ) { mDiff = diff ( mNewRtx , mOldRtx , mDepth , EFireDiff . TRUE ) ; } else { mDiff = optimizedDiff ( mNewRtx , mOldRtx , mDepth , EFireDiff . TRUE ) ; } } } // Nodes deleted in old rev at the end of the tree. if ( mOldRtx . getNode ( ) . getKind ( ) != ROOT ) { // First time it might be EDiff.INSERTED where the cursor doesn't // move. while ( mDiff == EDiff . INSERTED || moveCursor ( mOldRtx , ERevision . OLD ) ) { if ( mHashKind == HashKind . None || mDiffKind == EDiffOptimized . NO ) { mDiff = diff ( mNewRtx , mOldRtx , mDepth , EFireDiff . TRUE ) ; } else { mDiff = optimizedDiff ( mNewRtx , mOldRtx , mDepth , EFireDiff . TRUE ) ; } } } done ( ) ; }
Do the diff .
530
4
23,335
boolean moveCursor ( final INodeReadTrx paramRtx , final ERevision paramRevision ) throws TTIOException { assert paramRtx != null ; boolean moved = false ; final ITreeStructData node = ( ( ITreeStructData ) paramRtx . getNode ( ) ) ; if ( node . hasFirstChild ( ) ) { if ( node . getKind ( ) != ROOT && mDiffKind == EDiffOptimized . HASHED && mHashKind != HashKind . None && ( mDiff == EDiff . SAMEHASH || mDiff == EDiff . DELETED ) ) { moved = paramRtx . moveTo ( ( ( ITreeStructData ) paramRtx . getNode ( ) ) . getRightSiblingKey ( ) ) ; if ( ! moved ) { moved = moveToFollowingNode ( paramRtx , paramRevision ) ; } } else { moved = paramRtx . moveTo ( ( ( ITreeStructData ) paramRtx . getNode ( ) ) . getFirstChildKey ( ) ) ; if ( moved ) { switch ( paramRevision ) { case NEW : mDepth . incrementNewDepth ( ) ; break ; case OLD : mDepth . incrementOldDepth ( ) ; break ; } } } } else if ( node . hasRightSibling ( ) ) { if ( paramRtx . getNode ( ) . getDataKey ( ) == mRootKey ) { paramRtx . moveTo ( ROOT_NODE ) ; } else { moved = paramRtx . moveTo ( ( ( ITreeStructData ) paramRtx . getNode ( ) ) . getRightSiblingKey ( ) ) ; } } else { moved = moveToFollowingNode ( paramRtx , paramRevision ) ; } return moved ; }
Move cursor one node forward in pre order .
387
9
23,336
private boolean moveToFollowingNode ( final INodeReadTrx paramRtx , final ERevision paramRevision ) throws TTIOException { boolean moved = false ; while ( ! ( ( ITreeStructData ) paramRtx . getNode ( ) ) . hasRightSibling ( ) && ( ( ITreeStructData ) paramRtx . getNode ( ) ) . hasParent ( ) && paramRtx . getNode ( ) . getDataKey ( ) != mRootKey ) { moved = paramRtx . moveTo ( paramRtx . getNode ( ) . getParentKey ( ) ) ; if ( moved ) { switch ( paramRevision ) { case NEW : mDepth . decrementNewDepth ( ) ; break ; case OLD : mDepth . decrementOldDepth ( ) ; break ; } } } if ( paramRtx . getNode ( ) . getDataKey ( ) == mRootKey ) { paramRtx . moveTo ( ROOT_NODE ) ; } moved = paramRtx . moveTo ( ( ( ITreeStructData ) paramRtx . getNode ( ) ) . getRightSiblingKey ( ) ) ; return moved ; }
Move to next following node .
251
6
23,337
EDiff diff ( final INodeReadTrx paramNewRtx , final INodeReadTrx paramOldRtx , final DepthCounter paramDepth , final EFireDiff paramFireDiff ) throws TTIOException { assert paramNewRtx != null ; assert paramOldRtx != null ; assert paramDepth != null ; EDiff diff = EDiff . SAME ; // Check for modifications. switch ( paramNewRtx . getNode ( ) . getKind ( ) ) { case ROOT : case TEXT : case ELEMENT : if ( ! checkNodes ( paramNewRtx , paramOldRtx ) ) { diff = diffAlgorithm ( paramNewRtx , paramOldRtx , paramDepth ) ; } break ; default : // Do nothing. } if ( paramFireDiff == EFireDiff . TRUE ) { fireDiff ( diff , ( ( ITreeStructData ) paramNewRtx . getNode ( ) ) , ( ( ITreeStructData ) paramOldRtx . getNode ( ) ) , new DiffDepth ( paramDepth . getNewDepth ( ) , paramDepth . getOldDepth ( ) ) ) ; } return diff ; }
Diff of nodes .
243
4
23,338
EDiff optimizedDiff ( final INodeReadTrx paramNewRtx , final INodeReadTrx paramOldRtx , final DepthCounter paramDepth , final EFireDiff paramFireDiff ) throws TTIOException { assert paramNewRtx != null ; assert paramOldRtx != null ; assert paramDepth != null ; EDiff diff = EDiff . SAMEHASH ; // Check for modifications. switch ( paramNewRtx . getNode ( ) . getKind ( ) ) { case ROOT : case TEXT : case ELEMENT : if ( paramNewRtx . getNode ( ) . getDataKey ( ) != paramOldRtx . getNode ( ) . getDataKey ( ) || paramNewRtx . getNode ( ) . getHash ( ) != paramOldRtx . getNode ( ) . getHash ( ) ) { // Check if nodes are the same (even if subtrees may vary). if ( checkNodes ( paramNewRtx , paramOldRtx ) ) { diff = EDiff . SAME ; } else { diff = diffAlgorithm ( paramNewRtx , paramOldRtx , paramDepth ) ; } } break ; default : // Do nothing. } if ( paramFireDiff == EFireDiff . TRUE ) { if ( diff == EDiff . SAMEHASH ) { fireDiff ( EDiff . SAME , ( ( ITreeStructData ) paramNewRtx . getNode ( ) ) , ( ( ITreeStructData ) paramOldRtx . getNode ( ) ) , new DiffDepth ( paramDepth . getNewDepth ( ) , paramDepth . getOldDepth ( ) ) ) ; } else { fireDiff ( diff , ( ( ITreeStructData ) paramNewRtx . getNode ( ) ) , ( ( ITreeStructData ) paramOldRtx . getNode ( ) ) , new DiffDepth ( paramDepth . getNewDepth ( ) , paramDepth . getOldDepth ( ) ) ) ; } } return diff ; }
Optimized diff which skips unnecessary comparsions .
423
12
23,339
private EDiff diffAlgorithm ( final INodeReadTrx paramNewRtx , final INodeReadTrx paramOldRtx , final DepthCounter paramDepth ) throws TTIOException { EDiff diff = null ; // Check if node has been deleted. if ( paramDepth . getOldDepth ( ) > paramDepth . getNewDepth ( ) ) { diff = EDiff . DELETED ; } else if ( checkUpdate ( paramNewRtx , paramOldRtx ) ) { // Check if node has // been updated. diff = EDiff . UPDATED ; } else { // See if one of the right sibling matches. EFoundEqualNode found = EFoundEqualNode . FALSE ; final long key = paramOldRtx . getNode ( ) . getDataKey ( ) ; while ( ( ( ITreeStructData ) paramOldRtx . getNode ( ) ) . hasRightSibling ( ) && paramOldRtx . moveTo ( ( ( ITreeStructData ) paramOldRtx . getNode ( ) ) . getRightSiblingKey ( ) ) && found == EFoundEqualNode . FALSE ) { if ( checkNodes ( paramNewRtx , paramOldRtx ) ) { found = EFoundEqualNode . TRUE ; } } paramOldRtx . moveTo ( key ) ; diff = found . kindOfDiff ( ) ; } assert diff != null ; return diff ; }
Main algorithm to compute diffs between two nodes .
302
10
23,340
boolean checkUpdate ( final INodeReadTrx paramNewRtx , final INodeReadTrx paramOldRtx ) throws TTIOException { assert paramNewRtx != null ; assert paramOldRtx != null ; boolean updated = false ; final long newKey = paramNewRtx . getNode ( ) . getDataKey ( ) ; boolean movedNewRtx = paramNewRtx . moveTo ( ( ( ITreeStructData ) paramNewRtx . getNode ( ) ) . getRightSiblingKey ( ) ) ; final long oldKey = paramOldRtx . getNode ( ) . getDataKey ( ) ; boolean movedOldRtx = paramOldRtx . moveTo ( ( ( ITreeStructData ) paramOldRtx . getNode ( ) ) . getRightSiblingKey ( ) ) ; if ( movedNewRtx && movedOldRtx && checkNodes ( paramNewRtx , paramOldRtx ) ) { updated = true ; } else if ( ! movedNewRtx && ! movedOldRtx ) { movedNewRtx = paramNewRtx . moveTo ( paramNewRtx . getNode ( ) . getParentKey ( ) ) ; movedOldRtx = paramOldRtx . moveTo ( paramOldRtx . getNode ( ) . getParentKey ( ) ) ; if ( movedNewRtx && movedOldRtx && checkNodes ( paramNewRtx , paramOldRtx ) ) { updated = true ; } } paramNewRtx . moveTo ( newKey ) ; paramOldRtx . moveTo ( oldKey ) ; if ( ! updated ) { updated = paramNewRtx . getNode ( ) . getDataKey ( ) == paramOldRtx . getNode ( ) . getDataKey ( ) ; } return updated ; }
Check for an update of a node .
389
8
23,341
private void checkArgument ( boolean argument , String msg , Object ... args ) { if ( ! argument ) { throw new IllegalArgumentException ( String . format ( msg , args ) ) ; } }
copy of Guava to avoid including Guava in this core library
42
13
23,342
public void open ( final String queueName ) throws NotificationException { try { final ConnectionFactory connFactory ; final Context ctx = new InitialContext ( pr ) ; /* try { Context jcectx = (Context)ctx.lookup("java:comp/env/"); // Still here - use that if (jcectx != null) { ctx = jcectx; } } catch (NamingException nfe) { // Stay with root } */ try { connFactory = ( ConnectionFactory ) ctx . lookup ( pr . getProperty ( "org.bedework.connection.factory.name" ) ) ; // connFactory = (ConnectionFactory)ctx.lookup(connFactoryName); connection = connFactory . createConnection ( ) ; } catch ( final Throwable t ) { if ( debug ( ) ) { error ( t ) ; } throw new NotificationException ( t ) ; } try { /* Session is not transacted, * uses AUTO_ACKNOWLEDGE for message * acknowledgement */ session = connection . createSession ( useTransactions , ackMode ) ; if ( session == null ) { throw new NotificationException ( "No session created" ) ; } final String qn = pr . getProperty ( "org.bedework.jms.queue.prefix" ) + queueName ; try { ourQueue = ( Queue ) new InitialContext ( ) . lookup ( qn ) ; } catch ( final NamingException ne ) { // Try again with our own context ourQueue = ( Queue ) ctx . lookup ( qn ) ; } } catch ( final Throwable t ) { if ( debug ( ) ) { error ( t ) ; } throw new NotificationException ( t ) ; } } catch ( final NotificationException ne ) { throw ne ; } catch ( final Throwable t ) { if ( debug ( ) ) { error ( t ) ; } throw new NotificationException ( t ) ; } }
Open a connection to the named queue ready to create a producer or consumer .
405
15
23,343
public static void writeln ( final Writer writer , final String string , int indentLevel ) throws IOException { writer . write ( StringUtils . repeat ( "\t" , indentLevel ) + string ) ; writer . write ( "\n" ) ; }
Write out the string and a newline appending a number of tabs to properly indent the resulting text - file .
53
23
23,344
public static void writeHeader ( Writer writer , int dpi , String rankDir , String id , List < String > attribLines ) throws IOException { // Default settings if ( attribLines == null ) { attribLines = new ArrayList <> ( ) ; } else { attribLines = new ArrayList <> ( attribLines ) ; } attribLines . add ( "node [shape=box];" ) ; // add ... // DPI and Rankdir StringBuilder header = new StringBuilder ( "digraph " + id + " {\n" ) ; if ( dpi > 0 ) { header . append ( "dpi=" ) . append ( dpi ) . append ( ";\n" ) ; } header . append ( "rankdir=" ) . append ( StringUtils . isNotBlank ( rankDir ) ? rankDir : "LR" ) . append ( ";\n" ) ; // Additional lines for ( String line : attribLines ) { line = line . trim ( ) ; header . append ( line ) . append ( line . endsWith ( ";" ) ? "\n" : ";\n" ) ; } DotUtils . writeln ( writer , header . toString ( ) ) ; }
Write the header structure .
273
5
23,345
public static void renderGraph ( File dotfile , File resultfile ) throws IOException { // call graphviz-dot via commons-exec CommandLine cmdLine = new CommandLine ( DOT_EXE ) ; cmdLine . addArgument ( "-T" + StringUtils . substringAfterLast ( resultfile . getAbsolutePath ( ) , "." ) ) ; cmdLine . addArgument ( dotfile . getAbsolutePath ( ) ) ; DefaultExecutor executor = new DefaultExecutor ( ) ; executor . setExitValue ( 0 ) ; ExecuteWatchdog watchdog = new ExecuteWatchdog ( 60000 ) ; executor . setWatchdog ( watchdog ) ; try { try ( FileOutputStream out2 = new FileOutputStream ( resultfile ) ) { executor . setStreamHandler ( new PumpStreamHandler ( out2 , System . err ) ) ; int exitValue = executor . execute ( cmdLine ) ; if ( exitValue != 0 ) { throw new IOException ( "Could not convert graph to dot, had exit value: " + exitValue + "!" ) ; } } } catch ( IOException e ) { // if something went wrong the file should not be left behind... if ( ! resultfile . delete ( ) ) { System . out . println ( "Could not delete file " + resultfile ) ; } throw e ; } }
Call graphviz - dot to convert the . dot - file to a rendered graph .
291
18
23,346
public static boolean checkDot ( ) throws IOException { // call graphviz-dot via commons-exec CommandLine cmdLine = new CommandLine ( DOT_EXE ) ; cmdLine . addArgument ( "-V" ) ; DefaultExecutor executor = new DefaultExecutor ( ) ; executor . setExitValue ( 0 ) ; ExecuteWatchdog watchdog = new ExecuteWatchdog ( 60000 ) ; executor . setWatchdog ( watchdog ) ; executor . setStreamHandler ( new PumpStreamHandler ( System . out , System . err ) ) ; int exitValue = executor . execute ( cmdLine ) ; if ( exitValue != 0 ) { System . err . println ( "Could not run '" + DOT_EXE + "', had exit value: " + exitValue + "!" ) ; return false ; } return true ; }
Verify if dot can be started and print out the version to stdout .
183
16
23,347
public static void shredInputStream ( final INodeWriteTrx wtx , final InputStream value , final EShredderInsert child ) { final XMLInputFactory factory = XMLInputFactory . newInstance ( ) ; factory . setProperty ( XMLInputFactory . SUPPORT_DTD , false ) ; XMLEventReader parser ; try { parser = factory . createXMLEventReader ( value ) ; } catch ( final XMLStreamException xmlse ) { throw new WebApplicationException ( xmlse ) ; } try { final XMLShredder shredder = new XMLShredder ( wtx , parser , child ) ; shredder . call ( ) ; } catch ( final Exception exce ) { throw new WebApplicationException ( exce ) ; } }
Shreds a given InputStream
156
7
23,348
public static void closeWTX ( final boolean abortTransaction , final INodeWriteTrx wtx , final ISession ses ) throws TTException { synchronized ( ses ) { if ( abortTransaction ) { wtx . abort ( ) ; } ses . close ( ) ; } }
This method closes all open treetank connections concerning a NodeWriteTrx .
60
16
23,349
public String getContentName ( final Request req ) throws Throwable { UtilActionForm form = req . getForm ( ) ; PresentationState ps = getPresentationState ( req ) ; String contentName = ps . getContentName ( ) ; form . setContentName ( contentName ) ; return contentName ; }
Override this to get the contentName from different sources
67
10
23,350
protected String checkLogOut ( final HttpServletRequest request , final UtilActionForm form ) throws Throwable { final String reqUser = request . getRemoteUser ( ) ; final boolean forceLogout = ! Util . equalsString ( reqUser , form . getCurrentUser ( ) ) ; final String temp = request . getParameter ( requestLogout ) ; if ( forceLogout || ( temp != null ) ) { final HttpSession sess = request . getSession ( false ) ; if ( ( sess != null ) && logOutCleanup ( request , form ) ) { sess . invalidate ( ) ; } return forwardLoggedOut ; } return null ; }
Check for logout request .
145
6
23,351
public void setRefreshInterval ( final HttpServletRequest request , final HttpServletResponse response , final int refreshInterval , final String refreshAction , final UtilActionForm form ) { if ( refreshInterval != 0 ) { StringBuilder sb = new StringBuilder ( 250 ) ; sb . append ( refreshInterval ) ; sb . append ( "; URL=" ) ; sb . append ( form . getUrlPrefix ( ) ) ; if ( ! refreshAction . startsWith ( "/" ) ) { sb . append ( "/" ) ; } sb . append ( refreshAction ) ; response . setHeader ( "Refresh" , sb . toString ( ) ) ; } }
Check request for refresh interval
154
5
23,352
public boolean setAppVar ( final String name , final String val , final HashMap < String , String > appVars ) { if ( val == null ) { appVars . remove ( name ) ; return true ; } if ( appVars . size ( ) > maxAppVars ) { return false ; } appVars . put ( name , val ) ; return true ; }
Called to set an application variable to a value
82
10
23,353
protected String checkConfirmationId ( final HttpServletRequest request , final UtilActionForm form ) throws Throwable { String reqpar = request . getParameter ( "confirmationid" ) ; if ( reqpar == null ) { return null ; } if ( ! reqpar . equals ( form . getConfirmationId ( ) ) ) { return "badConformationId" ; } return null ; }
Check for a confirmation id . This is a random string embedded in some requests to confirm that the incoming request came from a page we generated . Not all pages will have such an id but if we do it must match .
86
44
23,354
protected String getReqPar ( final HttpServletRequest req , final String name ) throws Throwable { return Util . checkNull ( req . getParameter ( name ) ) ; }
Get a request parameter stripped of white space . Return null for zero length .
40
15
23,355
public Status route ( final CharSequence method , final CharSequence path , final Result < T > result ) { result . captor . optionalTrailingSlash ( optionalTrailingSlash ) ; final RouteTarget < T > route = trie . lookup ( path , result . captor ) ; if ( route == null ) { return result . notFound ( ) . status ( ) ; } final Target < T > target = route . lookup ( method ) ; if ( target == null ) { return result . notAllowed ( route ) . status ( ) ; } return result . success ( path , route , target ) . status ( ) ; }
Route a request .
136
4
23,356
private int insertName ( final String pName ) throws TTException { final String string = ( pName == null ? "" : pName ) ; final int nameKey = NamePageHash . generateHashForString ( string ) ; NodeMetaPageFactory . MetaKey key = new NodeMetaPageFactory . MetaKey ( nameKey ) ; NodeMetaPageFactory . MetaValue value = new NodeMetaPageFactory . MetaValue ( string ) ; getPageWtx ( ) . getMetaBucket ( ) . put ( key , value ) ; return nameKey ; }
Setting a new name in the metapage .
116
10
23,357
private void adaptForInsert ( final ITreeData paramNewNode , final boolean addAsFirstChild ) throws TTException { assert paramNewNode != null ; if ( paramNewNode instanceof ITreeStructData ) { final ITreeStructData strucNode = ( ITreeStructData ) paramNewNode ; final ITreeStructData parent = ( ITreeStructData ) getPtx ( ) . getData ( paramNewNode . getParentKey ( ) ) ; parent . incrementChildCount ( ) ; if ( addAsFirstChild ) { parent . setFirstChildKey ( paramNewNode . getDataKey ( ) ) ; } getPtx ( ) . setData ( parent ) ; if ( strucNode . hasRightSibling ( ) ) { final ITreeStructData rightSiblingNode = ( ITreeStructData ) getPtx ( ) . getData ( strucNode . getRightSiblingKey ( ) ) ; rightSiblingNode . setLeftSiblingKey ( paramNewNode . getDataKey ( ) ) ; getPtx ( ) . setData ( rightSiblingNode ) ; } if ( strucNode . hasLeftSibling ( ) ) { final ITreeStructData leftSiblingNode = ( ITreeStructData ) getPtx ( ) . getData ( strucNode . getLeftSiblingKey ( ) ) ; leftSiblingNode . setRightSiblingKey ( paramNewNode . getDataKey ( ) ) ; getPtx ( ) . setData ( leftSiblingNode ) ; } } }
Adapting everything for insert operations .
326
7
23,358
private void adaptForRemove ( final ITreeStructData pOldNode ) throws TTException { assert pOldNode != null ; // Adapt left sibling node if there is one. if ( pOldNode . hasLeftSibling ( ) ) { final ITreeStructData leftSibling = ( ITreeStructData ) getPtx ( ) . getData ( pOldNode . getLeftSiblingKey ( ) ) ; leftSibling . setRightSiblingKey ( pOldNode . getRightSiblingKey ( ) ) ; getPtx ( ) . setData ( leftSibling ) ; } // Adapt right sibling node if there is one. if ( pOldNode . hasRightSibling ( ) ) { final ITreeStructData rightSibling = ( ITreeStructData ) getPtx ( ) . getData ( pOldNode . getRightSiblingKey ( ) ) ; rightSibling . setLeftSiblingKey ( pOldNode . getLeftSiblingKey ( ) ) ; getPtx ( ) . setData ( rightSibling ) ; } // Adapt parent, if node has now left sibling it is a first child. final ITreeStructData parent = ( ITreeStructData ) getPtx ( ) . getData ( pOldNode . getParentKey ( ) ) ; if ( ! pOldNode . hasLeftSibling ( ) ) { parent . setFirstChildKey ( pOldNode . getRightSiblingKey ( ) ) ; } parent . decrementChildCount ( ) ; getPtx ( ) . setData ( parent ) ; if ( pOldNode . getKind ( ) == IConstants . ELEMENT ) { // removing attributes for ( int i = 0 ; i < ( ( ElementNode ) pOldNode ) . getAttributeCount ( ) ; i ++ ) { moveTo ( ( ( ElementNode ) pOldNode ) . getAttributeKey ( i ) ) ; getPtx ( ) . removeData ( mDelegate . getCurrentNode ( ) ) ; } // removing namespaces moveTo ( pOldNode . getDataKey ( ) ) ; for ( int i = 0 ; i < ( ( ElementNode ) pOldNode ) . getNamespaceCount ( ) ; i ++ ) { moveTo ( ( ( ElementNode ) pOldNode ) . getNamespaceKey ( i ) ) ; getPtx ( ) . removeData ( mDelegate . getCurrentNode ( ) ) ; } } // Remove old node. getPtx ( ) . removeData ( pOldNode ) ; }
Adapting everything for remove operations .
535
7
23,359
private void rollingUpdate ( final long paramOldHash ) throws TTException { final ITreeData newNode = mDelegate . getCurrentNode ( ) ; final long newNodeHash = newNode . hashCode ( ) ; long resultNew = newNode . hashCode ( ) ; // go the path to the root do { synchronized ( mDelegate . getCurrentNode ( ) ) { getPtx ( ) . getData ( mDelegate . getCurrentNode ( ) . getDataKey ( ) ) ; if ( mDelegate . getCurrentNode ( ) . getDataKey ( ) == newNode . getDataKey ( ) ) { resultNew = mDelegate . getCurrentNode ( ) . getHash ( ) - paramOldHash ; resultNew = resultNew + newNodeHash ; } else { resultNew = mDelegate . getCurrentNode ( ) . getHash ( ) - ( paramOldHash * PRIME ) ; resultNew = resultNew + newNodeHash * PRIME ; } mDelegate . getCurrentNode ( ) . setHash ( resultNew ) ; getPtx ( ) . setData ( mDelegate . getCurrentNode ( ) ) ; } } while ( moveTo ( mDelegate . getCurrentNode ( ) . getParentKey ( ) ) ) ; mDelegate . setCurrentNode ( newNode ) ; }
Adapting the structure with a rolling hash for all ancestors only with update .
285
15
23,360
private void rollingRemove ( ) throws TTException { final ITreeData startNode = mDelegate . getCurrentNode ( ) ; long hashToRemove = startNode . getHash ( ) ; long hashToAdd = 0 ; long newHash = 0 ; // go the path to the root do { synchronized ( mDelegate . getCurrentNode ( ) ) { getPtx ( ) . getData ( mDelegate . getCurrentNode ( ) . getDataKey ( ) ) ; if ( mDelegate . getCurrentNode ( ) . getDataKey ( ) == startNode . getDataKey ( ) ) { // the begin node is always null newHash = 0 ; } else if ( mDelegate . getCurrentNode ( ) . getDataKey ( ) == startNode . getParentKey ( ) ) { // the parent node is just removed newHash = mDelegate . getCurrentNode ( ) . getHash ( ) - ( hashToRemove * PRIME ) ; hashToRemove = mDelegate . getCurrentNode ( ) . getHash ( ) ; } else { // the ancestors are all touched regarding the modification newHash = mDelegate . getCurrentNode ( ) . getHash ( ) - ( hashToRemove * PRIME ) ; newHash = newHash + hashToAdd * PRIME ; hashToRemove = mDelegate . getCurrentNode ( ) . getHash ( ) ; } mDelegate . getCurrentNode ( ) . setHash ( newHash ) ; hashToAdd = newHash ; getPtx ( ) . setData ( mDelegate . getCurrentNode ( ) ) ; } } while ( moveTo ( mDelegate . getCurrentNode ( ) . getParentKey ( ) ) ) ; mDelegate . setCurrentNode ( startNode ) ; }
Adapting the structure with a rolling hash for all ancestors only with remove .
378
15
23,361
public static String buildName ( final QName pQName ) { if ( pQName == null ) { throw new NullPointerException ( "mQName must not be null!" ) ; } String name ; if ( pQName . getPrefix ( ) . isEmpty ( ) ) { name = pQName . getLocalPart ( ) ; } else { name = new StringBuilder ( pQName . getPrefix ( ) ) . append ( ":" ) . append ( pQName . getLocalPart ( ) ) . toString ( ) ; } return name ; }
Building name consisting out of prefix and name . NamespaceUri is not used over here .
124
19
23,362
public static StringBuilder concat ( final String ... message ) { final StringBuilder builder = new StringBuilder ( ) ; for ( final String mess : message ) { builder . append ( mess ) ; builder . append ( " " ) ; } return builder ; }
Util method to provide StringBuilder functionality .
53
9
23,363
public IMetaEntry put ( final IMetaEntry pKey , final IMetaEntry pVal ) { return mMetaMap . put ( pKey , pVal ) ; }
Putting an entry to the map .
36
7
23,364
@ Override public Void call ( ) throws TTException { updateOnly ( ) ; if ( mCommit == EShredderCommit . COMMIT ) { mWtx . commit ( ) ; } return null ; }
Invoking the shredder .
47
6
23,365
private void processStartTag ( final StartElement paramElem ) throws IOException , XMLStreamException , TTException { assert paramElem != null ; // Initialize variables. initializeVars ( ) ; // Main algorithm to determine if same, insert or a delete has to be // made. algorithm ( paramElem ) ; if ( mFound && mIsRightSibling ) { mDelete = EDelete . ATSTARTMIDDLE ; deleteNode ( ) ; } else if ( ! mFound ) { // Increment levels. mLevelInToShredder ++ ; insertElementNode ( paramElem ) ; } else if ( mFound ) { // Increment levels. mLevelInToShredder ++ ; sameElementNode ( ) ; } }
Process start tag .
160
4
23,366
private void processCharacters ( final Characters paramText ) throws IOException , XMLStreamException , TTException { assert paramText != null ; // Initialize variables. initializeVars ( ) ; final String text = paramText . getData ( ) . toString ( ) ; if ( ! text . isEmpty ( ) ) { // Main algorithm to determine if same, insert or a delete has to be // made. algorithm ( paramText ) ; if ( mFound && mIsRightSibling ) { /* * Cannot happen because if text node after end tag get's * deleted it's done already while parsing the end tag. If text * node should be deleted at the top of a subtree (right after a * start tag has been parsed) it's done in * processStartTag(StartElement). */ // mDelete = EDelete.ATSTARTMIDDLE; // deleteNode(); throw new AssertionError ( "" ) ; } else if ( ! mFound ) { insertTextNode ( paramText ) ; } else if ( mFound ) { sameTextNode ( ) ; } } }
Process characters .
228
3
23,367
private void processEndTag ( ) throws XMLStreamException , TTException { mLevelInToShredder -- ; if ( mInserted ) { mInsertedEndTag = true ; } if ( mRemovedNode ) { mRemovedNode = false ; } else { // Move cursor to parent. if ( mWtx . getNode ( ) . getDataKey ( ) == mLastNodeKey ) { /* * An end tag must have been parsed immediately before and it * must have been an empty element at the end of a subtree, thus * move this time to parent node. */ assert mWtx . getNode ( ) . hasParent ( ) && mWtx . getNode ( ) . getKind ( ) == IConstants . ELEMENT ; mWtx . moveTo ( mWtx . getNode ( ) . getParentKey ( ) ) ; } else { if ( mWtx . getNode ( ) . getKind ( ) == IConstants . ELEMENT ) { final ElementNode element = ( ElementNode ) mWtx . getNode ( ) ; if ( element . hasFirstChild ( ) && element . hasParent ( ) ) { // It's not an empty element, thus move to parent. mWtx . moveTo ( mWtx . getNode ( ) . getParentKey ( ) ) ; } // } else { // checkIfLastNode(true); // } } else if ( ( ( ITreeStructData ) mWtx . getNode ( ) ) . hasParent ( ) ) { if ( ( ( ITreeStructData ) mWtx . getNode ( ) ) . hasRightSibling ( ) ) { mWtx . moveTo ( ( ( ITreeStructData ) mWtx . getNode ( ) ) . getRightSiblingKey ( ) ) ; /* * Means next event is an end tag in StAX reader, but * something different where the Treetank transaction * points to, which also means it has to be deleted. */ mKeyMatches = - 1 ; mDelete = EDelete . ATBOTTOM ; deleteNode ( ) ; } mWtx . moveTo ( mWtx . getNode ( ) . getParentKey ( ) ) ; } } mLastNodeKey = mWtx . getNode ( ) . getDataKey ( ) ; // Move cursor to right sibling if it has one. if ( ( ( ITreeStructData ) mWtx . getNode ( ) ) . hasRightSibling ( ) ) { mWtx . moveTo ( ( ( ITreeStructData ) mWtx . getNode ( ) ) . getRightSiblingKey ( ) ) ; mMovedToRightSibling = true ; skipWhitespaces ( mReader ) ; if ( mReader . peek ( ) . getEventType ( ) == XMLStreamConstants . END_ELEMENT ) { /* * Means next event is an end tag in StAX reader, but * something different where the Treetank transaction points * to, which also means it has to be deleted. */ mKeyMatches = - 1 ; mDelete = EDelete . ATBOTTOM ; deleteNode ( ) ; } } else { mMovedToRightSibling = false ; } } }
Process end tag .
685
4
23,368
private void algorithm ( final XMLEvent paramEvent ) throws IOException , XMLStreamException , TTIOException { assert paramEvent != null ; do { /* * Check if a node in the shreddered file on the same level equals * the current element node. */ if ( paramEvent . isStartElement ( ) ) { mFound = checkElement ( paramEvent . asStartElement ( ) ) ; } else if ( paramEvent . isCharacters ( ) ) { mFound = checkText ( paramEvent . asCharacters ( ) ) ; } if ( mWtx . getNode ( ) . getDataKey ( ) != mNodeKey ) { mIsRightSibling = true ; } mKeyMatches = mWtx . getNode ( ) . getDataKey ( ) ; // // if (mFound && mIsRightSibling) { // /* // * Root element of next subtree in shreddered file matches // * so check all descendants. If they match the node must be // * inserted. // */ // switch (paramEvent.getEventType()) { // case XMLStreamConstants.START_ELEMENT: // mMoved = EMoved.FIRSTNODE; // //mFound = checkDescendants(paramEvent.asStartElement()); // mFound = checkDescendants(paramEvent.asStartElement()); // break; // case XMLStreamConstants.CHARACTERS: // mFound = checkText(paramEvent.asCharacters()); // break; // default: // // throw new // AssertionError("Node type not known or not implemented!"); // } // mWtx.moveTo(mKeyMatches); // } } while ( ! mFound && mWtx . moveTo ( ( ( ITreeStructData ) mWtx . getNode ( ) ) . getRightSiblingKey ( ) ) ) ; mWtx . moveTo ( mNodeKey ) ; }
Main algorithm to determine if nodes are equal have to be inserted or have to be removed .
405
18
23,369
private boolean checkText ( final Characters paramEvent ) { assert paramEvent != null ; final String text = paramEvent . getData ( ) . trim ( ) ; return mWtx . getNode ( ) . getKind ( ) == IConstants . TEXT && mWtx . getValueOfCurrentNode ( ) . equals ( text ) ; }
Check if text event and text in Treetank storage match .
72
13
23,370
private void sameTextNode ( ) throws TTIOException , XMLStreamException { // Update variables. mInsert = EInsert . NOINSERT ; mDelete = EDelete . NODELETE ; mInserted = false ; mInsertedEndTag = false ; mRemovedNode = false ; // Check if last node reached. // checkIfLastNode(false); // Skip whitespace events. skipWhitespaces ( mReader ) ; // Move to right sibling if next node isn't an end tag. if ( mReader . peek ( ) . getEventType ( ) != XMLStreamConstants . END_ELEMENT ) { // // Check if next node matches or not. // boolean found = false; // if (mReader.peek().getEventType() == // XMLStreamConstants.START_ELEMENT) { // found = checkElement(mReader.peek().asStartElement()); // } else if (mReader.peek().getEventType() == // XMLStreamConstants.CHARACTERS) { // found = checkText(mReader.peek().asCharacters()); // } // // // If next node doesn't match/isn't the same move on. // if (!found) { if ( mWtx . moveTo ( ( ( ITreeStructData ) mWtx . getNode ( ) ) . getRightSiblingKey ( ) ) ) { mMovedToRightSibling = true ; } else { mMovedToRightSibling = false ; } // } } mInsert = EInsert . ATMIDDLEBOTTOM ; }
In case they are the same nodes move cursor to next node and update stack .
336
16
23,371
private void sameElementNode ( ) throws XMLStreamException , TTException { // Update variables. mInsert = EInsert . NOINSERT ; mDelete = EDelete . NODELETE ; mInserted = false ; mInsertedEndTag = false ; mRemovedNode = false ; // Check if last node reached. // checkIfLastNode(false); // Skip whitespace events. skipWhitespaces ( mReader ) ; // Move transaction. final ElementNode element = ( ElementNode ) mWtx . getNode ( ) ; if ( element . hasFirstChild ( ) ) { /* * If next event needs to be inserted, it has to be inserted at the * top of the subtree, as first child. */ mInsert = EInsert . ATTOP ; mWtx . moveTo ( ( ( ITreeStructData ) mWtx . getNode ( ) ) . getFirstChildKey ( ) ) ; if ( mReader . peek ( ) . getEventType ( ) == XMLStreamConstants . END_ELEMENT ) { /* * Next event is an end tag, so the current child element, where * the transaction currently is located needs to be removed. */ mKeyMatches = - 1 ; mDelete = EDelete . ATBOTTOM ; deleteNode ( ) ; } // } else if (mReader.peek().getEventType() == // XMLStreamConstants.END_ELEMENT // && // !mReader.peek().asEndElement().getName().equals(mWtx.getQNameOfCurrentNode())) // { // /* // * Node must be removed when next end tag doesn't match the // current name and it has no children. // */ // mKeyMatches = -1; // mDelete = EDelete.ATBOTTOM; // deleteNode(); } else if ( mReader . peek ( ) . getEventType ( ) != XMLStreamConstants . END_ELEMENT ) { /* * Treetank transaction can't find a child node, but StAX parser * finds one, so it must be inserted as a first child of the current * node. */ mInsert = EInsert . ATTOP ; mEmptyElement = true ; } else { mInsert = EInsert . ATMIDDLEBOTTOM ; } }
Nodes match thus update stack and move cursor to first child if it is not a leaf node .
486
20
23,372
private void skipWhitespaces ( final XMLEventReader paramReader ) throws XMLStreamException { while ( paramReader . peek ( ) . getEventType ( ) == XMLStreamConstants . CHARACTERS && paramReader . peek ( ) . asCharacters ( ) . isWhiteSpace ( ) ) { paramReader . nextEvent ( ) ; } }
Skip any whitespace event .
74
6
23,373
private void insertElementNode ( final StartElement paramElement ) throws TTException , XMLStreamException { assert paramElement != null ; /* * Add node if it's either not found among right siblings (and the * cursor on the shreddered file is on a right sibling) or if it's not * found in the structure and it is a new last right sibling. */ mDelete = EDelete . NODELETE ; mRemovedNode = false ; switch ( mInsert ) { case ATTOP : // We are at the top of a subtree, no end tag has been parsed // before. if ( ! mEmptyElement ) { // Has to be inserted on the parent node. mWtx . moveTo ( mWtx . getNode ( ) . getParentKey ( ) ) ; } // Insert element as first child. addNewElement ( EAdd . ASFIRSTCHILD , paramElement ) ; mInsert = EInsert . INTERMEDIATE ; break ; case INTERMEDIATE : // Inserts have been made before. EAdd insertNode = EAdd . ASFIRSTCHILD ; if ( mInsertedEndTag ) { /* * An end tag has been read while inserting, thus insert node as * right sibling of parent node. */ mInsertedEndTag = false ; insertNode = EAdd . ASRIGHTSIBLING ; } // Possibly move one sibling back if transaction already moved to // next node. if ( mMovedToRightSibling ) { mWtx . moveTo ( ( ( ITreeStructData ) mWtx . getNode ( ) ) . getLeftSiblingKey ( ) ) ; } // Make sure if transaction is on a text node the node is inserted // as a right sibling. if ( mWtx . getNode ( ) . getKind ( ) == IConstants . TEXT ) { insertNode = EAdd . ASRIGHTSIBLING ; } addNewElement ( insertNode , paramElement ) ; break ; case ATMIDDLEBOTTOM : // Insert occurs at the middle or end of a subtree. // Move one sibling back. if ( mMovedToRightSibling ) { mMovedToRightSibling = false ; mWtx . moveTo ( ( ( ITreeStructData ) mWtx . getNode ( ) ) . getLeftSiblingKey ( ) ) ; } // Insert element as right sibling. addNewElement ( EAdd . ASRIGHTSIBLING , paramElement ) ; mInsert = EInsert . INTERMEDIATE ; break ; default : throw new AssertionError ( "Enum value not known!" ) ; } mInserted = true ; }
Insert an element node .
558
5
23,374
private void insertTextNode ( final Characters paramText ) throws TTException , XMLStreamException { assert paramText != null ; /* * Add node if it's either not found among right siblings (and the * cursor on the shreddered file is on a right sibling) or if it's not * found in the structure and it is a new last right sibling. */ mDelete = EDelete . NODELETE ; mRemovedNode = false ; switch ( mInsert ) { case ATTOP : // Insert occurs at the top of a subtree (no end tag has been parsed // immediately before). // Move to parent. mWtx . moveTo ( mWtx . getNode ( ) . getParentKey ( ) ) ; // Insert as first child. addNewText ( EAdd . ASFIRSTCHILD , paramText ) ; // Move to next node if no end tag follows (thus cursor isn't moved // to parent in processEndTag()). if ( mReader . peek ( ) . getEventType ( ) != XMLStreamConstants . END_ELEMENT ) { if ( mWtx . moveTo ( ( ( ITreeStructData ) mWtx . getNode ( ) ) . getRightSiblingKey ( ) ) ) { mMovedToRightSibling = true ; } else { mMovedToRightSibling = false ; } } else if ( ( ( ITreeStructData ) mWtx . getNode ( ) ) . hasRightSibling ( ) ) { mMovedToRightSibling = false ; mInserted = true ; mKeyMatches = - 1 ; mDelete = EDelete . ATBOTTOM ; deleteNode ( ) ; } mInsert = EInsert . INTERMEDIATE ; break ; case INTERMEDIATE : // Inserts have been made before. EAdd addNode = EAdd . ASFIRSTCHILD ; if ( mInsertedEndTag ) { /* * An end tag has been read while inserting, so move back to * left sibling if there is one and insert as right sibling. */ if ( mMovedToRightSibling ) { mWtx . moveTo ( ( ( ITreeStructData ) mWtx . getNode ( ) ) . getLeftSiblingKey ( ) ) ; } addNode = EAdd . ASRIGHTSIBLING ; mInsertedEndTag = false ; } // Insert element as right sibling. addNewText ( addNode , paramText ) ; // Move to next node if no end tag follows (thus cursor isn't moved // to parent in processEndTag()). if ( mReader . peek ( ) . getEventType ( ) != XMLStreamConstants . END_ELEMENT ) { if ( mWtx . moveTo ( ( ( ITreeStructData ) mWtx . getNode ( ) ) . getRightSiblingKey ( ) ) ) { mMovedToRightSibling = true ; } else { mMovedToRightSibling = false ; } } break ; case ATMIDDLEBOTTOM : // Insert occurs in the middle or end of a subtree. // Move one sibling back. if ( mMovedToRightSibling ) { mWtx . moveTo ( ( ( ITreeStructData ) mWtx . getNode ( ) ) . getLeftSiblingKey ( ) ) ; } // Insert element as right sibling. addNewText ( EAdd . ASRIGHTSIBLING , paramText ) ; // Move to next node. mWtx . moveTo ( ( ( ITreeStructData ) mWtx . getNode ( ) ) . getRightSiblingKey ( ) ) ; mInsert = EInsert . INTERMEDIATE ; break ; default : throw new AssertionError ( "Enum value not known!" ) ; } mInserted = true ; }
Insert a text node .
804
5
23,375
private void deleteNode ( ) throws TTException { /* * If found in one of the rightsiblings in the current shreddered * structure remove all nodes until the transaction points to the found * node (keyMatches). */ if ( mInserted && ! mMovedToRightSibling ) { mInserted = false ; if ( ( ( ITreeStructData ) mWtx . getNode ( ) ) . hasRightSibling ( ) ) { // Cursor is on the inserted node, so move to right sibling. mWtx . moveTo ( ( ( ITreeStructData ) mWtx . getNode ( ) ) . getRightSiblingKey ( ) ) ; } } // // Check if transaction is on the last node in the shreddered file. // checkIfLastNode(true); // Determines if transaction has moved to the parent node after a delete // operation. boolean movedToParent = false ; // Determines if ldeleteNodeast node in a subtree is going to be // deleted. boolean isLast = false ; do { if ( mWtx . getNode ( ) . getDataKey ( ) != mKeyMatches ) { final ITreeStructData node = ( ITreeStructData ) mWtx . getNode ( ) ; if ( ! node . hasRightSibling ( ) && ! node . hasLeftSibling ( ) ) { // if (mDelete == EDelete.ATSTARTMIDDLE) { // // If the delete occurs right before an end tag the // // level hasn't been incremented. // mLevelInShreddered--; // } /* * Node has no right and no left sibling, so the transaction * moves to the parent after the delete. */ movedToParent = true ; } else if ( ! node . hasRightSibling ( ) ) { // Last node has been reached, which means that the // transaction moves to the left sibling. isLast = true ; } mWtx . remove ( ) ; } } while ( mWtx . getNode ( ) . getDataKey ( ) != mKeyMatches && ! movedToParent && ! isLast ) ; if ( movedToParent ) { if ( mDelete == EDelete . ATBOTTOM ) { /* * Deleted right before an end tag has been parsed, thus don't * move transaction to next node in processEndTag(). */ mRemovedNode = true ; } /* * Treetank transaction has been moved to parent, because all child * nodes have been deleted, thus to right sibling. */ mWtx . moveTo ( ( ( ITreeStructData ) mWtx . getNode ( ) ) . getRightSiblingKey ( ) ) ; } else { if ( ( ( ITreeStructData ) mWtx . getNode ( ) ) . hasFirstChild ( ) ) { if ( mDelete == EDelete . ATBOTTOM && isLast ) { /* * Deleted right before an end tag has been parsed, thus * don't move transaction to next node in processEndTag(). */ mRemovedNode = true ; } if ( isLast ) { // If last node of a subtree has been removed, move to // parent and right sibling. mWtx . moveTo ( mWtx . getNode ( ) . getParentKey ( ) ) ; mWtx . moveTo ( ( ( ITreeStructData ) mWtx . getNode ( ) ) . getRightSiblingKey ( ) ) ; // // If the delete occurs right before an end tag the level // // hasn't been incremented. // if (mDelete == EDelete.ATSTARTMIDDLE) { // mLevelInShreddered--; // } } } } // Check if transaction is on the last node in the shreddered file. // checkIfLastNode(true); mInsert = EInsert . NOINSERT ; }
Delete node .
820
3
23,376
private void initializeVars ( ) { mNodeKey = mWtx . getNode ( ) . getDataKey ( ) ; mFound = false ; mIsRightSibling = false ; mKeyMatches = - 1 ; }
Initialize variables needed for the main algorithm .
49
9
23,377
private boolean checkElement ( final StartElement mEvent ) throws TTIOException { assert mEvent != null ; boolean retVal = false ; // Matching element names? if ( mWtx . getNode ( ) . getKind ( ) == IConstants . ELEMENT && mWtx . getQNameOfCurrentNode ( ) . equals ( mEvent . getName ( ) ) ) { // Check if atts and namespaces are the same. final long nodeKey = mWtx . getNode ( ) . getDataKey ( ) ; // Check attributes. boolean foundAtts = false ; boolean hasAtts = false ; for ( final Iterator < ? > it = mEvent . getAttributes ( ) ; it . hasNext ( ) ; ) { hasAtts = true ; final Attribute attribute = ( Attribute ) it . next ( ) ; for ( int i = 0 , attCount = ( ( ElementNode ) mWtx . getNode ( ) ) . getAttributeCount ( ) ; i < attCount ; i ++ ) { mWtx . moveToAttribute ( i ) ; if ( attribute . getName ( ) . equals ( mWtx . getQNameOfCurrentNode ( ) ) && attribute . getValue ( ) . equals ( mWtx . getValueOfCurrentNode ( ) ) ) { foundAtts = true ; mWtx . moveTo ( nodeKey ) ; break ; } mWtx . moveTo ( nodeKey ) ; } if ( ! foundAtts ) { break ; } } if ( ! hasAtts && ( ( ElementNode ) mWtx . getNode ( ) ) . getAttributeCount ( ) == 0 ) { foundAtts = true ; } // Check namespaces. boolean foundNamesps = false ; boolean hasNamesps = false ; for ( final Iterator < ? > namespIt = mEvent . getNamespaces ( ) ; namespIt . hasNext ( ) ; ) { hasNamesps = true ; final Namespace namespace = ( Namespace ) namespIt . next ( ) ; for ( int i = 0 , namespCount = ( ( ElementNode ) mWtx . getNode ( ) ) . getNamespaceCount ( ) ; i < namespCount ; i ++ ) { mWtx . moveToNamespace ( i ) ; final ITreeNameData namenode = ( ITreeNameData ) mWtx . getNode ( ) ; if ( namespace . getNamespaceURI ( ) . equals ( mWtx . nameForKey ( namenode . getURIKey ( ) ) ) && namespace . getPrefix ( ) . equals ( mWtx . nameForKey ( namenode . getNameKey ( ) ) ) ) { foundNamesps = true ; mWtx . moveTo ( nodeKey ) ; break ; } mWtx . moveTo ( nodeKey ) ; } if ( ! foundNamesps ) { break ; } } if ( ! hasNamesps && ( ( ElementNode ) mWtx . getNode ( ) ) . getNamespaceCount ( ) == 0 ) { foundNamesps = true ; } // Check if atts and namespaces are the same. if ( foundAtts && foundNamesps ) { retVal = true ; } else { retVal = false ; } } return retVal ; }
Check if current element matches the element in the shreddered file .
704
14
23,378
@ Override @ SuppressWarnings ( { "unchecked" } ) public synchronized Enumeration < Object > keys ( ) { Enumeration < Object > keysEnum = super . keys ( ) ; @ SuppressWarnings ( "rawtypes" ) Vector keyList = new Vector <> ( ) ; // NOPMD - vector used on purpose here... while ( keysEnum . hasMoreElements ( ) ) { keyList . add ( keysEnum . nextElement ( ) ) ; } Collections . sort ( keyList ) ; // reverse this list to have the newest items on top Collections . reverse ( keyList ) ; return keyList . elements ( ) ; }
Overrides called by the store method .
145
9
23,379
public void setException ( Throwable t ) { if ( err == null ) { t . printStackTrace ( ) ; } else { err . emit ( t ) ; } }
Can be called by a page to signal an exceptiuon
38
12
23,380
public boolean processErrors ( MessageEmit err ) { if ( valErrors . size ( ) == 0 ) { return false ; } for ( ValError ve : valErrors ) { processError ( err , ve ) ; } valErrors . clear ( ) ; return true ; }
processErrors is called to determine if there were any errors . If so processError is called for each error adn the errors vector is cleared . Override the processError method to emit custom messages .
61
41
23,381
public static boolean createResource ( String name , AbstractModule module ) throws StorageAlreadyExistsException , TTException { File file = new File ( ROOT_PATH ) ; File storageFile = new File ( STORAGE_PATH ) ; if ( ! file . exists ( ) || ! storageFile . exists ( ) ) { file . mkdirs ( ) ; StorageConfiguration configuration = new StorageConfiguration ( storageFile ) ; // Creating and opening the storage. // Making it ready for usage. Storage . truncateStorage ( configuration ) ; Storage . createStorage ( configuration ) ; } IStorage storage = Storage . openStorage ( storageFile ) ; Injector injector = Guice . createInjector ( module ) ; IBackendFactory backend = injector . getInstance ( IBackendFactory . class ) ; IRevisioning revision = injector . getInstance ( IRevisioning . class ) ; Properties props = StandardSettings . getProps ( storageFile . getAbsolutePath ( ) , name ) ; ResourceConfiguration mResourceConfig = new ResourceConfiguration ( props , backend , revision , new FileDataFactory ( ) , new FilelistenerMetaDataFactory ( ) ) ; storage . createResource ( mResourceConfig ) ; return true ; }
Create a new storage with the given name and backend .
259
11
23,382
public static List < String > getResources ( ) { File resources = new File ( STORAGE_PATH + File . separator + "/resources" ) ; File [ ] children = resources . listFiles ( ) ; if ( children == null ) { return new ArrayList < String > ( ) ; } List < String > storages = new ArrayList < String > ( ) ; for ( int i = 0 ; i < children . length ; i ++ ) { if ( children [ i ] . isDirectory ( ) ) storages . add ( children [ i ] . getName ( ) ) ; } return storages ; }
Retrieve a list of all storages .
132
10
23,383
public static ISession getSession ( String resourceName ) throws ResourceNotExistingException , TTException { File storageFile = new File ( STORAGE_PATH ) ; ISession session = null ; if ( ! storageFile . exists ( ) ) { throw new ResourceNotExistingException ( ) ; } else { new StorageConfiguration ( storageFile ) ; IStorage storage = Storage . openStorage ( storageFile ) ; session = storage . getSession ( new SessionConfiguration ( resourceName , null ) ) ; } return session ; }
Retrieve a session from the system for the given Storagename
108
14
23,384
public static void removeResource ( String pResourceName ) throws TTException , ResourceNotExistingException { ISession session = getSession ( pResourceName ) ; session . truncate ( ) ; }
Via this method you can remove a storage from the system .
41
12
23,385
private void generateElement ( final INodeReadTrx paramRtx ) throws TTIOException { final AttributesImpl atts = new AttributesImpl ( ) ; final long key = paramRtx . getNode ( ) . getDataKey ( ) ; try { // Process namespace nodes. for ( int i = 0 , namesCount = ( ( ElementNode ) paramRtx . getNode ( ) ) . getNamespaceCount ( ) ; i < namesCount ; i ++ ) { paramRtx . moveToNamespace ( i ) ; final QName qName = paramRtx . getQNameOfCurrentNode ( ) ; mContHandler . startPrefixMapping ( qName . getPrefix ( ) , qName . getNamespaceURI ( ) ) ; final String mURI = qName . getNamespaceURI ( ) ; if ( qName . getLocalPart ( ) . length ( ) == 0 ) { atts . addAttribute ( mURI , "xmlns" , "xmlns" , "CDATA" , mURI ) ; } else { atts . addAttribute ( mURI , "xmlns" , "xmlns:" + paramRtx . getQNameOfCurrentNode ( ) . getLocalPart ( ) , "CDATA" , mURI ) ; } paramRtx . moveTo ( key ) ; } // Process attributes. for ( int i = 0 , attCount = ( ( ElementNode ) paramRtx . getNode ( ) ) . getAttributeCount ( ) ; i < attCount ; i ++ ) { paramRtx . moveToAttribute ( i ) ; final QName qName = paramRtx . getQNameOfCurrentNode ( ) ; final String mURI = qName . getNamespaceURI ( ) ; atts . addAttribute ( mURI , qName . getLocalPart ( ) , NodeWriteTrx . buildName ( qName ) , paramRtx . getTypeOfCurrentNode ( ) , paramRtx . getValueOfCurrentNode ( ) ) ; paramRtx . moveTo ( key ) ; } // Create SAX events. final QName qName = paramRtx . getQNameOfCurrentNode ( ) ; mContHandler . startElement ( qName . getNamespaceURI ( ) , qName . getLocalPart ( ) , NodeWriteTrx . buildName ( qName ) , atts ) ; // Empty elements. if ( ! ( ( ElementNode ) paramRtx . getNode ( ) ) . hasFirstChild ( ) ) { mContHandler . endElement ( qName . getNamespaceURI ( ) , qName . getLocalPart ( ) , NodeWriteTrx . buildName ( qName ) ) ; } } catch ( final SAXException exc ) { exc . printStackTrace ( ) ; } }
Generate a start element event .
595
7
23,386
private void generateText ( final INodeReadTrx paramRtx ) { try { mContHandler . characters ( paramRtx . getValueOfCurrentNode ( ) . toCharArray ( ) , 0 , paramRtx . getValueOfCurrentNode ( ) . length ( ) ) ; } catch ( final SAXException exc ) { exc . printStackTrace ( ) ; } }
Generate a text event .
82
6
23,387
@ SuppressWarnings ( "unchecked" ) public static Object getMBean ( final Class c , final String name ) throws Throwable { final MBeanServer server = getMbeanServer ( ) ; // return MBeanProxyExt.create(c, name, server); return JMX . newMBeanProxy ( server , new ObjectName ( name ) , c ) ; }
Create a proxy to the given mbean
85
8
23,388
public static void register ( final Server s ) { final ServletHolder sh = new ServletHolder ( ServletContainer . class ) ; sh . setInitParameter ( "com.sun.jersey.config.property.resourceConfigClass" , "com.sun.jersey.api.core.PackagesResourceConfig" ) ; sh . setInitParameter ( "com.sun.jersey.config.property.packages" , "org.jaxrx.resource" ) ; new Context ( s , "/" , Context . SESSIONS ) . addServlet ( sh , "/" ) ; }
Constructor attaching JAX - RX to the specified server .
130
12
23,389
public IData getData ( final long pDataKey ) throws TTIOException { checkArgument ( pDataKey >= 0 ) ; checkState ( ! mClose , "Transaction already closed" ) ; // Calculate bucket and data part for given datakey. final long seqBucketKey = pDataKey >> IConstants . INDIRECT_BUCKET_COUNT [ 3 ] ; final int dataBucketOffset = dataBucketOffset ( pDataKey ) ; DataBucket bucket = mCache . getIfPresent ( seqBucketKey ) ; if ( bucket == null ) { final List < DataBucket > listRevs = getSnapshotBuckets ( seqBucketKey ) ; final DataBucket [ ] revs = listRevs . toArray ( new DataBucket [ listRevs . size ( ) ] ) ; checkState ( revs . length > 0 , "Number of Buckets to reconstruct must be larger than 0" ) ; // Build up the complete bucket. final IRevisioning revision = mSession . getConfig ( ) . mRevision ; bucket = revision . combineBuckets ( revs ) ; mCache . put ( seqBucketKey , bucket ) ; } final IData returnVal = bucket . getData ( dataBucketOffset ) ; // root-fsys is excluded from the checkagainst deletion based on the necesssity of the data-layer to // reference against this data while creation of the transaction if ( pDataKey == 0 ) { return returnVal ; } else { return checkItemIfDeleted ( returnVal ) ; } }
Getting the data related to the given data key .
339
10
23,390
public boolean close ( ) throws TTIOException { if ( ! mClose ) { mSession . deregisterBucketTrx ( this ) ; mBucketReader . close ( ) ; mClose = true ; return true ; } else { return false ; } }
Closing this Readtransaction .
55
7
23,391
protected final List < DataBucket > getSnapshotBuckets ( final long pSeqDataBucketKey ) throws TTIOException { // Return Value, since the revision iterates a flexible number of version, this has to be a list // first. final List < DataBucket > dataBuckets = new ArrayList < DataBucket > ( ) ; // Getting the keys for the revRoots final long [ ] pathToRoot = BucketReadTrx . dereferenceLeafOfTree ( mBucketReader , mUberBucket . getReferenceKeys ( ) [ IReferenceBucket . GUARANTEED_INDIRECT_OFFSET ] , mRootBucket . getRevision ( ) ) ; final RevisionRootBucket rootBucket = ( RevisionRootBucket ) mBucketReader . read ( pathToRoot [ IConstants . INDIRECT_BUCKET_COUNT . length ] ) ; final int numbersToRestore = Integer . parseInt ( mSession . getConfig ( ) . mProperties . getProperty ( ConstructorProps . NUMBERTORESTORE ) ) ; // starting from the current databucket final long [ ] pathToRecentBucket = dereferenceLeafOfTree ( mBucketReader , rootBucket . getReferenceKeys ( ) [ IReferenceBucket . GUARANTEED_INDIRECT_OFFSET ] , pSeqDataBucketKey ) ; DataBucket bucket ; long bucketKey = pathToRecentBucket [ IConstants . INDIRECT_BUCKET_COUNT . length ] ; // jumping through the databuckets based on the pointers while ( dataBuckets . size ( ) < numbersToRestore && bucketKey > - 1 ) { bucket = ( DataBucket ) mBucketReader . read ( bucketKey ) ; dataBuckets . add ( bucket ) ; bucketKey = bucket . getLastBucketPointer ( ) ; } // check if bucket was ever written before to perform check if ( bucketKey > - 1 ) { checkStructure ( mBucketReader , pathToRecentBucket , rootBucket , pSeqDataBucketKey ) ; checkStructure ( mBucketReader , pathToRoot , mUberBucket , mRootBucket . getRevision ( ) ) ; } return dataBuckets ; }
Dereference data bucket reference .
501
7
23,392
protected static final int dataBucketOffset ( final long pDataKey ) { // INDIRECT_BUCKET_COUNT[3] is only taken to get the difference between 2^7 and the actual // datakey as offset. It has nothing to do with the levels. final long dataBucketOffset = ( pDataKey - ( ( pDataKey >> IConstants . INDIRECT_BUCKET_COUNT [ 3 ] ) << IConstants . INDIRECT_BUCKET_COUNT [ 3 ] ) ) ; return ( int ) dataBucketOffset ; }
Calculate data bucket offset for a given data key .
127
12
23,393
protected static final long [ ] dereferenceLeafOfTree ( final IBackendReader pReader , final long pStartKey , final long pSeqBucketKey ) throws TTIOException { final long [ ] orderNumber = getOrderNumbers ( pSeqBucketKey ) ; // Initial state pointing to the indirect bucket of level 0. final long [ ] keys = new long [ IConstants . INDIRECT_BUCKET_COUNT . length + 1 ] ; IndirectBucket bucket = null ; keys [ 0 ] = pStartKey ; // Iterate through all levels... for ( int level = 0 ; level < orderNumber . length ; level ++ ) { // ..read the buckets and.. bucket = ( IndirectBucket ) pReader . read ( keys [ level ] ) ; // ..compute the offsets out of the order-numbers pre-computed before and store it in the // key-array. keys [ level + 1 ] = bucket . getReferenceKeys ( ) [ dataBucketOffset ( orderNumber [ level ] ) ] ; // if the bucketKey is 0, return -1 to distinguish mark non-written buckets explicitly. if ( keys [ level + 1 ] == 0 ) { Arrays . fill ( keys , - 1 ) ; return keys ; } } // Return reference to leaf of indirect tree. return keys ; }
Find reference pointing to leaf bucket of an indirect tree .
286
11
23,394
public static OptionsI getOptions ( final String globalPrefix , final String appPrefix , final String optionsFile , final String outerTagName ) throws OptionsException { try { Object o = Class . forName ( envclass ) . newInstance ( ) ; if ( o == null ) { throw new OptionsException ( "Class " + envclass + " not found" ) ; } if ( ! ( o instanceof OptionsI ) ) { throw new OptionsException ( "Class " + envclass + " is not a subclass of " + OptionsI . class . getName ( ) ) ; } OptionsI options = ( OptionsI ) o ; options . init ( globalPrefix , appPrefix , optionsFile , outerTagName ) ; return options ; } catch ( OptionsException ce ) { throw ce ; } catch ( Throwable t ) { throw new OptionsException ( t ) ; } }
Obtain and initialise an options object .
185
9
23,395
public static Options fromStream ( final String globalPrefix , final String appPrefix , final String outerTagName , final InputStream is ) throws OptionsException { Options opts = new Options ( ) ; opts . init ( globalPrefix , appPrefix , is , outerTagName ) ; return opts ; }
Return an object that uses a local set of options parsed from the input stream .
67
16
23,396
private void addLeafLabel ( ) { final int nodeKind = mRtx . getNode ( ) . getKind ( ) ; if ( ! mLeafLabels . containsKey ( nodeKind ) ) { mLeafLabels . put ( nodeKind , new ArrayList < ITreeData > ( ) ) ; } mLeafLabels . get ( nodeKind ) . add ( mRtx . getNode ( ) ) ; }
Add leaf node label .
94
5
23,397
public FilterGlobals getGlobals ( final HttpServletRequest req ) { HttpSession sess = req . getSession ( ) ; if ( sess == null ) { // We're screwed return null ; } Object o = sess . getAttribute ( globalsName ) ; FilterGlobals fg ; if ( o == null ) { fg = newFilterGlobals ( ) ; sess . setAttribute ( globalsName , fg ) ; if ( debug ( ) ) { debug ( "Created new FilterGlobals from session " + sess . getId ( ) ) ; } } else { fg = ( FilterGlobals ) o ; //if (debug()) { // getLogger().debug("Obtained FilterGlobals from session with id " + // sess.getId()); //} } return fg ; }
Get the globals from the session
185
7
23,398
@ PUT @ Consumes ( { MediaType . TEXT_XML , MediaType . APPLICATION_XML } ) public Response putResource ( @ PathParam ( JaxRxConstants . SYSTEM ) final String system , @ PathParam ( JaxRxConstants . RESOURCE ) final String resource , @ Context final HttpHeaders headers , final InputStream xml ) { final JaxRx impl = Systems . getInstance ( system ) ; final String info = impl . update ( xml , new ResourcePath ( resource , headers ) ) ; return Response . created ( null ) . entity ( info ) . build ( ) ; }
This method will be called when a new XML file has to be stored within the database . The user request will be forwarded to this method . Afterwards it creates a response message with the created HTTP status code if the storing has been successful .
134
47
23,399
@ DELETE public Response deleteResource ( @ PathParam ( JaxRxConstants . SYSTEM ) final String system , @ PathParam ( JaxRxConstants . RESOURCE ) final String resource , @ Context final HttpHeaders headers ) { final JaxRx impl = Systems . getInstance ( system ) ; final String info = impl . delete ( new ResourcePath ( resource , headers ) ) ; return Response . ok ( ) . entity ( info ) . build ( ) ; }
This method will be called when an HTTP client sends a DELETE request to delete an existing resource .
104
21