idx int64 0 165k | question stringlengths 73 4.15k | target stringlengths 5 918 | len_question int64 21 890 | len_target int64 3 255 |
|---|---|---|---|---|
33,100 | protected void autoCreateChildren ( Name primaryType , NodeTypes capabilities ) throws ItemExistsException , PathNotFoundException , VersionException , ConstraintViolationException , LockException , RepositoryException { Collection < JcrNodeDefinition > autoChildDefns = capabilities . getAutoCreatedChildNodeDefinitions ( primaryType ) ; if ( ! autoChildDefns . isEmpty ( ) ) { // There is at least one auto-created child under this node ... Set < Name > childNames = new HashSet < Name > ( ) ; for ( JcrNodeDefinition defn : autoChildDefns ) { // Residual definitions cannot be both auto-created and residual; // see Section 3.7.2.3.4 of the JCR 2.0 specfication" assert ! defn . isResidual ( ) ; if ( defn . isProtected ( ) ) { // Protected items are created by the implementation, so we'll not do these ... continue ; } Name childName = defn . getInternalName ( ) ; if ( ! childNames . contains ( childName ) ) { // We've not already created a child with this name ... JcrNodeType childPrimaryType = defn . getDefaultPrimaryType ( ) ; addChildNode ( childName , childPrimaryType . getInternalName ( ) , null , false , false ) ; } } } } | Create in this node any auto - created child nodes . | 292 | 11 |
33,101 | final AbstractJcrProperty removeExistingProperty ( Name name ) throws VersionException , LockException , RepositoryException { AbstractJcrProperty existing = getProperty ( name ) ; if ( existing != null ) { existing . remove ( ) ; return existing ; } // Return without throwing an exception to match behavior of the reference implementation. // This is also in conformance with the spec. See MODE-956 for details. return null ; } | Removes an existing property with the supplied name . Note that if a property with the given name does not exist then this method returns null and does not throw an exception . | 90 | 34 |
33,102 | final AbstractJcrProperty setProperty ( Name name , Value [ ] values , int jcrPropertyType , boolean skipReferenceValidation ) throws VersionException , LockException , ConstraintViolationException , RepositoryException { return setProperty ( name , values , jcrPropertyType , false , skipReferenceValidation , false , false ) ; } | Sets a multi valued property skipping over protected ones . | 72 | 11 |
33,103 | protected final NodeIterator referringNodes ( ReferenceType referenceType ) throws RepositoryException { if ( ! this . isReferenceable ( ) ) { return JcrEmptyNodeIterator . INSTANCE ; } // Get all of the nodes that are referring to this node ... Set < NodeKey > keys = node ( ) . getReferrers ( sessionCache ( ) , referenceType ) ; if ( keys . isEmpty ( ) ) return JcrEmptyNodeIterator . INSTANCE ; return new JcrNodeIterator ( session ( ) , keys . iterator ( ) , keys . size ( ) , null ) ; } | Obtain an iterator over the nodes that reference this node . | 124 | 12 |
33,104 | protected boolean containsChangesWithExternalDependencies ( AtomicReference < Set < NodeKey > > affectedNodeKeys ) throws RepositoryException { Set < NodeKey > allChanges = sessionCache ( ) . getChangedNodeKeys ( ) ; Set < NodeKey > changesAtOrBelowThis = sessionCache ( ) . getChangedNodeKeysAtOrBelow ( this . node ( ) ) ; removeReferrerChanges ( allChanges , changesAtOrBelowThis ) ; if ( affectedNodeKeys != null ) affectedNodeKeys . set ( changesAtOrBelowThis ) ; return ! changesAtOrBelowThis . containsAll ( allChanges ) ; } | Determines whether this node or any nodes below it contain changes that depend on nodes that are outside of this node s hierarchy . | 131 | 26 |
33,105 | private void removeReferrerChanges ( Set < NodeKey > allChanges , Set < NodeKey > changesAtOrBelowThis ) throws RepositoryException { // check if there are any nodes in the overall list of changes (and outside the branch) due to reference changes for ( Iterator < NodeKey > allChangesIt = allChanges . iterator ( ) ; allChangesIt . hasNext ( ) ; ) { NodeKey changedNodeKey = allChangesIt . next ( ) ; if ( changesAtOrBelowThis . contains ( changedNodeKey ) ) { continue ; } MutableCachedNode changedNodeOutsideBranch = session ( ) . cache ( ) . mutable ( changedNodeKey ) ; AbstractJcrNode changedNode = null ; try { changedNode = session ( ) . node ( changedNodeKey , null ) ; } catch ( ItemNotFoundException e ) { // node was deleted allChangesIt . remove ( ) ; continue ; } boolean isShareable = changedNode . isShareable ( ) ; if ( isShareable /* && changedNodeOutsideBranch.hasOnlyChangesToAdditionalParents() */ ) { // assume that a shared node was added/removed and is to be included ... allChangesIt . remove ( ) ; continue ; } boolean isReferenceable = changedNode . isReferenceable ( ) ; if ( ! isReferenceable ) { continue ; } Set < NodeKey > changedReferrers = changedNodeOutsideBranch . getChangedReferrerNodes ( ) ; for ( NodeKey changedNodeInBranchKey : changesAtOrBelowThis ) { if ( changedReferrers . contains ( changedNodeInBranchKey ) ) { // one of the changes in the branch is a referrer of the node outside the branch so we won't take the outside // node into account allChangesIt . remove ( ) ; } } } } | Removes all the keys from the first set which represent referrer node keys to any of the nodes in the second set . | 386 | 25 |
33,106 | public char [ ] getPassword ( ) { String result = properties . getProperty ( LocalJcrDriver . PASSWORD_PROPERTY_NAME ) ; return result != null ? result . toCharArray ( ) : null ; } | Get the JCR password . This is not required . | 49 | 11 |
33,107 | public boolean isTeiidSupport ( ) { String result = properties . getProperty ( LocalJcrDriver . TEIID_SUPPORT_PROPERTY_NAME ) ; if ( result == null ) { return false ; } return result . equalsIgnoreCase ( Boolean . TRUE . toString ( ) ) ; } | Return true of Teiid support is required for this connection . | 67 | 13 |
33,108 | public Credentials getCredentials ( ) { String username = getUsername ( ) ; char [ ] password = getPassword ( ) ; if ( username != null ) { return new SimpleCredentials ( username , password ) ; } return null ; } | Return the credentials based on the user name and password . | 54 | 11 |
33,109 | public static byte [ ] getHash ( String digestName , InputStream stream ) throws NoSuchAlgorithmException , IOException { CheckArg . isNotNull ( stream , "stream" ) ; MessageDigest digest = MessageDigest . getInstance ( digestName ) ; assert digest != null ; int bufSize = 1024 ; byte [ ] buffer = new byte [ bufSize ] ; int n = stream . read ( buffer , 0 , bufSize ) ; while ( n != - 1 ) { digest . update ( buffer , 0 , n ) ; n = stream . read ( buffer , 0 , bufSize ) ; } return digest . digest ( ) ; } | Get the hash of the supplied content using the digest identified by the supplied name . Note that this method never closes the supplied stream . | 136 | 26 |
33,110 | public static String sha1 ( String string ) { try { byte [ ] sha1 = SecureHash . getHash ( SecureHash . Algorithm . SHA_1 , string . getBytes ( ) ) ; return SecureHash . asHexString ( sha1 ) ; } catch ( NoSuchAlgorithmException e ) { throw new SystemFailureException ( e ) ; } } | Computes the sha1 value for the given string . | 80 | 12 |
33,111 | public RepositoryDelegate createRepositoryDelegate ( String url , Properties info , JcrContextFactory contextFactory ) throws SQLException { if ( ! acceptUrl ( url ) ) { throw new SQLException ( JdbcLocalI18n . invalidUrlPrefix . text ( LocalJcrDriver . JNDI_URL_PREFIX ) ) ; } return create ( determineProtocol ( url ) , url , info , contextFactory ) ; } | Create a RepositoryDelegate instance given the connection information . | 99 | 12 |
33,112 | public static Set < SelectorName > nameSetFrom ( Set < SelectorName > firstSet , Set < SelectorName > secondSet ) { if ( ( firstSet == null || firstSet . isEmpty ( ) ) && ( secondSet == null || secondSet . isEmpty ( ) ) ) { return Collections . emptySet ( ) ; } Set < SelectorName > result = new LinkedHashSet < SelectorName > ( ) ; result . addAll ( firstSet ) ; if ( secondSet != null ) result . addAll ( secondSet ) ; return Collections . unmodifiableSet ( result ) ; } | Create a set that contains the SelectName objects in the supplied sets . | 131 | 14 |
33,113 | public void setSplitPattern ( String regularExpression ) throws PatternSyntaxException { CheckArg . isNotNull ( regularExpression , "regularExpression" ) ; Pattern . compile ( splitPattern ) ; splitPattern = regularExpression ; } | Sets the regular expression to use to split incoming rows . | 51 | 12 |
33,114 | public Document read ( ) { try { do { if ( stream == null ) { // Open the stream to the next file ... stream = openNextFile ( ) ; if ( stream == null ) { // No more files to read ... return null ; } documents = Json . readMultiple ( stream , false ) ; } try { Document doc = documents . nextDocument ( ) ; if ( doc != null ) return doc ; } catch ( IOException e ) { // We'll just continue ... } // Close the stream and try opening the next stream ... close ( stream ) ; stream = null ; } while ( true ) ; } catch ( IOException e ) { problems . addError ( JcrI18n . problemsWritingDocumentToBackup , currentFile . getAbsolutePath ( ) , e . getMessage ( ) ) ; return null ; } } | Read the next document from the files . | 177 | 8 |
33,115 | public Duration add ( long duration , TimeUnit unit ) { long durationInNanos = TimeUnit . NANOSECONDS . convert ( duration , unit ) ; return new Duration ( this . durationInNanos + durationInNanos ) ; } | Add the supplied duration to this duration and return the result . | 53 | 12 |
33,116 | public Duration subtract ( long duration , TimeUnit unit ) { long durationInNanos = TimeUnit . NANOSECONDS . convert ( duration , unit ) ; return new Duration ( this . durationInNanos - durationInNanos ) ; } | Subtract the supplied duration from this duration and return the result . | 53 | 14 |
33,117 | public Duration add ( Duration duration ) { return new Duration ( this . durationInNanos + ( duration == null ? 0l : duration . longValue ( ) ) ) ; } | Add the supplied duration to this duration and return the result . A null value is treated as a duration of 0 nanoseconds . | 37 | 27 |
33,118 | public Duration subtract ( Duration duration ) { return new Duration ( this . durationInNanos - ( duration == null ? 0l : duration . longValue ( ) ) ) ; } | Subtract the supplied duration from this duration and return the result . A null value is treated as a duration of 0 nanoseconds . | 37 | 29 |
33,119 | public Components getComponents ( ) { if ( this . components == null ) { // This is idempotent, so no need to synchronize ... // Calculate how many seconds, and don't lose any information ... BigDecimal bigSeconds = new BigDecimal ( this . durationInNanos ) . divide ( new BigDecimal ( 1000000000 ) ) ; // Calculate the minutes, and round to lose the seconds int minutes = bigSeconds . intValue ( ) / 60 ; // Remove the minutes from the seconds, to just have the remainder of seconds double dMinutes = minutes ; double seconds = bigSeconds . doubleValue ( ) - dMinutes * 60 ; // Now compute the number of full hours, and change 'minutes' to hold the remainding minutes int hours = minutes / 60 ; minutes = minutes - ( hours * 60 ) ; this . components = new Components ( hours , minutes , seconds ) ; } return this . components ; } | Return the duration components . | 203 | 5 |
33,120 | public long getDuration ( TimeUnit unit ) { if ( unit == null ) throw new IllegalArgumentException ( ) ; return unit . convert ( durationInNanos , TimeUnit . NANOSECONDS ) ; } | Get the duration value in the supplied unit of time . | 47 | 11 |
33,121 | public void changed ( ChangeSet changes ) { checkNotClosed ( ) ; if ( LOGGER . isTraceEnabled ( ) ) { LOGGER . trace ( "Cache for workspace '{0}' received {1} changes from local sessions: {2}" , workspaceName , changes . size ( ) , changes ) ; } // Clear this workspace's cached nodes (iteratively is okay since it's a ConcurrentMap) ... for ( NodeKey key : changes . changedNodes ( ) ) { if ( closed ) break ; nodesByKey . remove ( key ) ; } // Send the changes to the change bus so that others can see them ... if ( changeBus != null ) changeBus . notify ( changes ) ; } | Signal that changes have been made to the persisted data . Related information in the cache is cleared and this workspace s listener is notified of the changes . | 152 | 30 |
33,122 | private static synchronized JcrRepository getRepository ( String configFileName , String repositoryName , final Context nameCtx , final Name jndiName ) throws IOException , RepositoryException , NamingException { if ( ! StringUtil . isBlank ( repositoryName ) ) { // Make sure the engine is running ... ENGINE . start ( ) ; // See if we can shortcut the process by using the name ... try { JcrRepository repository = ENGINE . getRepository ( repositoryName ) ; switch ( repository . getState ( ) ) { case STARTING : case RUNNING : return repository ; default : LOG . error ( JcrI18n . repositoryIsNotRunningOrHasBeenShutDown , repositoryName ) ; return null ; } } catch ( NoSuchRepositoryException e ) { if ( configFileName == null ) { // No configuration file given, so we can't do anything ... throw e ; } // Nothing found, so continue ... } } RepositoryConfiguration config = RepositoryConfiguration . read ( configFileName ) ; if ( repositoryName == null ) { repositoryName = config . getName ( ) ; } else if ( ! repositoryName . equals ( config . getName ( ) ) ) { LOG . warn ( JcrI18n . repositoryNameDoesNotMatchConfigurationName , repositoryName , config . getName ( ) , configFileName ) ; } // Try to deploy and start the repository ... ENGINE . start ( ) ; JcrRepository repository = ENGINE . deploy ( config ) ; try { ENGINE . startRepository ( repository . getName ( ) ) . get ( ) ; } catch ( InterruptedException e ) { Thread . interrupted ( ) ; throw new RepositoryException ( e ) ; } catch ( ExecutionException e ) { throw new RepositoryException ( e . getCause ( ) ) ; } // Register the JNDI listener, to shut down the repository when removed from JNDI ... if ( nameCtx instanceof EventContext ) { registerNamingListener ( ( EventContext ) nameCtx , jndiName ) ; } return repository ; } | Get or initialize the JCR Repository instance as described by the supplied configuration file and repository name . | 446 | 20 |
33,123 | public static TimeBasedKeys create ( int bitsUsedInCounter ) { CheckArg . isPositive ( bitsUsedInCounter , "bitsUsedInCounter" ) ; int maxAvailableBitsToShift = Long . numberOfLeadingZeros ( System . currentTimeMillis ( ) ) ; CheckArg . isLessThan ( bitsUsedInCounter , maxAvailableBitsToShift , "bitsUsedInCounter" ) ; return new TimeBasedKeys ( ( short ) bitsUsedInCounter ) ; } | Create a new generator that uses the specified number of bits for the counter portion of the keys . | 105 | 19 |
33,124 | public long nextKey ( ) { // Note that per Oracle the currentTimeMillis is the current number of seconds past the epoch // in UTC (not in local time). Therefore, processes with exactly synchronized clocks will // always get the same value regardless of their timezone ... final long timestamp = System . currentTimeMillis ( ) ; final int increment = counterFor ( timestamp ) ; if ( increment <= maximumCounterValue ) { return ( timestamp << counterBits ) + increment ; } // The counter is surprisingly too high, so try again (repeatedly) until we get to the next millisecond ... return this . nextKey ( ) ; } | Get the next key for the current time in UTC . | 133 | 11 |
33,125 | protected void nullReference ( List < Comparison > comparisons , Comparison comparisonToNull ) { if ( comparisonToNull != null ) { for ( int i = 0 ; i != comparisons . size ( ) ; ++ i ) { if ( comparisons . get ( i ) == comparisonToNull ) comparisons . set ( i , null ) ; } } } | Find all occurrences of the comparison object in the supplied list and null the list s reference to it . | 70 | 20 |
33,126 | protected void nullReference ( List < Comparison > comparisons , Iterable < Comparison > comparisonsToNull ) { for ( Comparison comparisonToNull : comparisonsToNull ) { nullReference ( comparisons , comparisonToNull ) ; } } | Find all references in the supplied list that match those supplied and set them to null . | 45 | 17 |
33,127 | protected int compareStaticOperands ( QueryContext context , Comparison comparison1 , Comparison comparison2 ) { Object value1 = getValue ( context , comparison1 . getOperand2 ( ) ) ; Object value2 = getValue ( context , comparison2 . getOperand2 ( ) ) ; return ValueComparators . OBJECT_COMPARATOR . compare ( value1 , value2 ) ; } | Compare the values used in the two comparisons | 82 | 8 |
33,128 | private String getContentType ( List < FileItem > items ) { for ( FileItem i : items ) { if ( ! i . isFormField ( ) && i . getFieldName ( ) . equals ( CONTENT_PARAMETER ) ) { return i . getContentType ( ) ; } } return null ; } | Determines content - type of the uploaded file . | 69 | 11 |
33,129 | protected long claimUpTo ( int number ) { assert number > 0 ; long nextPosition = this . nextPosition ; long maxPosition = nextPosition + number ; long wrapPoint = maxPosition - bufferSize ; long cachedSlowestConsumerPosition = this . slowestConsumerPosition ; if ( wrapPoint > cachedSlowestConsumerPosition || cachedSlowestConsumerPosition > nextPosition ) { long minPosition ; while ( wrapPoint > ( minPosition = positionOfSlowestPointer ( nextPosition ) ) ) { // This takes on the order of tens of nanoseconds, so it's a useful activity to pause a bit. LockSupport . parkNanos ( 1L ) ; waitStrategy . signalAllWhenBlocking ( ) ; } this . slowestConsumerPosition = minPosition ; } this . nextPosition = maxPosition ; return maxPosition ; } | Claim up to the supplied number of positions . | 177 | 9 |
33,130 | private List < String > getRootfiles ( ZipInputStream zipStream ) throws Exception { List < String > rootfiles = new ArrayList <> ( ) ; ZipEntry entry = null ; while ( ( entry = zipStream . getNextEntry ( ) ) != null ) { String entryName = entry . getName ( ) ; if ( entryName . endsWith ( "META-INF/container.xml" ) ) { ByteArrayOutputStream content = getZipEntryContent ( zipStream , entry ) ; DocumentBuilderFactory factory = DocumentBuilderFactory . newInstance ( ) ; DocumentBuilder builder = factory . newDocumentBuilder ( ) ; Document doc = builder . parse ( new ByteArrayInputStream ( content . toByteArray ( ) ) ) ; XPathFactory xPathfactory = XPathFactory . newInstance ( ) ; XPath xpath = xPathfactory . newXPath ( ) ; XPathExpression expr = xpath . compile ( "/container/rootfiles/rootfile" ) ; NodeList rootfileNodes = ( NodeList ) expr . evaluate ( doc , XPathConstants . NODESET ) ; for ( int i = 0 ; i < rootfileNodes . getLength ( ) ; i ++ ) { Node node = rootfileNodes . item ( i ) ; rootfiles . add ( node . getAttributes ( ) . getNamedItem ( "full-path" ) . getNodeValue ( ) ) ; } break ; } } return rootfiles ; } | Parse the container file to get the list of all rootfile packages . | 316 | 15 |
33,131 | private ByteArrayOutputStream getZipEntryContent ( ZipInputStream zipStream , ZipEntry entry ) throws IOException { try ( ByteArrayOutputStream content = new ByteArrayOutputStream ( ) ) { byte [ ] bytes = new byte [ ( int ) entry . getSize ( ) ] ; int read ; while ( ( read = zipStream . read ( bytes , 0 , bytes . length ) ) != - 1 ) { content . write ( bytes , 0 , read ) ; } return content ; } } | Read the content of the ZipEntry without closing the stream . | 104 | 12 |
33,132 | protected void incrementBinaryReferenceCount ( BinaryKey binaryKey , Set < BinaryKey > unusedBinaryKeys , Set < BinaryKey > usedBinaryKeys ) { // Find the document metadata and increment the usage count ... String sha1 = binaryKey . toString ( ) ; String key = keyForBinaryReferenceDocument ( sha1 ) ; // don't acquire a lock since we've already done this at the beginning of the #save EditableDocument entry = documentStore . edit ( key , false ) ; if ( entry == null ) { // The document doesn't yet exist, so create it ... Document content = Schematic . newDocument ( SHA1 , sha1 , REFERENCE_COUNT , 1L ) ; documentStore . localStore ( ) . put ( key , content ) ; } else { Long countValue = entry . getLong ( REFERENCE_COUNT ) ; entry . setNumber ( REFERENCE_COUNT , countValue != null ? countValue + 1 : 1L ) ; } // We're using the sha1, so remove it if its in the set of unused binary keys ... if ( unusedBinaryKeys != null ) { unusedBinaryKeys . remove ( binaryKey ) ; } if ( usedBinaryKeys != null ) { usedBinaryKeys . add ( binaryKey ) ; } } | Increment the reference count for the stored binary value with the supplied SHA - 1 hash . | 282 | 18 |
33,133 | protected void decrementBinaryReferenceCount ( Object fieldValue , Set < BinaryKey > unusedBinaryKeys , Set < BinaryKey > usedBinaryKeys ) { if ( fieldValue instanceof List < ? > ) { for ( Object value : ( List < ? > ) fieldValue ) { decrementBinaryReferenceCount ( value , unusedBinaryKeys , usedBinaryKeys ) ; } } else if ( fieldValue instanceof Object [ ] ) { for ( Object value : ( Object [ ] ) fieldValue ) { decrementBinaryReferenceCount ( value , unusedBinaryKeys , usedBinaryKeys ) ; } } else { String sha1 = null ; if ( fieldValue instanceof Document ) { Document docValue = ( Document ) fieldValue ; sha1 = docValue . getString ( SHA1_FIELD ) ; } else if ( fieldValue instanceof BinaryKey ) { sha1 = fieldValue . toString ( ) ; } else if ( fieldValue instanceof org . modeshape . jcr . api . Binary && ! ( fieldValue instanceof InMemoryBinaryValue ) ) { sha1 = ( ( org . modeshape . jcr . api . Binary ) fieldValue ) . getHexHash ( ) ; } if ( sha1 != null ) { BinaryKey binaryKey = new BinaryKey ( sha1 ) ; // Find the document metadata and decrement the usage count ... // Don't acquire a lock since we should've done so at the beginning of the #save method EditableDocument sha1Usage = documentStore . edit ( keyForBinaryReferenceDocument ( sha1 ) , false ) ; if ( sha1Usage != null ) { Long countValue = sha1Usage . getLong ( REFERENCE_COUNT ) ; assert countValue != null ; long count = countValue - 1 ; assert count >= 0 ; if ( count == 0 ) { // We're not using the binary value anymore ... if ( unusedBinaryKeys != null ) { unusedBinaryKeys . add ( binaryKey ) ; } if ( usedBinaryKeys != null ) { usedBinaryKeys . remove ( binaryKey ) ; } } sha1Usage . setNumber ( REFERENCE_COUNT , count ) ; } else { // The documentStore doesn't contain the binary ref count doc, so we're no longer using the binary value ... if ( unusedBinaryKeys != null ) { unusedBinaryKeys . add ( binaryKey ) ; } if ( usedBinaryKeys != null ) { usedBinaryKeys . 
remove ( binaryKey ) ; } } } } } | Decrement the reference count for the binary value . | 551 | 10 |
33,134 | protected boolean isLocked ( EditableDocument doc ) { return hasProperty ( doc , JcrLexicon . LOCK_OWNER ) || hasProperty ( doc , JcrLexicon . LOCK_IS_DEEP ) ; } | Checks if the given document is already locked | 50 | 9 |
33,135 | public static InputStream read ( String path , ClassLoader classLoader , boolean useTLCL ) { if ( useTLCL ) { InputStream stream = Thread . currentThread ( ) . getContextClassLoader ( ) . getResourceAsStream ( path ) ; if ( stream != null ) { return stream ; } } return classLoader != null ? classLoader . getResourceAsStream ( path ) : ResourceLookup . class . getResourceAsStream ( path ) ; } | Returns the stream of a resource at a given path using some optional class loaders . | 97 | 17 |
33,136 | public static InputStream read ( String path , Class < ? > clazz , boolean useTLCL ) { return read ( path , clazz . getClassLoader ( ) , useTLCL ) ; } | Returns the stream of a resource at a given path using the CL of a class . | 42 | 17 |
33,137 | private void setS3ObjectTag ( String objectKey , String tagKey , String tagValue ) throws BinaryStoreException { try { GetObjectTaggingRequest getTaggingRequest = new GetObjectTaggingRequest ( bucketName , objectKey ) ; GetObjectTaggingResult getTaggingResult = s3Client . getObjectTagging ( getTaggingRequest ) ; List < Tag > initialTagSet = getTaggingResult . getTagSet ( ) ; List < Tag > mergedTagSet = mergeS3TagSet ( initialTagSet , new Tag ( tagKey , tagValue ) ) ; if ( initialTagSet . size ( ) == mergedTagSet . size ( ) && initialTagSet . containsAll ( mergedTagSet ) ) { return ; } SetObjectTaggingRequest setObjectTaggingRequest = new SetObjectTaggingRequest ( bucketName , objectKey , new ObjectTagging ( mergedTagSet ) ) ; s3Client . setObjectTagging ( setObjectTaggingRequest ) ; } catch ( AmazonClientException e ) { throw new BinaryStoreException ( e ) ; } } | Sets a tag on a S3 object potentially overwriting the existing value . | 229 | 17 |
33,138 | private List < Tag > mergeS3TagSet ( List < Tag > initialTags , Tag changeTag ) { Map < String , String > mergedTags = initialTags . stream ( ) . collect ( Collectors . toMap ( Tag :: getKey , Tag :: getValue ) ) ; mergedTags . put ( changeTag . getKey ( ) , changeTag . getValue ( ) ) ; return mergedTags . entrySet ( ) . stream ( ) . map ( entry -> new Tag ( entry . getKey ( ) , entry . getValue ( ) ) ) . collect ( Collectors . toList ( ) ) ; } | Merges a new tag into an existing list of tags . It will be either appended to the list or overwrite the value of an existing tag with the same key . | 129 | 34 |
33,139 | protected long parseLong ( DdlTokenStream tokens , DataType dataType ) { String value = consume ( tokens , dataType , false ) ; return parseLong ( value ) ; } | Returns a long value from the input token stream assuming the long is not bracketed with parenthesis . | 38 | 20 |
33,140 | protected long parseBracketedLong ( DdlTokenStream tokens , DataType dataType ) { consume ( tokens , dataType , false , L_PAREN ) ; String value = consume ( tokens , dataType , false ) ; consume ( tokens , dataType , false , R_PAREN ) ; return parseLong ( value ) ; } | Returns a long value from the input token stream assuming the long is bracketed with parenthesis . | 71 | 19 |
33,141 | public static final SequencerPathExpression compile ( String expression ) throws InvalidSequencerPathExpression { CheckArg . isNotNull ( expression , "sequencer path expression" ) ; expression = expression . trim ( ) ; if ( expression . length ( ) == 0 ) { throw new InvalidSequencerPathExpression ( RepositoryI18n . pathExpressionMayNotBeBlank . text ( ) ) ; } java . util . regex . Matcher matcher = TWO_PART_PATTERN . matcher ( expression ) ; if ( ! matcher . matches ( ) ) { throw new InvalidSequencerPathExpression ( RepositoryI18n . pathExpressionIsInvalid . text ( expression ) ) ; } String selectExpression = matcher . group ( 1 ) ; String outputExpression = matcher . group ( 2 ) ; return new SequencerPathExpression ( PathExpression . compile ( selectExpression ) , outputExpression ) ; } | Compile the supplied expression and return the resulting SequencerPathExpression instance . | 202 | 16 |
33,142 | public Matcher matcher ( String absolutePath ) { PathExpression . Matcher inputMatcher = selectExpression . matcher ( absolutePath ) ; String outputPath = null ; WorkspacePath wsPath = null ; if ( inputMatcher . matches ( ) ) { // Grab the named groups ... Map < Integer , String > replacements = new HashMap < Integer , String > ( ) ; for ( int i = 0 , count = inputMatcher . groupCount ( ) ; i <= count ; ++ i ) { replacements . put ( i , inputMatcher . group ( i ) ) ; } // Grab the selected path ... String selectedPath = inputMatcher . getSelectedNodePath ( ) ; // Find the output path using the groups from the match pattern ... wsPath = PathExpression . parsePathInWorkspace ( this . outputExpression ) ; if ( wsPath != null ) { if ( wsPath . workspaceName == null ) wsPath = wsPath . withWorkspaceName ( inputMatcher . getSelectedWorkspaceName ( ) ) ; outputPath = wsPath . path ; if ( ! DEFAULT_OUTPUT_EXPRESSION . equals ( outputPath ) ) { java . util . regex . Matcher replacementMatcher = REPLACEMENT_VARIABLE_PATTERN . matcher ( outputPath ) ; // CHECKSTYLE IGNORE check FOR NEXT 1 LINES StringBuffer sb = new StringBuffer ( ) ; if ( replacementMatcher . find ( ) ) { do { String variable = replacementMatcher . group ( 1 ) ; String replacement = replacements . get ( Integer . valueOf ( variable ) ) ; if ( replacement == null ) replacement = replacementMatcher . group ( 0 ) ; replacementMatcher . appendReplacement ( sb , replacement ) ; } while ( replacementMatcher . find ( ) ) ; replacementMatcher . appendTail ( sb ) ; outputPath = sb . toString ( ) ; } // Make sure there is a trailing '/' ... if ( ! outputPath . endsWith ( "/" ) ) outputPath = outputPath + "/" ; // Replace all references to "/./" with "/" ... outputPath = outputPath . replaceAll ( "/\\./" , "/" ) ; // Remove any path segment followed by a parent reference ... java . util . regex . Matcher parentMatcher = PARENT_PATTERN . matcher ( outputPath ) ; while ( parentMatcher . 
find ( ) ) { outputPath = parentMatcher . replaceAll ( "" ) ; // Make sure there is a trailing '/' ... if ( ! outputPath . endsWith ( "/" ) ) outputPath = outputPath + "/" ; parentMatcher = PARENT_PATTERN . matcher ( outputPath ) ; } // Remove all multiple occurrences of '/' ... outputPath = outputPath . replaceAll ( "/{2,}" , "/" ) ; // Remove the trailing '/@property' ... outputPath = outputPath . replaceAll ( "/@[^/\\[\\]]+$" , "" ) ; // Remove a trailing '/' ... outputPath = outputPath . replaceAll ( "/$" , "" ) ; // If the output path is blank, then use the default output expression ... if ( outputPath . length ( ) == 0 ) outputPath = DEFAULT_OUTPUT_EXPRESSION ; } if ( DEFAULT_OUTPUT_EXPRESSION . equals ( outputPath ) ) { // The output path is the default expression, so use the selected path ... outputPath = selectedPath ; } wsPath = wsPath . withPath ( outputPath ) ; } } return new Matcher ( inputMatcher , wsPath ) ; } | Obtain a Matcher that can be used to convert the supplied workspace key and absolute path into an output workspace name and and output path . | 798 | 28 |
33,143 | public static EditableDocument newDocument ( Document original ) { BasicDocument newDoc = new BasicDocument ( ) ; newDoc . putAll ( original ) ; return new DocumentEditor ( newDoc , DEFAULT_FACTORY ) ; } | Create a new editable document that is a copy of the supplied document . | 49 | 15 |
33,144 | public static EditableDocument newDocument ( String name , Object value ) { return new DocumentEditor ( new BasicDocument ( name , value ) , DEFAULT_FACTORY ) ; } | Create a new editable document initialized with a single field that can be used as a new document entry in a SchematicDb or as nested documents for other documents . | 38 | 33 |
33,145 | public static EditableDocument newDocument ( String name1 , Object value1 , String name2 , Object value2 ) { return new DocumentEditor ( new BasicDocument ( name1 , value1 , name2 , value2 ) , DEFAULT_FACTORY ) ; } | Create a new editable document initialized with two fields that can be used as a new document entry in a SchematicDb or as nested documents for other documents . | 56 | 32 |
33,146 | public static SiblingCounter constant ( final int count ) { assert count > - 1 ; return new SiblingCounter ( ) { @ Override public int countSiblingsNamed ( Name childName ) { return count ; } } ; } | Create a sibling counter that always return the supplied count regardless of the name or node . | 49 | 17 |
33,147 | public static SiblingCounter alter ( final SiblingCounter counter , final int delta ) { assert counter != null ; return new SiblingCounter ( ) { @ Override public int countSiblingsNamed ( Name childName ) { int count = counter . countSiblingsNamed ( childName ) + delta ; return count > 0 ? count : 0 ; // never negative } } ; } | Creates a sibling counter that alters another counter by a constant value . | 80 | 14 |
33,148 | public RestWorkspaces getWorkspaces ( HttpServletRequest request , String repositoryName ) throws RepositoryException { assert request != null ; assert repositoryName != null ; RestWorkspaces workspaces = new RestWorkspaces ( ) ; Session session = getSession ( request , repositoryName , null ) ; for ( String workspaceName : session . getWorkspace ( ) . getAccessibleWorkspaceNames ( ) ) { String repositoryUrl = RestHelper . urlFrom ( request ) ; workspaces . addWorkspace ( workspaceName , repositoryUrl ) ; } return workspaces ; } | Returns the list of workspaces available to this user within the named repository . | 119 | 15 |
33,149 | public Response backupRepository ( ServletContext context , HttpServletRequest request , String repositoryName , BackupOptions options ) throws RepositoryException { final File backupLocation = resolveBackupLocation ( context ) ; Session session = getSession ( request , repositoryName , null ) ; String repositoryVersion = session . getRepository ( ) . getDescriptorValue ( Repository . REP_VERSION_DESC ) . getString ( ) . replaceAll ( "\\." , "" ) ; final String backupName = "modeshape_" + repositoryVersion + "_" + repositoryName + "_backup_" + DATE_FORMAT . format ( new Date ( ) ) ; final File backup = new File ( backupLocation , backupName ) ; if ( ! backup . mkdirs ( ) ) { throw new RuntimeException ( "Cannot create backup folder: " + backup ) ; } logger . debug ( "Backing up repository '{0}' to '{1}', using '{2}'" , repositoryName , backup , options ) ; RepositoryManager repositoryManager = ( ( org . modeshape . jcr . api . Workspace ) session . getWorkspace ( ) ) . getRepositoryManager ( ) ; repositoryManager . backupRepository ( backup , options ) ; final String backupURL ; try { backupURL = backup . toURI ( ) . toURL ( ) . toString ( ) ; } catch ( MalformedURLException e ) { //should never happen throw new RuntimeException ( e ) ; } JSONAble responseContent = new JSONAble ( ) { @ Override public JSONObject toJSON ( ) throws JSONException { JSONObject object = new JSONObject ( ) ; object . put ( "name" , backupName ) ; object . put ( "url" , backupURL ) ; return object ; } } ; return Response . status ( Response . Status . CREATED ) . entity ( responseContent ) . build ( ) ; } | Performs a repository backup . | 417 | 6 |
33,150 | public Response restoreRepository ( ServletContext context , HttpServletRequest request , String repositoryName , String backupName , RestoreOptions options ) throws RepositoryException { if ( StringUtil . isBlank ( backupName ) ) { throw new IllegalArgumentException ( "The name of the backup cannot be null" ) ; } File backup = resolveBackup ( context , backupName ) ; logger . debug ( "Restoring repository '{0}' from backup '{1}' using '{2}'" , repositoryName , backup , options ) ; Session session = getSession ( request , repositoryName , null ) ; RepositoryManager repositoryManager = ( ( org . modeshape . jcr . api . Workspace ) session . getWorkspace ( ) ) . getRepositoryManager ( ) ; final Problems problems = repositoryManager . restoreRepository ( backup , options ) ; if ( ! problems . hasProblems ( ) ) { return Response . ok ( ) . build ( ) ; } List < JSONAble > response = new ArrayList < JSONAble > ( problems . size ( ) ) ; for ( Problem problem : problems ) { RestException exception = problem . getThrowable ( ) != null ? new RestException ( problem . getMessage ( ) , problem . getThrowable ( ) ) : new RestException ( problem . getMessage ( ) ) ; response . add ( exception ) ; } return Response . status ( Response . Status . INTERNAL_SERVER_ERROR ) . entity ( response ) . build ( ) ; } | Restores a repository using an existing backup . | 325 | 9 |
33,151 | public void add ( T value ) { Lock lock = this . lock . writeLock ( ) ; try { lock . lock ( ) ; doAddValue ( value ) ; } finally { lock . unlock ( ) ; } } | Add a new value to these statistics . | 46 | 8 |
33,152 | public T getTotal ( ) { Lock lock = this . lock . readLock ( ) ; lock . lock ( ) ; try { return this . total ; } finally { lock . unlock ( ) ; } } | Get the aggregate sum of the values in the series . | 43 | 11 |
33,153 | public T getMaximum ( ) { Lock lock = this . lock . readLock ( ) ; lock . lock ( ) ; try { return this . maximum ; } finally { lock . unlock ( ) ; } } | Get the maximum value in the series . | 43 | 8 |
33,154 | public T getMinimum ( ) { Lock lock = this . lock . readLock ( ) ; lock . lock ( ) ; try { return this . minimum != null ? this . minimum : ( T ) this . math . createZeroValue ( ) ; } finally { lock . unlock ( ) ; } } | Get the minimum value in the series . | 62 | 8 |
33,155 | public int getCount ( ) { Lock lock = this . lock . readLock ( ) ; lock . lock ( ) ; try { return this . count ; } finally { lock . unlock ( ) ; } } | Get the number of values that have been measured . | 43 | 10 |
33,156 | public void reset ( ) { Lock lock = this . lock . writeLock ( ) ; lock . lock ( ) ; try { doReset ( ) ; } finally { lock . unlock ( ) ; } } | Reset the statistics in this object and clear out any stored information . | 43 | 14 |
33,157 | public static String getSubstitutedProperty ( String value , PropertyAccessor propertyAccessor ) { if ( value == null || value . trim ( ) . length ( ) == 0 ) return value ; StringBuilder sb = new StringBuilder ( value ) ; // Get the index of the first constant, if any int startName = sb . indexOf ( CURLY_PREFIX ) ; if ( startName == - 1 ) return value ; // process as many different variable groupings that are defined, where one group will resolve to one property // substitution while ( startName != - 1 ) { String defaultValue = null ; int endName = sb . indexOf ( CURLY_SUFFIX , startName ) ; if ( endName == - 1 ) { // if no suffix can be found, then this variable was probably defined incorrectly // but return what there is at this point return sb . toString ( ) ; } String varString = sb . substring ( startName + 2 , endName ) ; if ( varString . indexOf ( DEFAULT_DELIM ) > - 1 ) { List < String > defaults = split ( varString , DEFAULT_DELIM ) ; // get the property(s) variables that are defined left of the default delimiter. varString = defaults . get ( 0 ) ; // if the default is defined, then capture in case none of the other properties are found if ( defaults . size ( ) == 2 ) { defaultValue = defaults . get ( 1 ) ; } } String constValue = null ; // split the property(s) based VAR_DELIM, when multiple property options are defined List < String > vars = split ( varString , VAR_DELIM ) ; for ( final String var : vars ) { constValue = System . getenv ( var ) ; if ( constValue == null ) { constValue = propertyAccessor . getProperty ( var ) ; } // the first found property is the value to be substituted if ( constValue != null ) { break ; } } // if no property is found to substitute, then use the default value, if defined if ( constValue == null && defaultValue != null ) { constValue = defaultValue ; } if ( constValue != null ) { sb = sb . replace ( startName , endName + 1 , constValue ) ; // Checking for another constants startName = sb . 
indexOf ( CURLY_PREFIX ) ; } else { // continue to try to substitute for other properties so that all defined variables // are tried to be substituted for startName = sb . indexOf ( CURLY_PREFIX , endName ) ; } } return sb . toString ( ) ; } | getSubstitutedProperty is called to perform the property substitution on the value . | 582 | 17 |
33,158 | private static List < String > split ( String str , String splitter ) { StringTokenizer tokens = new StringTokenizer ( str , splitter ) ; ArrayList < String > l = new ArrayList <> ( tokens . countTokens ( ) ) ; while ( tokens . hasMoreTokens ( ) ) { l . add ( tokens . nextToken ( ) ) ; } return l ; } | Split a string into pieces based on delimiters . Similar to the perl function of the same name . The delimiters are not included in the returned strings . | 81 | 33 |
33,159 | protected void checkFileNotExcluded ( String id , File file ) { if ( isExcluded ( file ) ) { String msg = JcrI18n . fileConnectorCannotStoreFileThatIsExcluded . text ( getSourceName ( ) , id , file . getAbsolutePath ( ) ) ; throw new DocumentStoreException ( id , msg ) ; } } | Utility method to ensure that the file is writable by this connector . | 78 | 15 |
33,160 | public static String filter ( String message ) { if ( message == null ) { return ( null ) ; } char content [ ] = new char [ message . length ( ) ] ; message . getChars ( 0 , message . length ( ) , content , 0 ) ; StringBuilder result = new StringBuilder ( content . length + 50 ) ; for ( int i = 0 ; i < content . length ; i ++ ) { switch ( content [ i ] ) { case ' ' : result . append ( "<" ) ; break ; case ' ' : result . append ( ">" ) ; break ; case ' ' : result . append ( "&" ) ; break ; case ' ' : result . append ( """ ) ; break ; default : result . append ( content [ i ] ) ; } } return ( result . toString ( ) ) ; } | Filter the specified message string for characters that are sensitive in HTML . This avoids potential attacks caused by including JavaScript codes in the request URL that is often reported in error messages . | 186 | 34 |
33,161 | public static Cookie [ ] parseCookieHeader ( String header ) { if ( ( header == null ) || ( header . length ( ) < 1 ) ) { return ( new Cookie [ 0 ] ) ; } ArrayList < Cookie > cookies = new ArrayList < Cookie > ( ) ; while ( header . length ( ) > 0 ) { int semicolon = header . indexOf ( ' ' ) ; if ( semicolon < 0 ) { semicolon = header . length ( ) ; } if ( semicolon == 0 ) { break ; } String token = header . substring ( 0 , semicolon ) ; if ( semicolon < header . length ( ) ) { header = header . substring ( semicolon + 1 ) ; } else { header = "" ; } try { int equals = token . indexOf ( ' ' ) ; if ( equals > 0 ) { String name = token . substring ( 0 , equals ) . trim ( ) ; String value = token . substring ( equals + 1 ) . trim ( ) ; cookies . add ( new Cookie ( name , value ) ) ; } } catch ( Throwable e ) { // do nothing ?! } } return cookies . toArray ( new Cookie [ cookies . size ( ) ] ) ; } | Parse a cookie header into an array of cookies according to RFC 2109 . | 268 | 16 |
33,162 | public static String URLDecode ( String str , String enc ) { if ( str == null ) { return ( null ) ; } // use the specified encoding to extract bytes out of the // given string so that the encoding is not lost. If an // encoding is not specified, let it use platform default byte [ ] bytes = null ; try { if ( enc == null ) { bytes = str . getBytes ( ) ; } else { bytes = str . getBytes ( enc ) ; } } catch ( UnsupportedEncodingException uee ) { } return URLDecode ( bytes , enc ) ; } | Decode and return the specified URL - encoded String . | 125 | 11 |
33,163 | public static String URLDecode ( byte [ ] bytes , String enc ) { if ( bytes == null ) { return ( null ) ; } int len = bytes . length ; int ix = 0 ; int ox = 0 ; while ( ix < len ) { byte b = bytes [ ix ++ ] ; // Get byte to test if ( b == ' ' ) { b = ( byte ) ' ' ; } else if ( b == ' ' ) { b = ( byte ) ( ( convertHexDigit ( bytes [ ix ++ ] ) << 4 ) + convertHexDigit ( bytes [ ix ++ ] ) ) ; } bytes [ ox ++ ] = b ; } if ( enc != null ) { try { return new String ( bytes , 0 , ox , enc ) ; } catch ( Exception e ) { e . printStackTrace ( ) ; } } return new String ( bytes , 0 , ox ) ; } | Decode and return the specified URL - encoded byte array . | 197 | 12 |
33,164 | public static boolean streamNotConsumed ( HttpServletRequest request ) { try { ServletInputStream servletInputStream = request . getInputStream ( ) ; //in servlet >= 3.0, available will throw an exception (while previously it didn't) return request . getContentLength ( ) != 0 && servletInputStream . available ( ) > 0 ; } catch ( IOException e ) { return false ; } } | Checks if the input stream of the given request is nor isn t consumed . This method is backwards - compatible with Servlet 2 . x as in Servlet 3 . x there is a isFinished method . | 91 | 43 |
33,165 | public static JcrAccessControlList defaultAcl ( AccessControlManagerImpl acm ) { JcrAccessControlList acl = new JcrAccessControlList ( "/" ) ; try { acl . principals . put ( SimplePrincipal . EVERYONE , new AccessControlEntryImpl ( SimplePrincipal . EVERYONE , acm . privileges ( ) ) ) ; } catch ( AccessControlException e ) { // will never happen } return acl ; } | Creates default Access Control List . | 95 | 7 |
33,166 | public boolean hasPrivileges ( SecurityContext sc , Privilege [ ] privileges ) { for ( AccessControlEntryImpl ace : principals . values ( ) ) { // check access list for everyone if ( ace . getPrincipal ( ) . getName ( ) . equals ( SimplePrincipal . EVERYONE . getName ( ) ) ) { if ( ace . hasPrivileges ( privileges ) ) { return true ; } } // check user principal if ( ace . getPrincipal ( ) . getName ( ) . equals ( username ( sc . getUserName ( ) ) ) ) { if ( ace . hasPrivileges ( privileges ) ) { return true ; } } // check group/role principal if ( sc . hasRole ( ace . getPrincipal ( ) . getName ( ) ) ) { if ( ace . hasPrivileges ( privileges ) ) { return true ; } } } return false ; } | Tests privileges relatively to the given security context . | 191 | 10 |
33,167 | public Privilege [ ] getPrivileges ( SecurityContext context ) { ArrayList < Privilege > privs = new ArrayList < Privilege > ( ) ; for ( AccessControlEntryImpl ace : principals . values ( ) ) { // add privileges granted for everyone if ( ace . getPrincipal ( ) . equals ( SimplePrincipal . EVERYONE ) ) { privs . addAll ( Arrays . asList ( ace . getPrivileges ( ) ) ) ; } // add privileges granted for given user if ( ace . getPrincipal ( ) . getName ( ) . equals ( username ( context . getUserName ( ) ) ) ) { privs . addAll ( Arrays . asList ( ace . getPrivileges ( ) ) ) ; } // add privileges granted for given role if ( context . hasRole ( ace . getPrincipal ( ) . getName ( ) ) ) { privs . addAll ( Arrays . asList ( ace . getPrivileges ( ) ) ) ; } } Privilege [ ] res = new Privilege [ privs . size ( ) ] ; privs . toArray ( res ) ; return res ; } | Lists all privileges defined by this access list for the given user . | 246 | 14 |
33,168 | private String username ( String username ) { return ( username . startsWith ( "<" ) && username . endsWith ( ">" ) ) ? username . substring ( 1 , username . length ( ) - 1 ) : username ; } | Removes brackets enclosing given user name | 48 | 8 |
33,169 | public NodeCache getNodeCache ( String workspaceName ) throws WorkspaceNotFoundException { NodeCache cache = overriddenNodeCachesByWorkspaceName . get ( workspaceName ) ; if ( cache == null ) { cache = repositoryCache . getWorkspaceCache ( workspaceName ) ; } return cache ; } | Get the NodeCache for the given workspace name . The result will either be the overridden value supplied in the constructor or the workspace cache from the referenced RepositoryCache . | 64 | 34 |
33,170 | public QueryContext with ( Schemata schemata ) { CheckArg . isNotNull ( schemata , "schemata" ) ; return new QueryContext ( context , repositoryCache , workspaceNames , overriddenNodeCachesByWorkspaceName , schemata , indexDefns , nodeTypes , bufferManager , hints , problems , variables ) ; } | Obtain a copy of this context except that the copy uses the supplied schemata . | 76 | 18 |
33,171 | public QueryContext with ( Problems problems ) { return new QueryContext ( context , repositoryCache , workspaceNames , overriddenNodeCachesByWorkspaceName , schemata , indexDefns , nodeTypes , bufferManager , hints , problems , variables ) ; } | Obtain a copy of this context except that the copy uses the supplied problem container . | 54 | 17 |
33,172 | public QueryContext with ( Map < String , Object > variables ) { return new QueryContext ( context , repositoryCache , workspaceNames , overriddenNodeCachesByWorkspaceName , schemata , indexDefns , nodeTypes , bufferManager , hints , problems , variables ) ; } | Obtain a copy of this context except that the copy uses the supplied variables . | 59 | 16 |
33,173 | public void scoreText ( String text , int factor , String ... keywords ) { if ( text != null && keywords != null ) { // Increment the score once for each keyword that is found within the text ... String lowercaseText = text . toLowerCase ( ) ; for ( String keyword : keywords ) { if ( keyword == null ) continue ; String lowercaseKeyword = keyword . toLowerCase ( ) ; int index = 0 ; while ( true ) { index = lowercaseText . indexOf ( lowercaseKeyword , index ) ; if ( index == - 1 ) break ; score += factor ; ++ index ; } } } } | Increment the score if the given text contains any of the supply keywords . | 133 | 15 |
33,174 | protected String removeUnusedPredicates ( String expression ) { assert expression != null ; java . util . regex . Matcher matcher = UNUSABLE_PREDICATE_PATTERN . matcher ( expression ) ; // CHECKSTYLE IGNORE check FOR NEXT 1 LINES StringBuffer sb = new StringBuffer ( ) ; if ( matcher . find ( ) ) { do { // Remove those predicates that show up in group 1 ... String predicateStr = matcher . group ( 0 ) ; String unusablePredicateStr = matcher . group ( 1 ) ; if ( unusablePredicateStr != null ) { predicateStr = "" ; } matcher . appendReplacement ( sb , predicateStr ) ; } while ( matcher . find ( ) ) ; matcher . appendTail ( sb ) ; expression = sb . toString ( ) ; } return expression ; } | Replace certain XPath patterns that are not used or understood . | 191 | 13 |
33,175 | public Object get ( String name ) { Object obj = document . get ( name ) ; return ( obj instanceof BasicArray ) ? ( ( BasicArray ) obj ) . toArray ( ) : obj ; } | Gets property value . | 43 | 5 |
33,176 | private void readFromStream ( InputStream in ) throws IOException { document = DocumentFactory . newDocument ( Json . read ( in ) ) ; } | Reads document content from the stream . | 32 | 8 |
33,177 | public final String [ ] getPathExpressions ( ) { String pathExpression = this . pathExpression ; Object [ ] pathExpressions = this . pathExpressions ; if ( pathExpression == null && ( pathExpressions == null || pathExpressions . length == 0 ) ) { // there's none ... return new String [ ] { } ; } if ( pathExpression != null && ( pathExpressions == null || pathExpressions . length == 0 ) ) { // There's just one ... return new String [ ] { pathExpression } ; } List < String > expressions = new ArrayList < String > ( pathExpressions . length + 1 ) ; addExpression ( expressions , pathExpression ) ; for ( Object value : pathExpressions ) { addExpression ( expressions , value ) ; } return expressions . toArray ( new String [ expressions . size ( ) ] ) ; } | Obtain the path expressions as configured on the sequencer . This method always returns a copy to prevent modification of the values . | 189 | 25 |
33,178 | public final boolean isAccepted ( String mimeType ) { if ( mimeType != null && hasAcceptedMimeTypes ( ) ) { return getAcceptedMimeTypes ( ) . contains ( mimeType . trim ( ) ) ; } return true ; // accept all mime types } | Determine if this sequencer has been configured to accept and process content with the supplied MIME type . | 63 | 22 |
33,179 | private List < SessionNode > getChangedNodesAtOrBelowChildrenFirst ( Path nodePath ) { List < SessionNode > changedNodesChildrenFirst = new ArrayList < SessionNode > ( ) ; for ( NodeKey key : changedNodes . keySet ( ) ) { SessionNode changedNode = changedNodes . get ( key ) ; boolean isAtOrBelow = false ; try { isAtOrBelow = changedNode . isAtOrBelow ( this , nodePath ) ; } catch ( NodeNotFoundException e ) { isAtOrBelow = false ; } if ( ! isAtOrBelow ) { continue ; } int insertIndex = changedNodesChildrenFirst . size ( ) ; Path changedNodePath = changedNode . getPath ( this ) ; for ( int i = 0 ; i < changedNodesChildrenFirst . size ( ) ; i ++ ) { if ( changedNodesChildrenFirst . get ( i ) . getPath ( this ) . isAncestorOf ( changedNodePath ) ) { insertIndex = i ; break ; } } changedNodesChildrenFirst . add ( insertIndex , changedNode ) ; } return changedNodesChildrenFirst ; } | Returns the list of changed nodes at or below the given path starting with the children . | 246 | 17 |
33,180 | private void completeTransaction ( final String txId , String wsName ) { getWorkspace ( ) . clear ( ) ; // reset the ws cache to the shared (global one) setWorkspaceCache ( sharedWorkspaceCache ( ) ) ; // and clear some tx specific data COMPLETE_FUNCTION_BY_TX_AND_WS . compute ( txId , ( transactionId , funcsByWsName ) -> { funcsByWsName . remove ( wsName ) ; if ( funcsByWsName . isEmpty ( ) ) { // this is the last ws cache we are clearing for this tx so mark all the keys as unlocked LOCKED_KEYS_BY_TX_ID . remove ( txId ) ; // and remove the map return null ; } // there are other ws caches which need clearing for this tx, so just return the updated map return funcsByWsName ; } ) ; } | Signal that the transaction that was active and in which this session participated has completed and that this session should no longer use a transaction - specific workspace cache . | 198 | 31 |
33,181 | private void rollback ( Transaction txn , Exception cause ) throws Exception { try { txn . rollback ( ) ; } catch ( Exception e ) { logger . debug ( e , "Error while rolling back transaction " + txn ) ; } finally { throw cause ; } } | Rolling back given transaction caused by given cause . | 58 | 10 |
33,182 | public static Date createDate ( Calendar target ) { return new java . sql . Date ( target . getTime ( ) . getTime ( ) ) ; } | Creates normalized SQL Date Object based on the target Calendar | 32 | 11 |
33,183 | public void mergeNodes ( DdlTokenStream tokens , AstNode firstNode , AstNode secondNode ) { assert tokens != null ; assert firstNode != null ; assert secondNode != null ; int firstStartIndex = ( Integer ) firstNode . getProperty ( DDL_START_CHAR_INDEX ) ; int secondStartIndex = ( Integer ) secondNode . getProperty ( DDL_START_CHAR_INDEX ) ; int deltaLength = ( ( String ) secondNode . getProperty ( DDL_EXPRESSION ) ) . length ( ) ; Position startPosition = new Position ( firstStartIndex , 1 , 0 ) ; Position endPosition = new Position ( ( secondStartIndex + deltaLength ) , 1 , 0 ) ; String source = tokens . getContentBetween ( startPosition , endPosition ) ; firstNode . setProperty ( DDL_EXPRESSION , source ) ; firstNode . setProperty ( DDL_LENGTH , source . length ( ) ) ; } | Merges second node into first node by re - setting expression source and length . | 208 | 16 |
33,184 | protected AstNode parseCreateStatement ( DdlTokenStream tokens , AstNode parentNode ) throws ParsingException { assert tokens != null ; assert parentNode != null ; AstNode stmtNode = null ; // DEFAULT DOES NOTHING // Subclasses can implement additional parsing // System.out.println(" >>> FOUND [CREATE] STATEMENT: TOKEN = " + tokens.consume() + " " + tokens.consume() + " " + // tokens.consume()); // SQL 92 CREATE OPTIONS: // CREATE SCHEMA // CREATE DOMAIN // CREATE [ { GLOBAL | LOCAL } TEMPORARY ] TABLE // CREATE VIEW // CREATE ASSERTION // CREATE CHARACTER SET // CREATE COLLATION // CREATE TRANSLATION if ( tokens . matches ( STMT_CREATE_SCHEMA ) ) { stmtNode = parseCreateSchemaStatement ( tokens , parentNode ) ; } else if ( tokens . matches ( STMT_CREATE_TABLE ) || tokens . matches ( STMT_CREATE_GLOBAL_TEMPORARY_TABLE ) || tokens . matches ( STMT_CREATE_LOCAL_TEMPORARY_TABLE ) ) { stmtNode = parseCreateTableStatement ( tokens , parentNode ) ; } else if ( tokens . matches ( STMT_CREATE_VIEW ) || tokens . matches ( STMT_CREATE_OR_REPLACE_VIEW ) ) { stmtNode = parseCreateViewStatement ( tokens , parentNode ) ; } else if ( tokens . matches ( STMT_CREATE_ASSERTION ) ) { stmtNode = parseCreateAssertionStatement ( tokens , parentNode ) ; } else if ( tokens . matches ( STMT_CREATE_CHARACTER_SET ) ) { stmtNode = parseCreateCharacterSetStatement ( tokens , parentNode ) ; } else if ( tokens . matches ( STMT_CREATE_COLLATION ) ) { stmtNode = parseCreateCollationStatement ( tokens , parentNode ) ; } else if ( tokens . matches ( STMT_CREATE_TRANSLATION ) ) { stmtNode = parseCreateTranslationStatement ( tokens , parentNode ) ; } else if ( tokens . matches ( STMT_CREATE_DOMAIN ) ) { stmtNode = parseCreateDomainStatement ( tokens , parentNode ) ; } else { markStartOfStatement ( tokens ) ; stmtNode = parseIgnorableStatement ( tokens , "CREATE UNKNOWN" , parentNode ) ; Position position = getCurrentMarkedPosition ( ) ; String msg = DdlSequencerI18n . unknownCreateStatement . 
text ( position . getLine ( ) , position . getColumn ( ) ) ; DdlParserProblem problem = new DdlParserProblem ( DdlConstants . Problems . WARNING , position , msg ) ; stmtNode . setProperty ( DDL_PROBLEM , problem . toString ( ) ) ; markEndOfStatement ( tokens , stmtNode ) ; } return stmtNode ; } | Parses DDL CREATE statement based on SQL 92 specifications . | 654 | 14 |
33,185 | protected AstNode parseAlterStatement ( DdlTokenStream tokens , AstNode parentNode ) throws ParsingException { assert tokens != null ; assert parentNode != null ; if ( tokens . matches ( ALTER , TABLE ) ) { return parseAlterTableStatement ( tokens , parentNode ) ; } else if ( tokens . matches ( "ALTER" , "DOMAIN" ) ) { markStartOfStatement ( tokens ) ; tokens . consume ( "ALTER" , "DOMAIN" ) ; String domainName = parseName ( tokens ) ; AstNode alterNode = nodeFactory ( ) . node ( domainName , parentNode , TYPE_ALTER_DOMAIN_STATEMENT ) ; parseUntilTerminator ( tokens ) ; markEndOfStatement ( tokens , alterNode ) ; return alterNode ; } return null ; } | Parses DDL ALTER statement based on SQL 92 specifications . | 172 | 14 |
33,186 | protected String getTableElementsString ( DdlTokenStream tokens , boolean useTerminator ) throws ParsingException { assert tokens != null ; StringBuilder sb = new StringBuilder ( 100 ) ; if ( useTerminator ) { while ( ! isTerminator ( tokens ) ) { sb . append ( SPACE ) . append ( tokens . consume ( ) ) ; } } else { // Assume we start with open parenthesis '(', then we can count on walking through ALL tokens until we find the close // parenthesis ')'. If there are intermediate parenthesis, we can count on them being pairs. tokens . consume ( L_PAREN ) ; // EXPECTED int iParen = 0 ; while ( tokens . hasNext ( ) ) { if ( tokens . matches ( L_PAREN ) ) { iParen ++ ; } else if ( tokens . matches ( R_PAREN ) ) { if ( iParen == 0 ) { tokens . consume ( R_PAREN ) ; break ; } iParen -- ; } if ( isComment ( tokens ) ) { tokens . consume ( ) ; } else { sb . append ( SPACE ) . append ( tokens . consume ( ) ) ; } } } return sb . toString ( ) ; } | Method which extracts the table element string from a CREATE TABLE statement . | 262 | 14 |
33,187 | protected void parseConstraintAttributes ( DdlTokenStream tokens , AstNode constraintNode ) throws ParsingException { assert tokens != null ; assert constraintNode != null ; // Now we need to check for constraint attributes: // <constraint attributes> ::= // <constraint check time> [ [ NOT ] DEFERRABLE ] // | [ NOT ] DEFERRABLE [ <constraint check time> ] // // <constraint check time> ::= // INITIALLY DEFERRED // | INITIALLY IMMEDIATE // EXAMPLE : foreign key (contact_id) references contact (contact_id) on delete cascade INITIALLY DEFERRED, if ( tokens . canConsume ( "INITIALLY" , "DEFERRED" ) ) { AstNode attrNode = nodeFactory ( ) . node ( "CONSTRAINT_ATTRIBUTE" , constraintNode , TYPE_CONSTRAINT_ATTRIBUTE ) ; attrNode . setProperty ( PROPERTY_VALUE , "INITIALLY DEFERRED" ) ; } if ( tokens . canConsume ( "INITIALLY" , "IMMEDIATE" ) ) { AstNode attrNode = nodeFactory ( ) . node ( "CONSTRAINT_ATTRIBUTE" , constraintNode , TYPE_CONSTRAINT_ATTRIBUTE ) ; attrNode . setProperty ( PROPERTY_VALUE , "INITIALLY IMMEDIATE" ) ; } if ( tokens . canConsume ( "NOT" , "DEFERRABLE" ) ) { AstNode attrNode = nodeFactory ( ) . node ( "CONSTRAINT_ATTRIBUTE" , constraintNode , TYPE_CONSTRAINT_ATTRIBUTE ) ; attrNode . setProperty ( PROPERTY_VALUE , "NOT DEFERRABLE" ) ; } if ( tokens . canConsume ( "DEFERRABLE" ) ) { AstNode attrNode = nodeFactory ( ) . node ( "CONSTRAINT_ATTRIBUTE" , constraintNode , TYPE_CONSTRAINT_ATTRIBUTE ) ; attrNode . setProperty ( PROPERTY_VALUE , "DEFERRABLE" ) ; } if ( tokens . canConsume ( "INITIALLY" , "DEFERRED" ) ) { AstNode attrNode = nodeFactory ( ) . node ( "CONSTRAINT_ATTRIBUTE" , constraintNode , TYPE_CONSTRAINT_ATTRIBUTE ) ; attrNode . setProperty ( PROPERTY_VALUE , "INITIALLY DEFERRED" ) ; } if ( tokens . canConsume ( "INITIALLY" , "IMMEDIATE" ) ) { AstNode attrNode = nodeFactory ( ) . node ( "CONSTRAINT_ATTRIBUTE" , constraintNode , TYPE_CONSTRAINT_ATTRIBUTE ) ; attrNode . 
setProperty ( PROPERTY_VALUE , "INITIALLY IMMEDIATE" ) ; } } | Parses the attributes associated with any in - line column constraint definition or a table constrain definition . | 674 | 21 |
33,188 | protected List < String > getDataTypeStartWords ( ) { if ( allDataTypeStartWords == null ) { allDataTypeStartWords = new ArrayList < String > ( ) ; allDataTypeStartWords . addAll ( DataTypes . DATATYPE_START_WORDS ) ; allDataTypeStartWords . addAll ( getCustomDataTypeStartWords ( ) ) ; } return allDataTypeStartWords ; } | Returns a list of data type start words which can be used to help identify a column definition sub - statement . | 92 | 22 |
33,189 | protected String consumeIdentifier ( DdlTokenStream tokens ) throws ParsingException { String value = tokens . consume ( ) ; // This may surrounded by quotes, so remove them ... if ( value . charAt ( 0 ) == ' ' ) { int length = value . length ( ) ; // Check for the end quote ... value = value . substring ( 1 , length - 1 ) ; // not complete!! } // TODO: Handle warnings elegantly // else { // // Not quoted, so check for reserved words ... // if (tokens.isKeyWord(value)) { // // Record warning ... // System.out.println(" WARNING: Identifier [" + value + "] is a SQL 92 Reserved Word"); // } // } return value ; } | Consumes an token identifier which can be of the form of a simple string or a double - quoted string . | 159 | 22 |
33,190 | protected boolean parseColumnNameList ( DdlTokenStream tokens , AstNode parentNode , String referenceType ) { boolean parsedColumns = false ; // CONSUME COLUMNS List < String > columnNameList = new ArrayList < String > ( ) ; if ( tokens . matches ( L_PAREN ) ) { tokens . consume ( L_PAREN ) ; columnNameList = parseNameList ( tokens ) ; if ( ! columnNameList . isEmpty ( ) ) { parsedColumns = true ; } tokens . consume ( R_PAREN ) ; } for ( String columnName : columnNameList ) { nodeFactory ( ) . node ( columnName , parentNode , referenceType ) ; } return parsedColumns ; } | Adds column reference nodes to a parent node . Returns true if column references added false if not . | 153 | 19 |
33,191 | protected List < String > parseNameList ( DdlTokenStream tokens ) throws ParsingException { List < String > names = new LinkedList < String > ( ) ; while ( true ) { names . add ( parseName ( tokens ) ) ; if ( ! tokens . canConsume ( COMMA ) ) { break ; } } return names ; } | Parses a comma separated list of names . | 74 | 10 |
33,192 | protected String parseUntilTerminator ( DdlTokenStream tokens ) throws ParsingException { final StringBuilder sb = new StringBuilder ( ) ; boolean lastTokenWasPeriod = false ; Position prevPosition = ( tokens . hasNext ( ) ? tokens . nextPosition ( ) : Position . EMPTY_CONTENT_POSITION ) ; String prevToken = "" ; while ( tokens . hasNext ( ) && ! tokens . matches ( DdlTokenizer . STATEMENT_KEY ) && ( ( doUseTerminator ( ) && ! isTerminator ( tokens ) ) || ! doUseTerminator ( ) ) ) { final Position currPosition = tokens . nextPosition ( ) ; final String thisToken = tokens . consume ( ) ; final boolean thisTokenIsPeriod = thisToken . equals ( PERIOD ) ; final boolean thisTokenIsComma = thisToken . equals ( COMMA ) ; if ( lastTokenWasPeriod || thisTokenIsPeriod || thisTokenIsComma ) { sb . append ( thisToken ) ; } else if ( ( currPosition . getIndexInContent ( ) - prevPosition . getIndexInContent ( ) - prevToken . length ( ) ) > 0 ) { sb . append ( SPACE ) . append ( thisToken ) ; } else { sb . append ( thisToken ) ; } if ( thisTokenIsPeriod ) { lastTokenWasPeriod = true ; } else { lastTokenWasPeriod = false ; } prevToken = thisToken ; prevPosition = currPosition ; } return sb . toString ( ) ; } | Utility method which parses tokens until a terminator is found another statement is identified or there are no more tokens . | 335 | 24 |
33,193 | protected String parseUntilSemiColon ( DdlTokenStream tokens ) throws ParsingException { StringBuilder sb = new StringBuilder ( ) ; boolean lastTokenWasPeriod = false ; while ( tokens . hasNext ( ) && ! tokens . matches ( SEMICOLON ) ) { String thisToken = tokens . consume ( ) ; boolean thisTokenIsPeriod = thisToken . equals ( PERIOD ) ; boolean thisTokenIsComma = thisToken . equals ( COMMA ) ; if ( lastTokenWasPeriod || thisTokenIsPeriod || thisTokenIsComma ) { sb . append ( thisToken ) ; } else { sb . append ( SPACE ) . append ( thisToken ) ; } if ( thisTokenIsPeriod ) { lastTokenWasPeriod = true ; } else { lastTokenWasPeriod = false ; } } return sb . toString ( ) ; } | Utility method which parses tokens until a semicolon is found or there are no more tokens . | 192 | 21 |
33,194 | final boolean registerListener ( NodeTypes . Listener listener ) { return listener != null ? this . listeners . addIfAbsent ( listener ) : false ; } | Add a listener that will be notified when the NodeTypes changes . Listeners will be called in a single thread and should do almost no work . | 33 | 29 |
/**
 * Check whether the named node type is in use in any workspace in the repository, by issuing
 * a repository-wide query for any node of that type.
 *
 * @param nodeTypeName the name of the node type to check; may not be null
 * @return true if at least one node of this type exists (or if the check failed, erring on
 *         the side of caution); false if the query found no matching nodes
 * @throws InvalidQueryException if the generated query expression cannot be parsed
 */
boolean isNodeTypeInUse( Name nodeTypeName ) throws InvalidQueryException { String nodeTypeString = nodeTypeName . getString ( context . getNamespaceRegistry ( ) ) ; String expression = "SELECT * from [" + nodeTypeString + "] LIMIT 1" ; TypeSystem typeSystem = context . getValueFactories ( ) . getTypeSystem ( ) ; // Parsing must be done now ... QueryCommand command = queryParser . parseQuery ( expression , typeSystem ) ; assert command != null : "Could not parse " + expression ; Schemata schemata = getRepositorySchemata ( ) ; // Now query the entire repository for any nodes that use this node type ... RepositoryCache repoCache = repository . repositoryCache ( ) ; RepositoryQueryManager queryManager = repository . queryManager ( ) ; Set < String > workspaceNames = repoCache . getWorkspaceNames ( ) ; Map < String , NodeCache > overridden = null ; NodeTypes nodeTypes = repository . nodeTypeManager ( ) . getNodeTypes ( ) ; RepositoryIndexes indexDefns = repository . queryManager ( ) . getIndexes ( ) ; CancellableQuery query = queryManager . query ( context , repoCache , workspaceNames , overridden , command , schemata , indexDefns , nodeTypes , null , null ) ; try { QueryResults result = query . execute ( ) ; if ( result . isEmpty ( ) ) return false ; if ( result . getRowCount ( ) < 0 ) { // A negative row count means the total is unknown; walk the batches to find any row ... NodeSequence seq = result . getRows ( ) ; Batch batch = seq . nextBatch ( ) ; while ( batch != null ) { if ( batch . hasNext ( ) ) return true ; // The first batch may be empty (uncommon but possible), so try the next batch ... batch = seq . nextBatch ( ) ; } return false ; } return result . getRowCount ( ) > 0 ; } catch ( RepositoryException e ) { // If the usage check itself fails, log it and report the type as in-use to be safe ... logger . error ( e , JcrI18n . errorCheckingNodeTypeUsage , nodeTypeName , e . getLocalizedMessage ( ) ) ; return true ; } }
33,196 | protected void doInitialize ( IndexProvider provider ) throws RepositoryException { // Set the execution context instance ... Reflection . setValue ( provider , "context" , repository . context ( ) ) ; // Set the environment Reflection . setValue ( provider , "environment" , repository . environment ( ) ) ; provider . initialize ( ) ; // If successful, call the 'postInitialize' method reflectively (due to inability to call directly) ... Method postInitialize = Reflection . findMethod ( IndexProvider . class , "postInitialize" ) ; Reflection . invokeAccessibly ( provider , postInitialize , new Object [ ] { } ) ; if ( logger . isDebugEnabled ( ) ) { logger . debug ( "Successfully initialized index provider '{0}' in repository '{1}'" , provider . getName ( ) , repository . name ( ) ) ; } } | Initialize the supplied provider . | 187 | 6 |
33,197 | IndexWriter getIndexWriterForProviders ( Set < String > providerNames ) { List < IndexProvider > reindexProviders = new LinkedList <> ( ) ; for ( IndexProvider provider : providers . values ( ) ) { if ( providerNames . contains ( provider . getName ( ) ) ) { reindexProviders . add ( provider ) ; } } return CompositeIndexWriter . create ( reindexProviders ) ; } | Get the query index writer that will delegate to only those registered providers with the given names . | 91 | 18 |
33,198 | public void start ( ) { ObjectName beanName = null ; try { MBeanServer server = ManagementFactory . getPlatformMBeanServer ( ) ; beanName = getObjectName ( ) ; server . registerMBean ( this , beanName ) ; } catch ( InstanceAlreadyExistsException e ) { LOGGER . warn ( JcrI18n . mBeanAlreadyRegistered , beanName ) ; } catch ( Exception e ) { LOGGER . error ( e , JcrI18n . cannotRegisterMBean , beanName ) ; } } | Initializes & registers this MBean with the local MBean server . | 120 | 16 |
33,199 | public void stop ( ) { MBeanServer server = ManagementFactory . getPlatformMBeanServer ( ) ; ObjectName beanName = null ; try { beanName = getObjectName ( ) ; server . unregisterMBean ( beanName ) ; } catch ( InstanceNotFoundException e ) { LOGGER . debug ( "JMX bean {0} not found" , beanName ) ; } catch ( Exception e ) { LOGGER . error ( e , JcrI18n . cannotUnRegisterMBean , beanName ) ; } } | Un - registers the bean from the JMX server . | 118 | 11 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.