signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class RecoverableUnitIdTable { /** * Returns the next available id , starting from 1 , and associates * it with the given object . This method should be used during * the creation of a new recoverable object . * @ param obj The object to be associated with the id * @ return The next available recoverable unit id */ public final synchronized long nextId ( Object obj ) { } }
if ( tc . isEntryEnabled ( ) ) Tr . entry ( tc , "nextId" , obj ) ; long id = _idCount ++ ; // Keep incrementing the id until we // find one that hasn ' t been reserved . while ( _idMap . get ( id ) != null ) { id = _idCount ++ ; } // Add the new id to the map associating // it with the given object . _idMap . put ( id , obj ) ; if ( tc . isEntryEnabled ( ) ) Tr . exit ( tc , "nextId" , new Long ( id ) ) ; return id ;
public class Jar {
    /**
     * Adds the contents of the zip/JAR contained in the given stream to this JAR.
     *
     * @param path the path within the JAR where the root of the zip will be placed,
     *             or {@code null} for the JAR's root
     * @param zip the contents of the zip/JAR file
     * @return {@code this}
     * @throws IOException if reading the zip stream fails
     */
    public Jar addEntries(Path path, ZipInputStream zip) throws IOException {
        // Delegates to the three-argument overload; the null third argument
        // presumably means "no filter/transformer" — TODO confirm against that overload.
        return addEntries(path, zip, null);
    }
}
public class PHSCoverLetterV1_2Generator {
    /**
     * Gets the PHSCoverLetter12Document attachment from the narrative attachments.
     *
     * @return phsCoverLetter12Document {@link XmlObject} of type PHS398CoverLetterDocument.
     */
    private PHSCoverLetter12Document getPHSCoverLetter() {
        PHSCoverLetter12Document phsCoverLetterDocument = PHSCoverLetter12Document.Factory.newInstance();
        PHSCoverLetter12 phsCoverLetter = PHSCoverLetter12.Factory.newInstance();
        CoverLetterFile coverLetterFile = CoverLetterFile.Factory.newInstance();
        phsCoverLetter.setFormVersion(FormVersion.v1_2.getVersion());
        AttachedFileDataType attachedFileDataType = null;
        // Scan the proposal narratives for the first one whose type code marks a
        // PHS cover letter and that yields a non-null attachment.
        for (NarrativeContract narrative : pdDoc.getDevelopmentProposal().getNarratives()) {
            if (narrative.getNarrativeType().getCode() != null
                    && Integer.parseInt(narrative.getNarrativeType().getCode()) == NARRATIVE_PHS_COVER_LETTER) {
                attachedFileDataType = getAttachedFileType(narrative);
                if (attachedFileDataType != null) {
                    // Only the first matching attachment is used.
                    coverLetterFile.setCoverLetterFilename(attachedFileDataType);
                    break;
                }
            }
        }
        phsCoverLetter.setCoverLetterFile(coverLetterFile);
        phsCoverLetterDocument.setPHSCoverLetter12(phsCoverLetter);
        return phsCoverLetterDocument;
    }
}
public class SCM {
    /**
     * Calculates the {@link SCMRevisionState} that represents the state of the workspace of the given build.
     * The returned object is fed into
     * {@link #compareRemoteRevisionWith(AbstractProject, Launcher, FilePath, TaskListener, SCMRevisionState)}
     * as the baseline {@link SCMRevisionState} to determine if a build is necessary.
     * This method is called after source code is checked out for the given build (that is, after
     * {@link SCM#checkout(Run, Launcher, FilePath, TaskListener, File, SCMRevisionState)} has finished
     * successfully.) The obtained object is added to the build as an {@link Action} for later retrieval.
     * As an optimization, an {@link SCM} implementation can choose to compute the {@link SCMRevisionState}
     * and add it as an action during checkout, in which case this method will not be called.
     *
     * @param build the calculated {@link SCMRevisionState} is for the files checked out in this build.
     *              If {@link #requiresWorkspaceForPolling()} returns true, Hudson makes sure that the
     *              workspace of this build is available and accessible by the callee.
     * @param workspace the location of the checkout; normally not null, since this will normally be called
     *                  immediately after checkout, though could be null if data is being loaded from a very
     *                  old version of Jenkins and the SCM declares that it does not require a workspace for polling
     * @param launcher abstraction of the machine where the polling will take place; nullness matches
     *                 that of {@code workspace}
     * @param listener logs during the polling should be sent here
     * @throws InterruptedException interruption is usually caused by the user aborting the computation;
     *                              this exception should simply be propagated all the way up
     * @since 1.568
     */
    public @CheckForNull SCMRevisionState calcRevisionsFromBuild(@Nonnull Run<?, ?> build,
            @Nullable FilePath workspace, @Nullable Launcher launcher,
            @Nonnull TaskListener listener) throws IOException, InterruptedException {
        // Compatibility shim: if a subclass still overrides the legacy
        // AbstractBuild-based overload, delegate to it.
        if (build instanceof AbstractBuild && Util.isOverridden(SCM.class, getClass(),
                "calcRevisionsFromBuild", AbstractBuild.class, Launcher.class, TaskListener.class)) {
            return calcRevisionsFromBuild((AbstractBuild) build, launcher, listener);
        } else {
            // Neither overload is implemented by the subclass.
            throw new AbstractMethodError("you must override the new calcRevisionsFromBuild overload");
        }
    }
}
public class Flow {
    /**
     * Updates the history such that the given key is at the top and dispatches the
     * updated history.
     *
     * If newTopKey is already at the top of the history, the history is unchanged,
     * but it is dispatched with direction {@link Direction#REPLACE}.
     * If newTopKey is already on the history but not at the top, the stack pops until
     * newTopKey is at the top, and the dispatch direction is {@link Direction#BACKWARD}.
     * If newTopKey is not already on the history, it is pushed and the dispatch
     * direction is {@link Direction#FORWARD}.
     *
     * Objects' equality is always checked using {@link Object#equals(Object)}.
     */
    public void set(@NonNull final Object newTopKey) {
        move(new PendingTraversal() {
            @Override
            void doExecute() {
                // Already on top: re-dispatch the unchanged history as a REPLACE.
                if (newTopKey.equals(history.top())) {
                    dispatch(history, Direction.REPLACE);
                    return;
                }

                History.Builder builder = history.buildUpon();
                int count = 0;
                // Search from the bottom to see if we already have newTop on the stack.
                Object preservedInstance = null;
                for (Object entry : history.framesFromBottom()) {
                    // If we find newTop on the stack, pop back to it. The last pop
                    // removes the matching frame itself; it is re-pushed below so the
                    // original instance (not newTopKey) stays on top.
                    if (entry.equals(newTopKey)) {
                        for (int i = 0; i < history.size() - count; i++) {
                            preservedInstance = builder.pop();
                        }
                        break;
                    } else {
                        count++;
                    }
                }

                History newHistory;
                if (preservedInstance != null) {
                    // newTop was on the history. Put the preserved instance back on and dispatch.
                    builder.push(preservedInstance);
                    newHistory = builder.build();
                    dispatch(newHistory, Direction.BACKWARD);
                } else {
                    // newTop was not on the history. Push it on and dispatch.
                    builder.push(newTopKey);
                    newHistory = builder.build();
                    dispatch(newHistory, Direction.FORWARD);
                }
            }
        });
    }
}
public class JcrQueryManager {
    /**
     * Creates a new JCR {@link Query} by specifying the query expression itself, the language in
     * which the query is stated, the {@link QueryCommand} representation and, optionally, the node
     * from which the query was loaded. The language must be a string from among those returned by
     * {@code QueryManager#getSupportedQueryLanguages()}.
     *
     * @param expression the original query expression as supplied by the client; may not be null
     * @param language the language in which the expression is represented; may not be null
     * @param storedAtPath the path at which this query was stored, or null if this is not a stored query
     * @param locale an optional {@link Locale} instance, or null if no specific locale is to be used
     * @return the JCR query object; never null
     * @throws InvalidQueryException if the expression is invalid or the language is unsupported
     * @throws RepositoryException if the session is no longer live
     */
    public org.modeshape.jcr.api.query.Query createQuery(String expression, String language,
            Path storedAtPath, Locale locale) throws InvalidQueryException, RepositoryException {
        session.checkLive();
        // Look for a parser for the specified language...
        QueryParsers queryParsers = session.repository().runningState().queryParsers();
        QueryParser parser = queryParsers.getParserFor(language);
        if (parser == null) {
            Set<String> languages = queryParsers.getLanguages();
            throw new InvalidQueryException(JcrI18n.invalidQueryLanguage.text(language, languages));
        }
        try {
            // Parsing must be done now...
            QueryCommand command = parser.parseQuery(expression, typeSystem);
            if (command == null) {
                // The query is not well-formed and cannot be parsed...
                throw new InvalidQueryException(JcrI18n.queryCannotBeParsedUsingLanguage.text(language, expression));
            }
            // Set up the hints...
            PlanHints hints = new PlanHints();
            hints.showPlan = true;
            hints.hasFullTextSearch = true; // always include the score
            hints.validateColumnExistance = false; // see MODE-1055
            if (parser.getLanguage().equals(QueryLanguage.JCR_SQL2)) {
                hints.qualifyExpandedColumnNames = true;
            }
            return resultWith(expression, parser.getLanguage(), command, hints, storedAtPath, locale);
        } catch (ParsingException e) {
            // The query is not well-formed and cannot be parsed...
            String reason = e.getMessage();
            throw new InvalidQueryException(JcrI18n.queryCannotBeParsedUsingLanguage.text(language, expression, reason));
        } catch (org.modeshape.jcr.query.parse.InvalidQueryException e) {
            // The query was parsed, but there is an error in the query.
            String reason = e.getMessage();
            throw new InvalidQueryException(JcrI18n.queryInLanguageIsNotValid.text(language, expression, reason));
        }
    }
}
public class XmlExtractor { /** * { @ inheritDoc } */ public String extract ( Object target ) { } }
if ( target == null ) { return null ; } Document sourceDoc = ( Document ) target ; Element sourceElement = sourceDoc . getDocumentElement ( ) ; String nsUri = namespace == null ? sourceElement . getNamespaceURI ( ) : namespace ; if ( sourceElement . hasAttributeNS ( nsUri , nodeName ) ) { return sourceElement . getAttributeNS ( nsUri , nodeName ) ; } else { NodeList candidates = sourceElement . getElementsByTagNameNS ( nsUri , nodeName ) ; if ( candidates . getLength ( ) > 0 ) { return candidates . item ( 0 ) . getTextContent ( ) ; } } return null ;
public class ModifyCapacityReservationRequest {
    /**
     * This method is intended for internal use only. Returns the marshaled request
     * configured with additional parameters to enable operation dry-run.
     */
    @Override
    public Request<ModifyCapacityReservationRequest> getDryRunRequest() {
        Request<ModifyCapacityReservationRequest> request = new ModifyCapacityReservationRequestMarshaller().marshall(this);
        // Mark the marshaled request as a dry run so the service validates it without executing.
        request.addParameter("DryRun", Boolean.toString(true));
        return request;
    }
}
public class CPRuleAssetCategoryRelPersistenceImpl {
    /**
     * Returns the first cp rule asset category rel in the ordered set where CPRuleId = &#63;.
     *
     * @param CPRuleId the cp rule ID
     * @param orderByComparator the comparator to order the set by (optionally <code>null</code>)
     * @return the first matching cp rule asset category rel
     * @throws NoSuchCPRuleAssetCategoryRelException if a matching cp rule asset category rel could not be found
     */
    @Override
    public CPRuleAssetCategoryRel findByCPRuleId_First(long CPRuleId,
            OrderByComparator<CPRuleAssetCategoryRel> orderByComparator)
            throws NoSuchCPRuleAssetCategoryRelException {
        CPRuleAssetCategoryRel cpRuleAssetCategoryRel = fetchByCPRuleId_First(CPRuleId, orderByComparator);
        if (cpRuleAssetCategoryRel != null) {
            return cpRuleAssetCategoryRel;
        }
        // No match: build the standard "no such entity" message and fail.
        StringBundler msg = new StringBundler(4);
        msg.append(_NO_SUCH_ENTITY_WITH_KEY);
        msg.append("CPRuleId=");
        msg.append(CPRuleId);
        msg.append("}");
        throw new NoSuchCPRuleAssetCategoryRelException(msg.toString());
    }
}
public class MapTileCollisionRendererModel {
    /**
     * Create the function draw to buffer.
     *
     * @param collision the collision reference
     * @param tw the tile width
     * @param th the tile height
     * @return the created collision representation buffer
     */
    public static ImageBuffer createFunctionDraw(CollisionFormula collision, int tw, int th) {
        final ImageBuffer buffer = Graphics.createImageBuffer(tw, th, ColorRgba.TRANSPARENT);
        final Graphic g = buffer.createGraphic();
        g.setColor(ColorRgba.PURPLE);
        // Delegate the actual rendering to the Graphic-based overload.
        createFunctionDraw(g, collision, tw, th);
        // Release the graphic context; the pixels remain in the buffer.
        g.dispose();
        return buffer;
    }
}
public class Benchmark {
    /**
     * Rosenbrock's function: the sum over i of 100*(x[i]^2 - x[i+1])^2 + (x[i] - 1)^2.
     * Its global minimum is 0 at x = (1, ..., 1).
     *
     * @param x the input vector (length >= 1; length 1 yields 0)
     * @return the Rosenbrock function value
     */
    static public double rosenbrock(double[] x) {
        double total = 0.0;
        // Accumulate one term per consecutive pair (x[i-1], x[i]).
        for (int i = 1; i < x.length; i++) {
            double valley = x[i - 1] * x[i - 1] - x[i];
            double offset = x[i - 1] - 1.0;
            total += 100.0 * valley * valley + offset * offset;
        }
        return total;
    }
}
public class WktConversionUtils {
    /**
     * If the old value is a geometry or a Well-Known Text, convert it to the
     * appropriate new value. Otherwise, this method returns the old value.
     *
     * @param oldValue the old value
     * @param database the database instance
     * @param generator the SQL generator
     * @return the new value
     */
    public static Object handleColumnValue(final Object oldValue, final Database database,
            final WktInsertOrUpdateGenerator generator) {
        Object newValue = oldValue;
        if (oldValue instanceof Geometry) {
            final Geometry geometry = (Geometry) oldValue;
            final String wkt = geometry.toText();
            String sridString = null;
            // Only pass the SRID along when the geometry actually carries one.
            if (geometry.getSRID() > 0) {
                sridString = String.valueOf(geometry.getSRID());
            }
            newValue = generator.convertToFunction(wkt, sridString, database);
        } else if (oldValue instanceof String) {
            final String value = oldValue.toString().trim();
            final Matcher matcher = EWKT_PATTERN.matcher(value);
            if (matcher.matches()) {
                // Groups 2 and 3 are presumably the SRID and the WKT body of the
                // EWKT pattern — TODO confirm against EWKT_PATTERN's definition.
                final String sridString = matcher.group(2);
                final String wkt = matcher.group(3);
                final String function = generator.convertToFunction(wkt, sridString, database);
                newValue = function;
            }
        }
        return newValue;
    }
}
public class ScopeHandler {
    /**
     * Initializes the scope flags from the query parameters. Each flag is only
     * overwritten when its parameter is present, so absent parameters keep the
     * current field values.
     *
     * @param queryParameters the request query parameters; must never be null
     */
    public void init(final MultivaluedMap<String, String> queryParameters) {
        final String scopeCompileParam = queryParameters.getFirst(ServerAPI.SCOPE_COMPILE_PARAM);
        if (scopeCompileParam != null) {
            this.scopeComp = Boolean.valueOf(scopeCompileParam);
        }
        final String scopeProvidedParam = queryParameters.getFirst(ServerAPI.SCOPE_PROVIDED_PARAM);
        if (scopeProvidedParam != null) {
            this.scopePro = Boolean.valueOf(scopeProvidedParam);
        }
        final String scopeRuntimeParam = queryParameters.getFirst(ServerAPI.SCOPE_RUNTIME_PARAM);
        if (scopeRuntimeParam != null) {
            this.scopeRun = Boolean.valueOf(scopeRuntimeParam);
        }
        final String scopeTestParam = queryParameters.getFirst(ServerAPI.SCOPE_TEST_PARAM);
        if (scopeTestParam != null) {
            this.scopeTest = Boolean.valueOf(scopeTestParam);
        }
    }
}
public class HSQLInterface {
    /**
     * Modify the current schema with a SQL DDL command and get the diff which
     * represents the changes.
     *
     * Note that you have to be consistent WRT case for the expected names.
     *
     * @param stmtInfo pre-processed info about the DDL statement, or null if
     *                 pre-processing failed
     * @param ddl the SQL DDL statement to be run
     * @return the "diff" of the before and after trees for the affected table
     * @throws HSQLParseException if a SQL parse error is encountered
     */
    public VoltXMLDiff runDDLCommandAndDiff(HSQLDDLInfo stmtInfo, String ddl) throws HSQLParseException {
        // Name of the table we're going to have to diff (if any).
        String expectedTableAffected = null;

        // If we fail to pre-process a statement, then we want to fail, but we're
        // still going to run the statement through HSQL to get its error message.
        // This variable helps us make sure we don't fail to preprocess and then
        // succeed at running the statement through HSQL.
        boolean expectFailure = false;

        // If cascade, we're going to need to look for any views that might have
        // gotten deleted. So get a list of all tables and views that existed before
        // we run the ddl, then we'll do a comparison later.
        Set<String> existingTableNames = null;

        if (stmtInfo != null) {
            if (stmtInfo.cascade) {
                existingTableNames = getTableNames();
            }
            // We either have an index name or a table/view name, but not both.
            if (stmtInfo.noun == HSQLDDLInfo.Noun.INDEX) {
                if (stmtInfo.verb == HSQLDDLInfo.Verb.CREATE) {
                    expectedTableAffected = stmtInfo.secondName;
                } else {
                    expectedTableAffected = tableNameForIndexName(stmtInfo.name);
                }
            } else {
                expectedTableAffected = stmtInfo.name;
            }
            // Note that we're assuming ifexists can't happen with "create".
            expectFailure = (expectedTableAffected == null) && !stmtInfo.ifexists;
        } else {
            expectFailure = true;
        }

        runDDLCommand(ddl);

        // If we expect to fail, but the statement above didn't bail...
        // (Shouldn't get here ever, I think.)
        if (expectFailure) {
            throw new HSQLParseException("Unable to plan statement due to VoltDB DDL pre-processing error");
        }

        // Sanity checks for non-failure.
        assert (stmtInfo != null);

        // Get old and new XML representations for the affected table.
        VoltXMLElement tableXMLNew = null, tableXMLOld = null;
        if (expectedTableAffected != null) {
            tableXMLNew = getXMLForTable(expectedTableAffected);
            tableXMLOld = lastSchema.get(expectedTableAffected);
        }

        // Valid reasons for tableXMLNew to be null are DROP IF EXISTS and not much else.
        if (tableXMLNew == null) {
            tableXMLNew = emptySchema;
        }
        // The old table can be null for CREATE TABLE or for IF EXISTS stuff.
        if (tableXMLOld == null) {
            tableXMLOld = emptySchema;
        }

        VoltXMLDiff diff = VoltXMLElement.computeDiff(tableXMLOld, tableXMLNew);

        // Now find any views that might be missing and make sure the diff reflects
        // that they're gone.
        if (stmtInfo.cascade) {
            Set<String> finalTableNames = getTableNames();
            for (String tableName : existingTableNames) {
                if (!finalTableNames.contains(tableName)) {
                    tableName = tableName.toLowerCase();
                    tableXMLOld = lastSchema.get(tableName).children.get(0);
                    lastSchema.remove(tableName);
                    // The directly-affected table is already covered by the diff above.
                    if (tableName.equals(expectedTableAffected)) {
                        continue;
                    }
                    diff.m_removedElements.add(tableXMLOld);
                }
            }
        }

        // This is a hack to allow the diff-applier to accept a diff that has no order.
        diff.m_elementOrder.clear();

        // Remember the current schema.
        if (expectedTableAffected != null) {
            lastSchema.put(expectedTableAffected, tableXMLNew.duplicate());
        }

        return diff;
    }
}
public class WeightedDirectedMultigraph {
    /**
     * Returns the set of vertices with an edge directed to the given vertex,
     * or an empty set when the vertex has no edges.
     *
     * {@inheritDoc}
     */
    public IntSet predecessors(int vertex) {
        SparseWeightedDirectedTypedEdgeSet<T> edges = vertexToEdges.get(vertex);
        // Unknown vertex (or no edges): empty set; otherwise expose a read-only view.
        return (edges == null)
            ? PrimitiveCollections.emptyIntSet()
            : PrimitiveCollections.unmodifiableSet(edges.predecessors());
    }
}
public class QQPlot {
    /**
     * Create a plot canvas with the two-sample Q-Q plot.
     * The x-axis is the quantiles of x and the y-axis is the quantiles of y.
     *
     * @param x a sample set
     * @param y a sample set
     * @return the plot canvas
     */
    public static PlotCanvas plot(double[] x, double[] y) {
        // NOTE(review): Math.min/Math.max are called with arrays, so this is
        // presumably a project-local Math utility with array overloads (e.g. smile's),
        // not java.lang.Math — confirm the import.
        double[] lowerBound = {Math.min(x), Math.min(y)};
        double[] upperBound = {Math.max(x), Math.max(y)};
        PlotCanvas canvas = new PlotCanvas(lowerBound, upperBound);
        canvas.add(new QQPlot(x, y));
        return canvas;
    }
}
public class LongMultiset {
    /**
     * The implementation is equivalent to performing the following steps for this LongMultiset:
     *
     * <pre>
     * final long oldValue = get(key);
     * final long newValue = remappingFunction.apply(key, oldValue);
     * if (newValue &gt; 0) {
     *     set(key, newValue);
     * } else {
     *     if (oldValue &gt; 0) {
     *         remove(key);
     *     }
     * }
     * return newValue;
     * </pre>
     *
     * @param key the key whose count is recomputed
     * @param remappingFunction computes the new count from the key and old count
     * @return the new count computed by the remapping function
     * @throws E if the remapping function throws
     */
    public <E extends Exception> long compute(T key, Try.BiFunction<? super T, Long, Long, E> remappingFunction) throws E {
        N.checkArgNotNull(remappingFunction);

        final long oldValue = get(key);
        final long newValue = remappingFunction.apply(key, oldValue);

        if (newValue > 0) {
            set(key, newValue);
        } else {
            // Non-positive new count: drop the key entirely if it was present.
            if (oldValue > 0) {
                remove(key);
            }
        }

        return newValue;
    }
}
public class XMPPTCPConnection { /** * Initializes the connection by creating a stanza reader and writer and opening a * XMPP stream to the server . * @ throws XMPPException if establishing a connection to the server fails . * @ throws SmackException if the server fails to respond back or if there is anther error . * @ throws IOException * @ throws InterruptedException */ private void initConnection ( ) throws IOException , InterruptedException { } }
compressionHandler = null ; // Set the reader and writer instance variables initReaderAndWriter ( ) ; int availableReaderWriterSemaphorePermits = readerWriterSemaphore . availablePermits ( ) ; if ( availableReaderWriterSemaphorePermits < 2 ) { Object [ ] logObjects = new Object [ ] { this , availableReaderWriterSemaphorePermits , } ; LOGGER . log ( Level . FINE , "Not every reader/writer threads where terminated on connection re-initializtion of {0}. Available permits {1}" , logObjects ) ; } readerWriterSemaphore . acquire ( 2 ) ; // Start the writer thread . This will open an XMPP stream to the server packetWriter . init ( ) ; // Start the reader thread . The startup ( ) method will block until we // get an opening stream packet back from server packetReader . init ( ) ;
public class SysFileExtensionEditorPanel { /** * GEN - LAST : event _ buttonResetActionPerformed */ private void buttonAddLineActionPerformed ( java . awt . event . ActionEvent evt ) { } }
// GEN - FIRST : event _ buttonAddLineActionPerformed ( ( DefaultTableModel ) this . tableExtensions . getModel ( ) ) . addRow ( new String [ ] { "" } ) ;
public class OtpErlangLong { /** * Get this number as a byte . * @ return the byte value of this number . * @ exception OtpErlangRangeException * if the value is too large to be represented as a byte . */ public byte byteValue ( ) throws OtpErlangRangeException { } }
final long l = longValue ( ) ; final byte i = ( byte ) l ; if ( i != l ) { throw new OtpErlangRangeException ( "Value too large for byte: " + val ) ; } return i ;
public class DefaultDeleteResolver {
    /**
     * Maps the object to a delete query, executes it through the content resolver's
     * low-level API, and wraps the affected-row count into a {@link DeleteResult}.
     *
     * {@inheritDoc}
     */
    @NonNull
    @Override
    public DeleteResult performDelete(@NonNull StorIOContentResolver storIOContentResolver, @NonNull T object) {
        final DeleteQuery deleteQuery = mapToDeleteQuery(object);
        final int numberOfRowsDeleted = storIOContentResolver.lowLevel().delete(deleteQuery);
        return DeleteResult.newInstance(numberOfRowsDeleted, deleteQuery.uri());
    }
}
public class Async {
    /**
     * Invokes the asynchronous function immediately, surfacing the result through an
     * Observable and waits on the specified Scheduler.
     * <img width="640" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/startFuture.s.png" alt="">
     *
     * @param <T> the result type
     * @param functionAsync the asynchronous function to run
     * @param scheduler the Scheduler where the completion of the Future is awaited
     * @return an Observable that surfaces the result of the future
     * @see <a href="https://github.com/ReactiveX/RxJava/wiki/Async-Operators#wiki-startfuture">RxJava Wiki: startFuture()</a>
     */
    public static <T> Observable<T> startFuture(Func0<? extends Future<? extends T>> functionAsync, Scheduler scheduler) {
        // Pure delegation to the operator implementation.
        return OperatorStartFuture.startFuture(functionAsync, scheduler);
    }
}
public class MergeConverter {
    /**
     * Get the current converter.
     *
     * @return the current converter depending on the current table in the merge
     *         table, or null if there is no current table or no match
     */
    public Converter getNextConverter() {
        BaseTable currentTable = m_MergeRecord.getTable().getCurrentTable();
        if (currentTable == null)
            return null;
        if (m_FieldSeq != -1) {
            // A specific field sequence was configured.
            if (m_strLinkedRecord == null)
                return currentTable.getRecord().getField(m_FieldSeq); // Get the current field
            else
                return currentTable.getRecord().getField(m_strLinkedRecord, m_FieldSeq); // Get the current field
        } else {
            // No field sequence: scan the configured info list for the first entry
            // whose record exists in the current table.
            for (int index = 0; index < m_vArray.size(); index++) {
                InfoList infoList = (InfoList) m_vArray.elementAt(index);
                Record pQueryTable = currentTable.getRecord().getRecord(infoList.m_strFileName);
                if (pQueryTable != null) {
                    String strLinkFileName = infoList.m_strLinkFileName;
                    int iFieldSeq = infoList.m_iFieldSeq;
                    Converter field = infoList.m_converter;
                    // Precedence: explicit converter, then linked-file field, then query-table field.
                    if (field != null)
                        return field;
                    if (strLinkFileName != null)
                        return currentTable.getRecord().getField(strLinkFileName, iFieldSeq);
                    return pQueryTable.getField(iFieldSeq);
                }
            }
        }
        return null;
    }
}
public class OAuth2AuthenticationProvider {
    /**
     * Performs authentication with the same contract as
     * {@link org.springframework.security.authentication.AuthenticationManager#authenticate(org.springframework.security.core.Authentication)}.
     *
     * @param authentication the authentication request object
     * @return a fully authenticated object including credentials. May return <code>null</code> if the
     *         <code>AuthenticationProvider</code> is unable to support authentication of the passed
     *         <code>Authentication</code> object. In such a case, the next <code>AuthenticationProvider</code>
     *         that supports the presented <code>Authentication</code> class will be tried.
     * @throws org.springframework.security.core.AuthenticationException if authentication fails
     */
    @Override
    public Authentication authenticate(Authentication authentication) throws AuthenticationException {
        // Not our token type: let the next provider in the chain try.
        if (!supports(authentication.getClass())) {
            return null;
        }
        LOGGER.debug("OAuth2Authentication authentication request: " + authentication);

        if (authentication.getCredentials() == null) {
            LOGGER.debug("No credentials found in request.");
            // Behavior on missing credentials is configurable: reject loudly or pass through.
            if (throwExceptionWhenTokenRejected) {
                throw new BadCredentialsException("No pre-authenticated credentials found in request.");
            }
            return null;
        }

        String token = getAccessToken(authentication);
        OAuth2AuthenticationToken tmpToken = new OAuth2AuthenticationToken(token);
        UserDetails ud = authenticatedUserDetailsService.loadUserDetails(tmpToken);
        // Verify account status (locked/expired/disabled) before issuing the result.
        userDetailsChecker.check(ud);

        OAuth2AuthenticationToken result = new OAuth2AuthenticationToken(ud, token, ud.getAuthorities());
        result.setDetails(authentication.getDetails());
        return result;
    }
}
public class Output {
    /**
     * Opens a code attribute context. Only one code context may be open at a time;
     * nesting is a programming error.
     *
     * @param code the code attribute to open
     * @throws RuntimeException if a code context is already open
     */
    public void openCode(Code code) {
        if (context != null) {
            throw new RuntimeException("nested code attribute");
        }
        context = code;
        // Remember where this code attribute starts in the global output.
        codeStart = getGlobalOfs();
    }
}
public class ClientCnxn {
    /**
     * Shutdown the send/event threads. This method should not be called directly —
     * rather it should be called as part of the close operation. This method is
     * primarily here to allow the tests to verify disconnection behavior.
     */
    public void disconnect() {
        if (LOG.isDebugEnabled()) {
            LOG.debug("Disconnecting client for session: 0x" + Long.toHexString(getSessionId()));
        }

        sendThread.close();
        // Signal the event thread to drain and terminate.
        eventThread.queueEventOfDeath();
    }
}
public class SecureAction {
    /**
     * Returns a ZipFile. Same as calling {@code new ZipFile(file)}, but performed
     * under a privileged action when a security manager is installed.
     *
     * @param file the file to get a ZipFile for
     * @return a ZipFile
     * @throws IOException if an error occurred; the file path is added to the message
     */
    public ZipFile getZipFile(final File file) throws IOException {
        try {
            // No security manager: open directly.
            if (System.getSecurityManager() == null)
                return new ZipFile(file);
            try {
                // Open under the stored access-control context so the caller's
                // (possibly restricted) permissions are not used.
                return AccessController.doPrivileged(new PrivilegedExceptionAction<ZipFile>() {
                    @Override
                    public ZipFile run() throws IOException {
                        return new ZipFile(file);
                    }
                }, controlContext);
            } catch (PrivilegedActionException e) {
                // Unwrap: the action can only throw IOException or an unchecked exception.
                if (e.getException() instanceof IOException)
                    throw (IOException) e.getException();
                throw (RuntimeException) e.getException();
            }
        } catch (ZipException e) {
            // Re-throw with the file path for diagnosability, preserving the cause.
            ZipException zipNameException = new ZipException("Exception in opening zip file: " + file.getPath()); //$NON-NLS-1$
            zipNameException.initCause(e);
            throw zipNameException;
        } catch (IOException e) {
            throw new IOException("Exception in opening zip file: " + file.getPath(), e); //$NON-NLS-1$
        }
    }
}
public class ListenersImpl {
    /**
     * Returns all <code>listener</code> elements.
     *
     * @return list of <code>listener</code>
     */
    public List<Listener<Listeners<T>>> getAllListener() {
        List<Listener<Listeners<T>>> list = new ArrayList<Listener<Listeners<T>>>();
        List<Node> nodeList = childNode.get("listener");
        // Wrap each matching child node in a typed Listener facade.
        for (Node node : nodeList) {
            Listener<Listeners<T>> type = new ListenerImpl<Listeners<T>>(this, "listener", childNode, node);
            list.add(type);
        }
        return list;
    }
}
public class GrailsHibernateTemplate {
    /**
     * Prepare the given Query object, applying cache settings and/or a transaction timeout.
     *
     * @param query the Query object to prepare
     */
    protected void prepareQuery(org.hibernate.query.Query query) {
        if (cacheQueries) {
            query.setCacheable(true);
        }
        if (shouldPassReadOnlyToHibernate()) {
            query.setReadOnly(true);
        }
        // Propagate the transaction-bound timeout (if any) to the query.
        SessionHolder sessionHolder = (SessionHolder) TransactionSynchronizationManager.getResource(sessionFactory);
        if (sessionHolder != null && sessionHolder.hasTimeout()) {
            query.setTimeout(sessionHolder.getTimeToLiveInSeconds());
        }
    }
}
public class GEIMGImpl {
    /**
     * <!-- begin-user-doc -->
     * Resets the given feature to its default value; delegates unknown features
     * to the superclass (EMF-generated).
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void eUnset(int featureID) {
        switch (featureID) {
            case AfplibPackage.GEIMG__DATA:
                setDATA(DATA_EDEFAULT);
                return;
        }
        super.eUnset(featureID);
    }
}
public class BaseJdbcBufferedInserter {
    /**
     * Submits the user-defined {@link #insertBatch(PreparedStatement)} call to the
     * {@link Retryer}, which takes care of resubmitting the records according to
     * {@code WRITER_JDBC_INSERT_RETRY_TIMEOUT} and {@code WRITER_JDBC_INSERT_RETRY_MAX_ATTEMPT}
     * when failure happens.
     *
     * @param pstmt PreparedStatement object
     * @throws RuntimeException if the insert still fails after all retries
     */
    protected void executeBatchInsert(final PreparedStatement pstmt) {
        try {
            // Need a Callable interface to be wrapped by Retryer.
            this.retryer.wrap(new Callable<Boolean>() {
                @Override
                public Boolean call() throws Exception {
                    return insertBatch(pstmt);
                }
            }).call();
        } catch (Exception e) {
            throw new RuntimeException("Failed to insert.", e);
        }
        // Clear the batch only after a successful (possibly retried) insert.
        resetBatch();
    }
}
public class BatchResources {
    /**
     * Returns a batch by its ID.
     *
     * @param req HTTPServlet request. Cannot be null.
     * @param batchId The batch ID to retrieve.
     * @return The corresponding batch
     * @throws WebApplicationException If an error occurs, or with 404 when the batch does not exist.
     */
    @GET
    @Produces(MediaType.APPLICATION_JSON)
    @Path("/{batchId}")
    @Description("Return a batch query's status and results if done")
    public Response getBatchById(@Context HttpServletRequest req, @PathParam("batchId") String batchId) {
        _validateBatchId(batchId);
        PrincipalUser currentOwner = validateAndGetOwner(req, null);
        BatchMetricQuery batch = batchService.findBatchById(batchId);
        if (batch != null) {
            // Only the batch's owner (or an authorized principal) may read it.
            PrincipalUser actualOwner = userService.findUserByUsername(batch.getOwnerName());
            validateResourceAuthorization(req, actualOwner, currentOwner);
            return Response.ok(BatchDto.transformToDto(batch), MediaType.APPLICATION_JSON).build();
        }
        throw new WebApplicationException(Response.Status.NOT_FOUND.getReasonPhrase(), Response.Status.NOT_FOUND);
    }
}
public class InnerNodeImpl {
    /**
     * Returns true if {@code pattern} equals either "*" or {@code s}. Pattern
     * may be {@code null}.
     */
    private static boolean matchesNameOrWildcard(String pattern, String s) {
        if ("*".equals(pattern)) {
            return true;
        }
        // Null-safe equality: two nulls count as a match, matching Objects.equal semantics.
        return (pattern == null) ? (s == null) : pattern.equals(s);
    }
}
public class PlacedPlayerSessionMarshaller {
    /**
     * Marshall the given parameter object into the protocol representation.
     *
     * @param placedPlayerSession the session to marshall; must not be null
     * @param protocolMarshaller the marshaller receiving the field bindings
     */
    public void marshall(PlacedPlayerSession placedPlayerSession, ProtocolMarshaller protocolMarshaller) {
        if (placedPlayerSession == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Emit each field against its static binding descriptor.
            protocolMarshaller.marshall(placedPlayerSession.getPlayerId(), PLAYERID_BINDING);
            protocolMarshaller.marshall(placedPlayerSession.getPlayerSessionId(), PLAYERSESSIONID_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure in the SDK's client exception, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class StandardMultipartResolver { /** * Determine the encoding for the given request . * < p > The default implementation checks the request encoding , falling back to the default encoding specified for * this resolver . * @ param request current request * @ return the encoding for the request . */ @ NonNull private String determineEncoding ( HttpRequest request ) { } }
MediaType mimeType = request . getContentType ( ) ; if ( mimeType == null ) return Charsets . UTF_8 . name ( ) ; Charset charset = mimeType . getCharset ( ) ; return charset == null ? Charsets . UTF_8 . name ( ) : charset . name ( ) ;
public class BsElevateWordCA { public void filter ( String name , EsAbstractConditionQuery . OperatorCall < BsElevateWordCQ > queryLambda , ConditionOptionCall < FilterAggregationBuilder > opLambda , OperatorCall < BsElevateWordCA > aggsLambda ) { } }
ElevateWordCQ cq = new ElevateWordCQ ( ) ; if ( queryLambda != null ) { queryLambda . callback ( cq ) ; } FilterAggregationBuilder builder = regFilterA ( name , cq . getQuery ( ) ) ; if ( opLambda != null ) { opLambda . callback ( builder ) ; } if ( aggsLambda != null ) { ElevateWordCA ca = new ElevateWordCA ( ) ; aggsLambda . callback ( ca ) ; ca . getAggregationBuilderList ( ) . forEach ( builder :: subAggregation ) ; }
public class Iso8601UtilLimitedImpl {
    /**
     * Formats the given time of day into {@code buffer}.
     *
     * @param hours the {@link java.util.Calendar#HOUR_OF_DAY hours}
     * @param minutes the {@link java.util.Calendar#MINUTE minutes}
     * @param seconds the {@link java.util.Calendar#SECOND seconds}
     * @param extended if {@code false} the basic time format ("HHmmss") is used, if {@code true}
     *        the extended time format ("HH:mm:ss") is used
     * @param buffer where to append the formatted time
     * @throws IllegalStateException if the underlying {@link Appendable} fails with an
     *         {@link IOException}
     */
    public void formatTime(int hours, int minutes, int seconds, boolean extended, Appendable buffer) {
        try {
            appendTwoDigits(hours, buffer);
            if (extended) {
                buffer.append(':');
            }
            appendTwoDigits(minutes, buffer);
            if (extended) {
                buffer.append(':');
            }
            appendTwoDigits(seconds, buffer);
        } catch (IOException e) {
            throw new IllegalStateException(e);
        }
    }

    /**
     * Appends {@code value} left-padded with a single '0' when it renders to fewer
     * than two characters (the hand-rolled padding was repeated three times before).
     */
    private static void appendTwoDigits(int value, Appendable buffer) throws IOException {
        String digits = String.valueOf(value);
        if (digits.length() < 2) {
            buffer.append('0');
        }
        buffer.append(digits);
    }
}
public class TargetsMngrImpl {
    /**
     * Saves (or prepares the removal of) the association between an application instance
     * or component and a deployment target, keeping the on-disk properties file and the
     * in-memory cache in sync.
     *
     * @param app the application the instance belongs to
     * @param targetId the target to associate with (only used when {@code add} is true)
     * @param instancePathOrComponentName the instance path or component name; may be null
     * @param add true to register a new association, false to only remove the old one
     * @throws IOException if the target does not exist or the properties cannot be written
     */
    private void saveAssociation(AbstractApplication app, String targetId, String instancePathOrComponentName, boolean add) throws IOException {
        // Association means an exact mapping between an application instance
        // and a target ID.
        InstanceContext key = new InstanceContext(app, instancePathOrComponentName);
        // Remove the old association, always — even when we are about to add a new one,
        // so the previous target's file no longer lists this instance.
        if (instancePathOrComponentName != null) {
            String oldTargetId = this.instanceToCachedId.remove(key);
            if (oldTargetId != null) {
                File associationFile = findTargetFile(oldTargetId, TARGETS_ASSOC_FILE);
                Properties props = Utils.readPropertiesFileQuietly(associationFile, this.logger);
                props.remove(key.toString());
                writeProperties(props, associationFile);
            }
        }
        // Register a potential new association and update the cache.
        if (add) {
            File associationFile = findTargetFile(targetId, TARGETS_ASSOC_FILE);
            if (associationFile == null)
                throw new IOException("Target " + targetId + " does not exist.");
            Properties props = Utils.readPropertiesFileQuietly(associationFile, this.logger);
            // Only the key matters here; the value is deliberately empty.
            props.setProperty(key.toString(), "");
            writeProperties(props, associationFile);
            this.instanceToCachedId.put(key, targetId);
        }
    }
}
public class IPv6AddressSection { /** * Merges this with the list of sections to produce the smallest array of sequential block subnets , going from smallest to largest * @ param sections the sections to merge with this * @ return */ public IPv6AddressSection [ ] mergeToSequentialBlocks ( IPv6AddressSection ... sections ) throws SizeMismatchException { } }
List < IPAddressSegmentSeries > blocks = getMergedSequentialBlocks ( this , sections , true , createSeriesCreator ( getAddressCreator ( ) , getMaxSegmentValue ( ) ) ) ; return blocks . toArray ( new IPv6AddressSection [ blocks . size ( ) ] ) ;
public class AbstractComponent { /** * { @ inheritDoc } */ @ Override public final < WB extends WaveBean > Wave returnData ( final Class < ? extends Service > serviceClass , final WaveType waveType , final WB waveBean ) { } }
return sendWaveIntoJit ( createWave ( WaveGroup . RETURN_DATA , waveType , serviceClass , waveBean ) ) ;
public class MoneyAmountStyle {
    /**
     * Returns a {@code MoneyAmountStyle} instance configured for the specified locale.
     * This method will return a new instance where each field that was defined
     * to be localized (marked with a negative sentinel value) is filled in.
     * If this instance is fully defined (with all fields non-negative), then this
     * method has no effect. Once this method is called, no method will return null.
     * The settings for the locale are pulled from {@link DecimalFormatSymbols} and
     * {@link DecimalFormat}.
     *
     * @param locale the locale to use, not null
     * @return the new instance for chaining, never null
     */
    public MoneyAmountStyle localize(Locale locale) {
        MoneyFormatter.checkNotNull(locale, "Locale must not be null");
        MoneyAmountStyle result = this;
        // Lazily resolved template with the locale's settings; only fetched once,
        // and only if at least one field is marked for localization (negative value).
        MoneyAmountStyle protoStyle = null;
        if (zeroCharacter < 0) {
            protoStyle = getLocalizedStyle(locale);
            result = result.withZeroCharacter(protoStyle.getZeroCharacter());
        }
        if (positiveCharacter < 0) {
            protoStyle = (protoStyle == null ? getLocalizedStyle(locale) : protoStyle);
            result = result.withPositiveSignCharacter(protoStyle.getPositiveSignCharacter());
        }
        if (negativeCharacter < 0) {
            protoStyle = (protoStyle == null ? getLocalizedStyle(locale) : protoStyle);
            result = result.withNegativeSignCharacter(protoStyle.getNegativeSignCharacter());
        }
        if (decimalPointCharacter < 0) {
            protoStyle = (protoStyle == null ? getLocalizedStyle(locale) : protoStyle);
            result = result.withDecimalPointCharacter(protoStyle.getDecimalPointCharacter());
        }
        if (groupingCharacter < 0) {
            protoStyle = (protoStyle == null ? getLocalizedStyle(locale) : protoStyle);
            result = result.withGroupingCharacter(protoStyle.getGroupingCharacter());
        }
        if (groupingSize < 0) {
            protoStyle = (protoStyle == null ? getLocalizedStyle(locale) : protoStyle);
            result = result.withGroupingSize(protoStyle.getGroupingSize());
        }
        if (extendedGroupingSize < 0) {
            protoStyle = (protoStyle == null ? getLocalizedStyle(locale) : protoStyle);
            result = result.withExtendedGroupingSize(protoStyle.getExtendedGroupingSize());
        }
        return result;
    }
}
public class ControllerRunner { /** * Injects controller with dependencies from Guice module . */ private void injectController ( AppController controller ) { } }
Injector injector = Configuration . getInjector ( ) ; if ( injector != null ) { injector . injectMembers ( controller ) ; }
public class CCEncoder { /** * Returns the best at - most - one encoder for a given number of variables . The valuation is based on theoretical and * practical observations . For < = 10 the pure encoding without introduction of new variables is used , otherwise * the product encoding is chosen . * @ param n the number of variables * @ return the best at - most - one encoder */ private CCAtMostOne bestAMO ( int n ) { } }
if ( n <= 10 ) { if ( this . amoPure == null ) this . amoPure = new CCAMOPure ( ) ; return this . amoPure ; } else { if ( this . amoProduct == null ) this . amoProduct = new CCAMOProduct ( this . config ( ) . productRecursiveBound ) ; return this . amoProduct ; }
public class SqlLoaderImpl { /** * SQL名作成 < br > * @ param packageName パッケージ名 * @ param filePath ルートパスからの相対パス * @ return */ private String makeSqlName ( final StringBuilder packageName , final String filePath ) { } }
if ( packageName . length ( ) == 0 ) { return trimSqlExtension ( filePath ) ; } else { return new StringBuilder ( packageName ) . append ( PATH_SEPARATOR ) . append ( trimSqlExtension ( filePath ) ) . toString ( ) ; }
public class FnNumber {
    /**
     * Determines whether the result of executing the specified function on the target object
     * is greater than the specified object parameter in value, that is, whether
     * <tt>functionResult.compareTo(object) &gt; 0</tt>. Both the target and the specified
     * object have to implement {@link Comparable}.
     *
     * @param by the function applied to the target before comparing
     * @param object the object to compare to the target
     * @return true if the function result is greater than the specified object, false if not
     */
    public static final Function<Number, Boolean> greaterThanBy(final IFunction<Number, ?> by, final Number object) {
        // Pure delegation to the generic object-based implementation.
        return FnObject.greaterThanBy(by, object);
    }
}
public class mail_profile {
    /**
     * Use this operation to modify a mail profile.
     *
     * @param client the NITRO service to issue the request on
     * @param resource the mail profile carrying the modified fields
     * @return the updated resource as returned by the appliance
     * @throws Exception if validation or the update request fails
     */
    public static mail_profile update(nitro_service client, mail_profile resource) throws Exception {
        // Validate the payload for a "modify" operation before sending it.
        resource.validate("modify");
        // update_resource returns an array; a single-resource update yields exactly one element.
        return ((mail_profile[]) resource.update_resource(client))[0];
    }
}
public class CachesEndpoint {
    /**
     * Invalidates the cache.
     *
     * @param name The name of the cache to invalidate
     * @return A maybe that emits a boolean if the operation was successful; empty when no
     *         cache with that name is configured
     */
    @Delete
    public Maybe<Boolean> invalidateCache(@NotBlank @Selector String name) {
        try {
            final SyncCache<Object> cache = cacheManager.getCache(name);
            // Bridge the async invalidation future into a Maybe.
            return Maybe.create(emitter -> cache.async().invalidateAll().whenComplete((aBoolean, throwable) -> {
                if (throwable != null) {
                    emitter.onError(throwable);
                } else {
                    // NOTE(review): if the future ever completes with a null value,
                    // onSuccess(null) would fail — presumably invalidateAll always
                    // yields a non-null Boolean; confirm against the cache implementation.
                    emitter.onSuccess(aBoolean);
                    emitter.onComplete();
                }
            }));
        } catch (ConfigurationException e) {
            // no cache with that name — treat as empty rather than an error
            return Maybe.empty();
        }
    }
}
public class ApiOvhTelephony { /** * Move a service of billing account . Source and destination nics should be the same . * REST : POST / telephony / { billingAccount } / service / { serviceName } / changeOfBillingAccount * @ param billingAccountDestination [ required ] Billing account destination target * @ param billingAccount [ required ] The name of your billingAccount * @ param serviceName [ required ] */ public void billingAccount_service_serviceName_changeOfBillingAccount_POST ( String billingAccount , String serviceName , String billingAccountDestination ) throws IOException { } }
String qPath = "/telephony/{billingAccount}/service/{serviceName}/changeOfBillingAccount" ; StringBuilder sb = path ( qPath , billingAccount , serviceName ) ; HashMap < String , Object > o = new HashMap < String , Object > ( ) ; addBody ( o , "billingAccountDestination" , billingAccountDestination ) ; exec ( qPath , "POST" , sb . toString ( ) , o ) ;
public class MessageBirdClient { /** * Request a Lookup HLR ( lookup ) * @ param phoneNumber phone number is for request hlr * @ param reference reference for request hlr * @ return lookupHlr * @ throws UnauthorizedException if client is unauthorized * @ throws GeneralException general exception */ public LookupHlr requestLookupHlr ( final BigInteger phoneNumber , final String reference ) throws UnauthorizedException , GeneralException { } }
if ( phoneNumber == null ) { throw new IllegalArgumentException ( "Phonenumber must be specified." ) ; } if ( reference == null ) { throw new IllegalArgumentException ( "Reference must be specified." ) ; } final LookupHlr lookupHlr = new LookupHlr ( ) ; lookupHlr . setPhoneNumber ( phoneNumber ) ; lookupHlr . setReference ( reference ) ; return this . requestLookupHlr ( lookupHlr ) ;
public class PrometheusMetricsServlet { /** * Gets the { @ link ServletContextHandler } of the metrics servlet . * @ return the { @ link ServletContextHandler } object */ public ServletContextHandler getHandler ( ) { } }
ServletContextHandler contextHandler = new ServletContextHandler ( ) ; contextHandler . setContextPath ( SERVLET_PATH ) ; contextHandler . addServlet ( new ServletHolder ( new MetricsServlet ( mCollectorRegistry ) ) , "/" ) ; return contextHandler ;
public class JustifiedTextView { /** * Verifies if word to be added will fit into the sentence * @ param word Word to be added * @ param sentence Sentence that will receive the new word * @ param addSpaces Specifies weather we should add spaces to validation or not * @ return True if word fits , false otherwise . */ private boolean fitsInSentence ( String word , List < String > sentence , boolean addSpaces ) { } }
String stringSentence = getSentenceFromList ( sentence , addSpaces ) ; stringSentence += word ; float sentenceWidth = getPaint ( ) . measureText ( stringSentence ) ; return sentenceWidth < viewWidth ;
public class SoapServerActionBuilder { /** * Generic response builder for sending SOAP fault messages to client . * @ return */ public SoapServerFaultResponseActionBuilder sendFault ( ) { } }
SoapServerFaultResponseActionBuilder soapServerResponseActionBuilder = new SoapServerFaultResponseActionBuilder ( action , soapServer ) . withApplicationContext ( applicationContext ) ; return soapServerResponseActionBuilder ;
public class Postfach { /** * Liefert die Postfach - Nummer als normale Zahl . Da die Nummer optional * sein kann , wird sie als { @ link Optional } zurueckgeliefert . * @ return z . B . 815 */ public Optional < BigInteger > getNummer ( ) { } }
if ( nummer == null ) { return Optional . empty ( ) ; } else { return Optional . of ( nummer ) ; }
public class StringBasedAnnotationHandler { /** * replace any placeholder from the given strings * @ param strs strings to replace the placeholders from * @ return same string with the placeholders merged * @ see org . codegist . crest . CRest # placeholder ( String , String ) */ protected String [ ] ph ( String ... strs ) { } }
String [ ] res = new String [ strs . length ] ; for ( int i = 0 ; i < res . length ; i ++ ) { res [ i ] = ph ( strs [ i ] ) ; } return res ;
public class MoreGraphs {
    /**
     * Actual content of the topological sort. This is a breadth-first (Kahn-style) approach:
     * repeatedly emit nodes with no remaining predecessors.
     *
     * @param graph the graph to be sorted
     * @param type the sort type (supplies the work-queue ordering)
     * @param <T> the node type
     * @return the sorted list
     * @throws CyclePresentException if the graph has cycles
     * @throws IllegalArgumentException if the graph is not directed or allows self loops
     */
    private static <T> @NonNull List<T> topologicalSort(final @NonNull Graph<T> graph, final @NonNull SortType<T> type) {
        checkArgument(graph.isDirected(), "the graph must be directed");
        checkArgument(!graph.allowsSelfLoops(), "the graph cannot allow self loops");
        // In-degree of every node that has at least one predecessor;
        // nodes absent from the map have in-degree zero.
        final Map<T, Integer> requiredCounts = new HashMap<>();
        for (final T node : graph.nodes()) {
            for (final T successor : graph.successors(node)) {
                requiredCounts.merge(successor, 1, (a, b) -> a + b);
            }
        }
        // Seed the work queue with the nodes that have no incoming edges.
        final Queue<T> processing = type.createQueue();
        final List<T> results = new ArrayList<>();
        for (final T node : graph.nodes()) {
            if (!requiredCounts.containsKey(node)) {
                processing.add(node);
            }
        }
        // Emit a ready node, then decrement each successor's in-degree; a successor
        // whose count reaches zero becomes ready and leaves the map.
        while (!processing.isEmpty()) {
            final T now = processing.poll();
            for (final T successor : graph.successors(now)) {
                final int newCount = requiredCounts.get(successor) - 1;
                if (newCount == 0) {
                    processing.add(successor);
                    requiredCounts.remove(successor);
                } else {
                    requiredCounts.put(successor, newCount);
                }
            }
            results.add(now);
        }
        // Leftover entries mean some nodes were never freed: the graph has a cycle.
        if (!requiredCounts.isEmpty()) {
            final StronglyConnectedComponentAnalyzer<T> analyzer = new StronglyConnectedComponentAnalyzer<>(graph);
            analyzer.analyze();
            throw new CyclePresentException("Graph (" + graph + ") has cycle(s): " + analyzer.renderCycles(), analyzer.components());
        }
        return results;
    }
}
public class DescribeDeploymentsResult {
    /**
     * An array of <code>Deployment</code> objects that describe the deployments.
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setDeployments(java.util.Collection)} or {@link #withDeployments(java.util.Collection)}
     * if you want to override the existing values.
     *
     * @param deployments An array of <code>Deployment</code> objects that describe the deployments.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public DescribeDeploymentsResult withDeployments(Deployment... deployments) {
        // Lazily create the backing list, pre-sized to the varargs length.
        if (this.deployments == null) {
            setDeployments(new com.amazonaws.internal.SdkInternalList<Deployment>(deployments.length));
        }
        // Append (not replace) the given values.
        for (Deployment ele : deployments) {
            this.deployments.add(ele);
        }
        return this;
    }
}
public class HelpFormatter { /** * Print the specified text to the specified PrintWriter . * @ param pw The printWriter to write the help to * @ param width The number of characters to display per line * @ param nextLineTabStop The position on the next line for the first tab . * @ param text The text to be written to the PrintWriter */ public void printWrapped ( PrintWriter pw , int width , int nextLineTabStop , String text ) { } }
StringBuffer sb = new StringBuffer ( text . length ( ) ) ; renderWrappedTextBlock ( sb , width , nextLineTabStop , text ) ; pw . println ( sb . toString ( ) ) ;
public class sslfipskey {
    /**
     * Use this API to Import sslfipskey resources in bulk.
     *
     * @param client the NITRO service to issue the request on
     * @param resources the FIPS keys to import
     * @return the bulk responses, or null when {@code resources} is null or empty
     * @throws Exception if the bulk request fails
     */
    public static base_responses Import(nitro_service client, sslfipskey resources[]) throws Exception {
        base_responses result = null;
        if (resources != null && resources.length > 0) {
            // Copy only the fields relevant to the "Import" operation into fresh
            // objects, so unrelated state on the caller's resources is not sent.
            sslfipskey Importresources[] = new sslfipskey[resources.length];
            for (int i = 0; i < resources.length; i++) {
                Importresources[i] = new sslfipskey();
                Importresources[i].fipskeyname = resources[i].fipskeyname;
                Importresources[i].key = resources[i].key;
                Importresources[i].inform = resources[i].inform;
                Importresources[i].wrapkeyname = resources[i].wrapkeyname;
                Importresources[i].iv = resources[i].iv;
                Importresources[i].exponent = resources[i].exponent;
            }
            result = perform_operation_bulk_request(client, Importresources, "Import");
        }
        return result;
    }
}
public class CommerceOrderPersistenceImpl {
    /**
     * Returns a range of all the commerce orders where userId = &#63; and createDate &lt; &#63;
     * and orderStatus = &#63;.
     *
     * <p>Useful when paginating results. Returns a maximum of <code>end - start</code> instances.
     * <code>start</code> and <code>end</code> are not primary keys; they are indexes in the result
     * set, so <code>0</code> refers to the first result. Setting both to {@link QueryUtil#ALL_POS}
     * returns the full result set.</p>
     *
     * @param userId the user ID
     * @param createDate the create date (exclusive upper bound)
     * @param orderStatus the order status
     * @param start the lower bound of the range of commerce orders
     * @param end the upper bound of the range of commerce orders (not inclusive)
     * @return the range of matching commerce orders
     */
    @Override
    public List<CommerceOrder> findByU_LtC_O(long userId, Date createDate, int orderStatus, int start, int end) {
        // Delegate to the comparator-accepting overload with no explicit ordering.
        return findByU_LtC_O(userId, createDate, orderStatus, start, end, null);
    }
}
public class DesignDocument { /** * Creates a new { @ link DesignDocument } . * @ param name the name of the design document . * @ param views all views it contains . * @ return a new { @ link DesignDocument } . */ public static DesignDocument create ( final String name , final List < View > views ) { } }
return create ( name , views , new HashMap < Option , Long > ( ) ) ;
public class OptionsHandler {
    /**
     * Build the representation for the given resource, advertising its LDP types and the
     * HTTP methods it supports.
     *
     * @param builder a response builder
     * @return the response builder
     */
    public ResponseBuilder ldpOptions(final ResponseBuilder builder) {
        LOGGER.debug("OPTIONS request for {}", getIdentifier());
        // Advertise every LDP type implied by the resource's interaction model as a "type" link.
        ldpResourceTypes(getResource().getInteractionModel()).forEach(type -> builder.link(type.getIRIString(), "type"));
        if (isMemento || TIMEMAP.equals(getRequest().getExt())) {
            // Mementos and TimeMaps are read-only
            builder.header(ALLOW, join(",", GET, HEAD, OPTIONS));
        } else {
            builder.header(ACCEPT_PATCH, APPLICATION_SPARQL_UPDATE);
            // This branch covers ACL resources and non-container (RDF/NonRDF) resources,
            // which do not accept POST.
            // NOTE(review): the original comment claimed "no DELETE or POST", but the ALLOW
            // list below does include DELETE — confirm which is intended.
            if (PreferAccessControl.equals(graphName) || getResource().getInteractionModel().equals(RDFSource) || getResource().getInteractionModel().equals(NonRDFSource)) {
                builder.header(ALLOW, join(",", GET, HEAD, OPTIONS, PATCH, PUT, DELETE));
            } else {
                // Containers and binaries support POST
                builder.header(ALLOW, join(",", GET, HEAD, OPTIONS, PATCH, PUT, DELETE, POST));
                // Advertise which RDF syntaxes a POSTed body may use.
                builder.header(ACCEPT_POST, getServices().getIOService().supportedWriteSyntaxes().stream().map(RDFSyntax::mediaType).collect(joining(",")));
            }
        }
        return builder;
    }
}
public class VFSUtils { /** * Copy all the children from the original { @ link VirtualFile } the target recursively . * @ param original the file to copy children from * @ param target the file to copy the children to * @ throws IOException if any problems occur copying the files */ public static void copyChildrenRecursive ( VirtualFile original , VirtualFile target ) throws IOException { } }
if ( original == null ) { throw MESSAGES . nullArgument ( "Original VirtualFile" ) ; } if ( target == null ) { throw MESSAGES . nullArgument ( "Target VirtualFile" ) ; } List < VirtualFile > children = original . getChildren ( ) ; for ( VirtualFile child : children ) { VirtualFile targetChild = target . getChild ( child . getName ( ) ) ; File childFile = child . getPhysicalFile ( ) ; if ( childFile . isDirectory ( ) ) { if ( ! targetChild . getPhysicalFile ( ) . mkdir ( ) ) { throw MESSAGES . problemCreatingNewDirectory ( targetChild ) ; } copyChildrenRecursive ( child , targetChild ) ; } else { FileInputStream is = new FileInputStream ( childFile ) ; writeFile ( targetChild , is ) ; } }
public class ConcurrentCacheAtom { /** * { @ inheritDoc } */ @ Override public void set ( V value ) { } }
writeLock ( ) ; V oldValue = setValueInsideWriteLock ( value ) ; writeUnlock ( ) ; releaseOldValue ( oldValue ) ; // release the old value - outside of lock
public class MetricContext { /** * Submit { @ link org . apache . gobblin . metrics . GobblinTrackingEvent } to all notification listeners attached to this or any * ancestor { @ link org . apache . gobblin . metrics . MetricContext } s . The argument for this method is mutated by the method , so it * should not be reused by the caller . * @ param nonReusableEvent { @ link GobblinTrackingEvent } to submit . This object will be mutated by the method , * so it should not be reused by the caller . */ public void submitEvent ( GobblinTrackingEvent nonReusableEvent ) { } }
nonReusableEvent . setTimestamp ( System . currentTimeMillis ( ) ) ; injectTagsToEvent ( nonReusableEvent ) ; EventNotification notification = new EventNotification ( nonReusableEvent ) ; sendNotification ( notification ) ;
public class EventArgument {
    /**
     * Setter for refid (JCas generated accessor).
     *
     * @generated
     * @param v value to set into the feature
     */
    public void setRefid(String v) {
        // Guard: raise a CAS error if the type system does not define the "refid" feature.
        if (EventArgument_Type.featOkTst && ((EventArgument_Type) jcasType).casFeat_refid == null)
            jcasType.jcas.throwFeatMissing("refid", "de.julielab.jules.types.ace.EventArgument");
        // Low-level CAS write of the string value at this annotation's address.
        jcasType.ll_cas.ll_setStringValue(addr, ((EventArgument_Type) jcasType).casFeatCode_refid, v);
    }
}
public class NextClient { /** * CLIENT PARAMS AND HEADERS */ public String getParam ( final String key ) { } }
AssertUtils . notEmpty ( key , "key must not be null or empty." ) ; return mParams . get ( key ) ;
public class Platform {
    /**
     * Starts the level 3 and level 4 containers, in that order: level 4 is built on top of
     * level 3 plus any components registered for addition at level 4.
     */
    private void startLevel34Containers() {
        level3 = start(new PlatformLevel3(level2));
        level4 = start(new PlatformLevel4(level3, level4AddedComponents));
    }
}
public class DualDecomposition {
    /**
     * Perform a single subgradient step, updating {@code factors} and {@code unaryFactors}
     * with the computed subgradient.
     *
     * @param factors the non-unary factors (Lagrange-adjusted)
     * @param unaryFactors one factor per variable, in variable order
     * @param stepSize the subgradient step size
     * @return the number of (variable, factor) disagreements found this step; zero means the
     *         local decodings agree everywhere
     */
    private int gradientUpdate(List<TensorBuilder> factors, List<TensorBuilder> unaryFactors, double stepSize) {
        // The best assignment, as computed from only the unary factors.
        int[] variableNums = new int[unaryFactors.size()];
        int[] variableSizes = new int[unaryFactors.size()];
        int[] variableValues = new int[unaryFactors.size()];
        locallyDecodeFactors(unaryFactors, variableNums, variableSizes, variableValues);
        // Identify where unary factors disagree with larger factors to compute the
        // subgradient. The three parallel lists record one entry per disagreement.
        List<Integer> variableDisagreementList = Lists.newArrayList();
        List<Integer> factorDisagreementList = Lists.newArrayList();
        List<Integer> factorValueList = Lists.newArrayList();
        for (int i = 0; i < factors.size(); i++) {
            TensorBuilder factor = factors.get(i);
            // The factor's own best (maximum-weight) joint assignment.
            int[] bestFactorValues = factor.keyNumToDimKey(factor.getLargestValues(1)[0]);
            int[] factorVariableNums = factor.getDimensionNumbers();
            for (int j = 0; j < factorVariableNums.length; j++) {
                // binarySearch assumes variableNums is sorted — presumably
                // locallyDecodeFactors fills it in ascending order; confirm.
                int index = Arrays.binarySearch(variableNums, factorVariableNums[j]);
                if (bestFactorValues[j] != variableValues[index]) {
                    // Factor and unary assignments disagree on variableNums[index].
                    // Store the disagreement, used later to compute the subgradient.
                    variableDisagreementList.add(index);
                    factorDisagreementList.add(i);
                    factorValueList.add(bestFactorValues[j]);
                }
            }
        }
        // Update the factor lagrange multipliers based on the disagreements.
        // For each disagreement, the subgradient update decreases the weight of
        // each factor's maximum weight assignment, and increases the weight of
        // the disagreeing factor's maximum weight assignment.
        for (int i = 0; i < variableDisagreementList.size(); i++) {
            int variableIndex = variableDisagreementList.get(i);
            int factorNum = factorDisagreementList.get(i);
            TensorBuilder factor = factors.get(factorNum);
            TensorBuilder unaryFactor = unaryFactors.get(variableIndex);
            int variableNum = variableNums[variableIndex];
            int variableSize = variableSizes[variableIndex];
            int bestFactorValue = factorValueList.get(i);
            int bestUnaryValue = variableValues[variableIndex];
            // One-hot tensors selecting each side's preferred value for this variable.
            SparseTensor unaryGradient = SparseTensor.singleElement(new int[] { variableNum }, new int[] { variableSize }, new int[] { bestUnaryValue }, 1);
            SparseTensor factorGradient = SparseTensor.singleElement(new int[] { variableNum }, new int[] { variableSize }, new int[] { bestFactorValue }, 1);
            // Push the unary factor toward the factor's choice and vice versa,
            // in opposite directions scaled by the step size.
            unaryFactor.incrementWithMultiplier(unaryGradient, -1.0 * stepSize);
            unaryFactor.incrementWithMultiplier(factorGradient, stepSize);
            factor.incrementWithMultiplier(unaryGradient, stepSize);
            factor.incrementWithMultiplier(factorGradient, -1.0 * stepSize);
        }
        return variableDisagreementList.size();
    }
}
public class DefaultMessageTranslator { /** * Visit function for compound constraints like And / Or / XOr . * < p > syntax : < code > CONSTRAINT MESSAGE { code } CONSTRAINT < / code > < / p > * @ param compoundConstraint */ protected void visit ( CompoundConstraint compoundConstraint ) { } }
Iterator it = compoundConstraint . iterator ( ) ; String compoundMessage = getMessageCode ( compoundConstraint ) ; while ( it . hasNext ( ) ) { Constraint p = ( Constraint ) it . next ( ) ; visitorSupport . invokeVisit ( this , p ) ; if ( it . hasNext ( ) ) { add ( compoundMessage , null , compoundMessage ) ; } }
public class SVNLogFileParser {
    /**
     * Internal method used for XML parsing (SAX start-element callback).
     * Opens a new {@code LogEntry} on a logentry element and records the current
     * path element's action attribute.
     */
    @Override
    public void startElement(String uri, String localName, String qName, Attributes attributes) throws SAXException {
        if (localName.equals(TAG_LOG_ENTRY)) {
            // A new logentry must not start while a previous one is still open.
            if (currentTags != null) {
                throw new IllegalStateException("Should not have tags when a config starts in the XML, but had: " + currentTags);
            }
            currentTags = new LogEntry();
            // The revision attribute is optional in the XML.
            if (attributes.getValue(TAG_REVISION) != null) {
                currentTags.revision = Long.parseLong(attributes.getValue(TAG_REVISION));
            }
        } else if (localName.equals(TAG_PATH)) {
            // Remember this path element's action attribute for later use.
            lastAction = attributes.getValue(TAG_PATH_ACTION);
        }
    }
}
public class GlobalUsersInner {
    /**
     * Stops an environment by stopping all resources inside the environment.
     * This operation can take a while to complete; this overload blocks until it does.
     *
     * @param userName The name of the user.
     * @param environmentId The resourceId of the environment
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     */
    public void stopEnvironment(String userName, String environmentId) {
        // Block on the async call; last() waits for the final (completion) response.
        stopEnvironmentWithServiceResponseAsync(userName, environmentId).toBlocking().last().body();
    }
}
public class RLSControllerImpl {
    /**
     * Cancels the suspend operation identified by the supplied token byte
     * array. If there are no outstanding suspend operations remaining, i/o to
     * the recovery log files is resumed.
     *
     * @param tokenBytes a byte array representation of the RLSSuspendToken,
     *            identifying the corresponding suspend operation to cancel
     * @exception RLSInvalidSuspendTokenException Thrown if the token byte array
     *            is null, invalid or has expired
     */
    public void resume(byte[] tokenBytes) throws RLSInvalidSuspendTokenException {
        if (tc.isEntryEnabled())
            Tr.entry(tc, "resume", RLSUtils.toHexString(tokenBytes));
        // Suspend/resume of the recovery log service is not supported on z/OS.
        if (Configuration.isZOS()) {
            if (tc.isEventEnabled())
                Tr.event(tc, "Operation not supported on ZOS - throwing UnsupportedOperationException");
            if (tc.isEntryEnabled())
                Tr.exit(tc, "resume", "java.lang.UnsupportedOperationException");
            throw new UnsupportedOperationException();
        }
        // RLSSuspendTokenImpl token = new RLSSuspendTokenImpl(tokenBytes);
        // Rebuild the token object from its serialized byte form; presumably the
        // factory validates the bytes and raises the declared
        // RLSInvalidSuspendTokenException — TODO confirm in the component impl.
        RLSSuspendToken token = Configuration.getRecoveryLogComponent().createRLSSuspendToken(tokenBytes);
        RLSControllerImpl.resumeRLS(token);
        if (tc.isEntryEnabled())
            Tr.exit(tc, "resume");
    }
}
public class ParameterSerializer { /** * Serialize a java object to a SFSObject * @ param value value to parse * @ param method structure of getter method * @ param sfsObject the SFSObject */ @ SuppressWarnings ( "rawtypes" ) protected void parseMethod ( Object value , GetterMethodCover method , ISFSObject sfsObject ) { } }
Object answer = value ; if ( method . isColection ( ) ) { answer = parseCollection ( method , ( Collection ) value ) ; } else if ( method . isTwoDimensionsArray ( ) ) { answer = parseTwoDimensionsArray ( method , value ) ; } else if ( method . isArray ( ) ) { answer = parseArray ( method , value ) ; } else if ( method . isObject ( ) ) { answer = parseObject ( method , value ) ; } else if ( method . isChar ( ) ) { answer = ( byte ) ( ( ( Character ) value ) . charValue ( ) ) ; } SFSDataType type = getSFSDataType ( method ) ; sfsObject . put ( method . getKey ( ) , new SFSDataWrapper ( type , answer ) ) ;
public class Reflecter { /** * Determines whether the delegate has a default constructor */ public boolean hasDefaultConstructor ( ) { } }
if ( ! delegate . isPresent ( ) ) { return false ; } final Constructor < ? > [ ] constructors = delegateClass ( ) . getConstructors ( ) ; for ( final Constructor < ? > constructor : constructors ) { if ( constructor . getParameterTypes ( ) . length == 0 ) { return true ; } } return false ;
public class DefaultStateMachineConfigCache { /** * See also DefaultTenantUserApi - we use the same conventions as the main XML cache ( so we can re - use the invalidation code ) */ private String getCacheKeyName ( final String pluginName , final InternalTenantContext internalContext ) { } }
final StringBuilder tenantKey = new StringBuilder ( TenantKey . PLUGIN_PAYMENT_STATE_MACHINE_ . toString ( ) ) ; tenantKey . append ( pluginName ) ; tenantKey . append ( CacheControllerDispatcher . CACHE_KEY_SEPARATOR ) ; tenantKey . append ( internalContext . getTenantRecordId ( ) ) ; return tenantKey . toString ( ) ;
public class BasicSchemaSpecification { /** * Determines if the policy is satisfied by the supplied LdapAttributes object . * @ throws NamingException */ public boolean isSatisfiedBy ( LdapAttributes record ) throws NamingException { } }
if ( record != null ) { // DN is required . LdapName dn = record . getName ( ) ; if ( dn != null ) { // objectClass definition is required . if ( record . get ( "objectClass" ) != null ) { // Naming attribute is required . Rdn rdn = dn . getRdn ( dn . size ( ) - 1 ) ; if ( record . get ( rdn . getType ( ) ) != null ) { Object object = record . get ( rdn . getType ( ) ) . get ( ) ; if ( object instanceof String ) { String value = ( String ) object ; if ( ( ( String ) rdn . getValue ( ) ) . equalsIgnoreCase ( value ) ) { return true ; } } else if ( object instanceof byte [ ] ) { String rdnValue = LdapEncoder . printBase64Binary ( ( ( String ) rdn . getValue ( ) ) . getBytes ( ) ) ; String attributeValue = LdapEncoder . printBase64Binary ( ( byte [ ] ) object ) ; if ( rdnValue . equals ( attributeValue ) ) return true ; } } } }
public class ReadStream { /** * Close the stream . */ @ Override public final void close ( ) { } }
try { TempBufferData tempBuffer = _tempRead ; if ( tempBuffer != null && ! _isReuseBuffer ) { _tempRead = null ; _readBuffer = null ; tempBuffer . free ( ) ; } if ( _source != null ) { StreamImpl s = _source ; _source = null ; s . close ( ) ; } } catch ( IOException e ) { log . log ( Level . FINE , e . toString ( ) , e ) ; }
public class EigenPowerMethod_DDRM {
    /**
     * Computes the most dominant eigen vector of A using a shifted matrix.
     * The shifted matrix is defined as <b>B = A - &alpha;I</b> and can converge
     * faster if &alpha; is chosen wisely. In general it is easier to choose a
     * value for &alpha; that will converge faster with the shift-invert
     * strategy than this one.
     *
     * @param A The matrix.
     * @param alpha Shifting factor.
     * @return If it converged or not.
     */
    public boolean computeShiftDirect(DMatrixRMaj A, double alpha) {
        // B = A + (-alpha)*I, written into the field B.
        SpecializedOps_DDRM.addIdentity(A, B, -alpha);
        // Run the plain power method on the shifted matrix.
        return computeDirect(B);
    }
}
public class ArrayMath { /** * Takes a pair of arrays , A and B , which represent corresponding * outcomes of a pair of random variables : say , results for two different * classifiers on a sequence of inputs . Returns the estimated * probability that the difference between the means of A and B is not * significant , that is , the significance level . This is computed by * " approximate randomization " . The test statistic is the absolute * difference between the means of the two arrays . A randomized test * statistic is computed the same way after initially randomizing the * arrays by swapping each pair of elements with 50 % probability . For * the given number of iterations , we generate a randomized test * statistic and compare it to the actual test statistic . The return * value is the proportion of iterations in which a randomized test * statistic was found to exceed the actual test statistic . * @ param A Outcome of one r . v . * @ param B Outcome of another r . v . * @ return Significance level by randomization */ public static double sigLevelByApproxRand ( double [ ] A , double [ ] B , int iterations ) { } }
if ( A . length == 0 ) throw new IllegalArgumentException ( "Input arrays must not be empty!" ) ; if ( A . length != B . length ) throw new IllegalArgumentException ( "Input arrays must have equal length!" ) ; if ( iterations <= 0 ) throw new IllegalArgumentException ( "Number of iterations must be positive!" ) ; double testStatistic = absDiffOfMeans ( A , B , false ) ; // not randomized int successes = 0 ; for ( int i = 0 ; i < iterations ; i ++ ) { double t = absDiffOfMeans ( A , B , true ) ; // randomized if ( t >= testStatistic ) successes ++ ; } return ( double ) ( successes + 1 ) / ( double ) ( iterations + 1 ) ;
public class TimestampInequalityRule { /** * { @ inheritDoc } */ public boolean evaluate ( final LoggingEvent event , Map matches ) { } }
String eventTimeStampString = RESOLVER . getValue ( LoggingEventFieldResolver . TIMESTAMP_FIELD , event ) . toString ( ) ; long eventTimeStamp = Long . parseLong ( eventTimeStampString ) / 1000 * 1000 ; boolean result = false ; long first = eventTimeStamp ; long second = timeStamp ; if ( "<" . equals ( inequalitySymbol ) ) { result = first < second ; } else if ( ">" . equals ( inequalitySymbol ) ) { result = first > second ; } else if ( "<=" . equals ( inequalitySymbol ) ) { result = first <= second ; } else if ( ">=" . equals ( inequalitySymbol ) ) { result = first >= second ; } if ( result && matches != null ) { Set entries = ( Set ) matches . get ( LoggingEventFieldResolver . TIMESTAMP_FIELD ) ; if ( entries == null ) { entries = new HashSet ( ) ; matches . put ( LoggingEventFieldResolver . TIMESTAMP_FIELD , entries ) ; } entries . add ( eventTimeStampString ) ; } return result ;
public class UnmodifiableSortedSet { /** * This method will take a MutableSortedSet and wrap it directly in a UnmodifiableSortedSet . It will * take any other non - GS - SortedSet and first adapt it will a SortedSetAdapter , and then return a * UnmodifiableSortedSet that wraps the adapter . */ public static < E , S extends SortedSet < E > > UnmodifiableSortedSet < E > of ( S set ) { } }
if ( set == null ) { throw new IllegalArgumentException ( "cannot create an UnmodifiableSortedSet for null" ) ; } return new UnmodifiableSortedSet < E > ( SortedSetAdapter . adapt ( set ) ) ;
public class ArchiveExtractor { /** * extract image layers */ public void extractDockerImageLayers ( File imageTarFile , File imageExtractionDir , Boolean deleteTarFiles ) { } }
FilesScanner filesScanner = new FilesScanner ( ) ; boolean success = false ; // docker layers are saved as TAR file ( we save it as TAR ) if ( imageTarFile . getName ( ) . endsWith ( TAR_SUFFIX ) ) { success = unTar ( imageTarFile . getName ( ) . toLowerCase ( ) , imageExtractionDir . getAbsolutePath ( ) , imageTarFile . getPath ( ) ) ; boolean deleted = false ; if ( deleteTarFiles ) { deleted = imageTarFile . delete ( ) ; } if ( ! deleted ) { logger . warn ( "Was not able to delete {} (docker image TAR file)" , imageTarFile . getName ( ) ) ; } } if ( success ) { String [ ] fileNames = filesScanner . getDirectoryContent ( imageExtractionDir . getAbsolutePath ( ) , new String [ ] { LAYER_TAR } , new String [ ] { } , true , false ) ; for ( String filename : fileNames ) { File layerToExtract = new File ( imageExtractionDir + File . separator + filename ) ; extractDockerImageLayers ( layerToExtract , layerToExtract . getParentFile ( ) , deleteTarFiles ) ; } } else { logger . warn ( "Was not able to extract {} (docker image TAR file)" , imageTarFile . getName ( ) ) ; }
public class WebGL10 {
    /**
     * {@code glBlendFuncSeparate} defines the operation of blending when it is
     * enabled, specifying the scale factors for the RGB components and the
     * alpha component independently: {@code srcRGB}/{@code dstRGB} scale the
     * source/destination red, green and blue components, while
     * {@code srcAlpha}/{@code dstAlpha} scale the source/destination alpha
     * component.
     *
     * <p>{@link #GL_INVALID_ENUM} is generated if {@code srcRGB},
     * {@code dstRGB}, {@code srcAlpha} or {@code dstAlpha} is not an accepted
     * value. Accepted values include {@link #GL_ZERO}, {@link #GL_ONE},
     * {@link #GL_SRC_COLOR}, {@link #GL_ONE_MINUS_SRC_COLOR},
     * {@link #GL_DST_COLOR}, {@link #GL_ONE_MINUS_DST_COLOR},
     * {@link #GL_SRC_ALPHA}, {@link #GL_ONE_MINUS_SRC_ALPHA},
     * {@link #GL_DST_ALPHA}, {@link #GL_ONE_MINUS_DST_ALPHA},
     * {@link #GL_CONSTANT_COLOR}, {@link #GL_ONE_MINUS_CONSTANT_COLOR},
     * {@link #GL_CONSTANT_ALPHA}, {@link #GL_ONE_MINUS_CONSTANT_ALPHA} and
     * (for source factors only) {@link #GL_SRC_ALPHA_SATURATE}.</p>
     *
     * @param srcRGB how the source red/green/blue blending factors are computed; initially {@link #GL_ONE}
     * @param dstRGB how the destination red/green/blue blending factors are computed; initially {@link #GL_ZERO}
     * @param srcAlpha how the source alpha blending factor is computed; initially {@link #GL_ONE}
     * @param dstAlpha how the destination alpha blending factor is computed; initially {@link #GL_ZERO}
     */
    public static void glBlendFuncSeparate(int srcRGB, int dstRGB, int srcAlpha, int dstAlpha) {
        // Fail fast if the current GL context does not support this call.
        checkContextCompatibility();
        nglBlendFuncSeparate(srcRGB, dstRGB, srcAlpha, dstAlpha);
    }
}
public class WebAuthenticatorProxy {
    /**
     * Authenticates the web request. First consults the provider
     * authenticator proxy; a CONTINUE status means no provider made a final
     * decision, in which case the request either passes through (unprotected
     * URI) or is delegated to the appropriate WebAuthenticator. When
     * certificate login fails and fail-over is allowed, a second
     * authentication attempt is made with the configured fail-over
     * authenticator, and both outcomes are audited.
     *
     * {@inheritDoc}
     */
    @Override
    public AuthenticationResult authenticate(WebRequest webRequest) {
        AuthenticationResult authResult = providerAuthenticatorProxy.authenticate(webRequest);
        authResult.setTargetRealm(authResult.realm);
        String authType = webRequest.getLoginConfig().getAuthenticationMethod();
        // CONTINUE + "do not continue after unprotected URI" => treat as success.
        // NOTE(review): "Successuful" is a typo in the runtime message; left
        // as-is here since this is a documentation-only pass.
        if ((authResult.getStatus() == AuthResult.CONTINUE) && (!webRequest.isContinueAfterUnprotectedURI())) {
            return new AuthenticationResult(AuthResult.SUCCESS, "Successuful authentication for unprotectedURI.");
        }
        if ((authResult.getStatus() == AuthResult.CONTINUE)) {
            WebAuthenticator authenticator = getWebAuthenticator(webRequest);
            // NOTE(review): the failure message below is duplicated verbatim.
            if (authenticator == null) {
                return new AuthenticationResult(AuthResult.FAILURE, "Unable to get the appropriate WebAuthenticator. Unable to get the appropriate WebAuthenticator.");
            }
            authResult = authenticator.authenticate(webRequest);
            // Certificate login failed: optionally fail over to another auth method.
            if (authenticator instanceof CertificateLoginAuthenticator && authResult != null && authResult.getStatus() != AuthResult.SUCCESS && webAppSecurityConfig.allowFailOver() && !webRequest.isDisableClientCertFailOver()) {
                // Record both the original and fail-over auth types for auditing.
                extraAuditData.put(AuditConstants.ORIGINAL_AUTH_TYPE, authType);
                authType = getFailOverToAuthType(webRequest);
                extraAuditData.put(AuditConstants.FAILOVER_AUTH_TYPE, authType);
                authenticator = getAuthenticatorForFailOver(authType, webRequest);
                WebReply reply = new DenyReply("AuthenticationFailed");
                // Audit the original (denied) certificate authentication attempt.
                Audit.audit(Audit.EventID.SECURITY_AUTHN_01, webRequest, authResult, Integer.valueOf(reply.getStatusCode()));
                if (authenticator == null) {
                    return new AuthenticationResult(AuthResult.FAILURE, "Unable to get the failover WebAuthenticator. Unable to authenticate request.");
                } else {
                    authResult = authenticator.authenticate(webRequest);
                    // Audit the fail-over attempt with the appropriate status code.
                    if (authResult != null && authResult.getStatus() == AuthResult.SUCCESS) {
                        Audit.audit(Audit.EventID.SECURITY_AUTHN_FAILOVER_01, webRequest, authResult, extraAuditData, Integer.valueOf(HttpServletResponse.SC_OK));
                    } else {
                        Audit.audit(Audit.EventID.SECURITY_AUTHN_FAILOVER_01, webRequest, authResult, extraAuditData, Integer.valueOf(reply.getStatusCode()));
                    }
                }
            }
        }
        if (authResult != null && authResult.getStatus() == AuthResult.SUCCESS) {
            // Expose the effective auth type to downstream servlet code.
            SRTServletRequestUtils.setPrivateAttribute(webRequest.getHttpServletRequest(), AUTH_TYPE, authType);
            // FORM login: restore POST parameters saved before the login redirect.
            if (LoginConfiguration.FORM.equalsIgnoreCase(authType)) {
                postParameterHelper.restore(webRequest.getHttpServletRequest(), webRequest.getHttpServletResponse());
            }
        }
        return authResult;
    }
}
public class RobotoTypefaces { /** * Obtain typeface . * @ param context The Context the widget is running in , through which it can access the current theme , resources , etc . * @ param fontFamily The value of " robotoFontFamily " attribute * @ param textWeight The value of " robotoTextWeight " attribute * @ param textStyle The value of " robotoTextStyle " attribute * @ return specify { @ link Typeface } or throws IllegalArgumentException . */ @ NonNull public static Typeface obtainTypeface ( @ NonNull Context context , @ RobotoFontFamily int fontFamily , @ RobotoTextWeight int textWeight , @ RobotoTextStyle int textStyle ) { } }
@ RobotoTypeface int typeface ; if ( fontFamily == FONT_FAMILY_ROBOTO ) { if ( textStyle == TEXT_STYLE_NORMAL ) { switch ( textWeight ) { case TEXT_WEIGHT_NORMAL : typeface = TYPEFACE_ROBOTO_REGULAR ; break ; case TEXT_WEIGHT_THIN : typeface = TYPEFACE_ROBOTO_THIN ; break ; case TEXT_WEIGHT_LIGHT : typeface = TYPEFACE_ROBOTO_LIGHT ; break ; case TEXT_WEIGHT_MEDIUM : typeface = TYPEFACE_ROBOTO_MEDIUM ; break ; case TEXT_WEIGHT_BOLD : typeface = TYPEFACE_ROBOTO_BOLD ; break ; case TEXT_WEIGHT_ULTRA_BOLD : typeface = TYPEFACE_ROBOTO_BLACK ; break ; default : throw new IllegalArgumentException ( "`robotoTextWeight` attribute value " + textWeight + " is not supported for this fontFamily " + fontFamily + " and textStyle " + textStyle ) ; } } else if ( textStyle == TEXT_STYLE_ITALIC ) { switch ( textWeight ) { case TEXT_WEIGHT_NORMAL : typeface = TYPEFACE_ROBOTO_ITALIC ; break ; case TEXT_WEIGHT_THIN : typeface = TYPEFACE_ROBOTO_THIN_ITALIC ; break ; case TEXT_WEIGHT_LIGHT : typeface = TYPEFACE_ROBOTO_LIGHT_ITALIC ; break ; case TEXT_WEIGHT_MEDIUM : typeface = TYPEFACE_ROBOTO_MEDIUM_ITALIC ; break ; case TEXT_WEIGHT_BOLD : typeface = TYPEFACE_ROBOTO_BOLD_ITALIC ; break ; case TEXT_WEIGHT_ULTRA_BOLD : typeface = TYPEFACE_ROBOTO_BLACK_ITALIC ; break ; default : throw new IllegalArgumentException ( "`robotoTextWeight` attribute value " + textWeight + " is not supported for this fontFamily " + fontFamily + " and textStyle " + textStyle ) ; } } else { throw new IllegalArgumentException ( "`robotoTextStyle` attribute value " + textStyle + " is not supported for this fontFamily " + fontFamily ) ; } } else if ( fontFamily == FONT_FAMILY_ROBOTO_CONDENSED ) { if ( textStyle == TEXT_STYLE_NORMAL ) { switch ( textWeight ) { case TEXT_WEIGHT_NORMAL : typeface = TYPEFACE_ROBOTO_CONDENSED_REGULAR ; break ; case TEXT_WEIGHT_THIN : typeface = TYPEFACE_ROBOTO_CONDENSED_LIGHT ; break ; case TEXT_WEIGHT_BOLD : typeface = TYPEFACE_ROBOTO_CONDENSED_BOLD ; break ; default : throw new 
IllegalArgumentException ( "`robotoTextWeight` attribute value " + textWeight + " is not supported for this fontFamily " + fontFamily + " and textStyle " + textStyle ) ; } } else if ( textStyle == TEXT_STYLE_ITALIC ) { switch ( textWeight ) { case TEXT_WEIGHT_NORMAL : typeface = TYPEFACE_ROBOTO_CONDENSED_ITALIC ; break ; case TEXT_WEIGHT_THIN : typeface = TYPEFACE_ROBOTO_CONDENSED_LIGHT_ITALIC ; break ; case TEXT_WEIGHT_BOLD : typeface = TYPEFACE_ROBOTO_CONDENSED_BOLD_ITALIC ; break ; default : throw new IllegalArgumentException ( "`robotoTextWeight` attribute value " + textWeight + " is not supported for this fontFamily " + fontFamily + " and textStyle " + textStyle ) ; } } else { throw new IllegalArgumentException ( "`robotoTextStyle` attribute value " + textStyle + " is not supported for this fontFamily " + fontFamily ) ; } } else if ( fontFamily == FONT_FAMILY_ROBOTO_SLAB ) { if ( textStyle == TEXT_STYLE_NORMAL ) { switch ( textWeight ) { case TEXT_WEIGHT_NORMAL : typeface = TYPEFACE_ROBOTO_SLAB_REGULAR ; break ; case TEXT_WEIGHT_THIN : typeface = TYPEFACE_ROBOTO_SLAB_THIN ; break ; case TEXT_WEIGHT_LIGHT : typeface = TYPEFACE_ROBOTO_SLAB_LIGHT ; break ; case TEXT_WEIGHT_BOLD : typeface = TYPEFACE_ROBOTO_SLAB_BOLD ; break ; default : throw new IllegalArgumentException ( "`robotoTextWeight` attribute value " + textWeight + " is not supported for this fontFamily " + fontFamily + " and textStyle " + textStyle ) ; } } else { throw new IllegalArgumentException ( "`robotoTextStyle` attribute value " + textStyle + " is not supported for this fontFamily " + fontFamily ) ; } } else if ( fontFamily == FONT_FAMILY_ROBOTO_MONO ) { if ( textStyle == TEXT_STYLE_NORMAL ) { switch ( textWeight ) { case TEXT_WEIGHT_NORMAL : typeface = TYPEFACE_ROBOTO_MONO_REGULAR ; break ; case TEXT_WEIGHT_THIN : typeface = TYPEFACE_ROBOTO_MONO_THIN ; break ; case TEXT_WEIGHT_LIGHT : typeface = TYPEFACE_ROBOTO_MONO_LIGHT ; break ; case TEXT_WEIGHT_MEDIUM : typeface = TYPEFACE_ROBOTO_MONO_MEDIUM 
; break ; case TEXT_WEIGHT_BOLD : typeface = TYPEFACE_ROBOTO_MONO_BOLD ; break ; default : throw new IllegalArgumentException ( "`robotoTextWeight` attribute value " + textWeight + " is not supported for this fontFamily " + fontFamily + " and textStyle " + textStyle ) ; } } else if ( textStyle == TEXT_STYLE_ITALIC ) { switch ( textWeight ) { case TEXT_WEIGHT_NORMAL : typeface = TYPEFACE_ROBOTO_MONO_ITALIC ; break ; case TEXT_WEIGHT_THIN : typeface = TYPEFACE_ROBOTO_MONO_THIN_ITALIC ; break ; case TEXT_WEIGHT_LIGHT : typeface = TYPEFACE_ROBOTO_MONO_LIGHT_ITALIC ; break ; case TEXT_WEIGHT_MEDIUM : typeface = TYPEFACE_ROBOTO_MONO_MEDIUM_ITALIC ; break ; case TEXT_WEIGHT_BOLD : typeface = TYPEFACE_ROBOTO_MONO_BOLD_ITALIC ; break ; default : throw new IllegalArgumentException ( "`robotoTextWeight` attribute value " + textWeight + " is not supported for this fontFamily " + fontFamily + " and textStyle " + textStyle ) ; } } else { throw new IllegalArgumentException ( "`robotoTextStyle` attribute value " + textStyle + " is not supported for this fontFamily " + fontFamily ) ; } } else { throw new IllegalArgumentException ( "Unknown `robotoFontFamily` attribute value " + fontFamily ) ; } return obtainTypeface ( context , typeface ) ;
public class JdbcClient {
    /**
     * Cancels a job instance that has not started yet. First flips the
     * instance to the cancelled state via a conditional update (which fails if
     * the job is running, finished, or unknown), then moves the instance to
     * the history table and deletes it from the live queue in a single commit.
     *
     * @param idJob the job instance identifier
     */
    @Override
    public void cancelJob(int idJob) {
        jqmlogger.trace("Job instance number " + idJob + " will be cancelled");
        DbConn cnx = null;
        try {
            cnx = getDbSession();
            // Conditional update: affects exactly one row only when the job is
            // still cancellable.
            QueryResult res = cnx.runUpdate("jj_update_cancel_by_id", idJob);
            if (res.nbUpdated != 1) {
                throw new JqmClientException("the job is already running, has already finished or never existed to begin with");
            }
        } catch (RuntimeException e) {
            // Close here because the finally block below is not reached on
            // this path.
            closeQuietly(cnx);
            throw e;
        }
        try {
            // Same connection/transaction: historise then remove, committed
            // atomically.
            History.create(cnx, idJob, State.CANCELLED, null);
            JobInstance.delete_id(cnx, idJob);
            cnx.commit();
        } catch (Exception e) {
            throw new JqmClientException("could not historise the job instance after it was cancelled", e);
        } finally {
            closeQuietly(cnx);
        }
    }
}
public class WTable {
    /**
     * Recursively calculates the row ids of a row's children, appending each
     * visited row to {@code rows} and linking it to its parent wrapper.
     * Recursion stops at rows that have a custom renderer, are not expandable,
     * have no children, or are collapsed (unless the table is in CLIENT expand
     * mode, where all rows are sent to the client).
     *
     * @param rows the list of row ids being built
     * @param row the current row
     * @param model the table model
     * @param parent the row's parent
     * @param expanded the set of expanded rows
     * @param mode the table expand mode
     * @param forUpdate true if building a list of row ids to update
     * @param editable true if the table is editable
     */
    @SuppressWarnings("checkstyle:parameternumber")
    private void calcChildrenRowIds(final List<RowIdWrapper> rows, final RowIdWrapper row, final TableModel model, final RowIdWrapper parent, final Set<?> expanded, final ExpandMode mode, final boolean forUpdate, final boolean editable) {
        // Add row
        rows.add(row);
        // Add to parent
        if (parent != null) {
            parent.addChild(row);
        }
        List<Integer> rowIndex = row.getRowIndex();
        // If row has a renderer, then dont need to process its children (should not have any anyway as it is a "leaf")
        if (model.getRendererClass(rowIndex) != null) {
            return;
        }
        // Check row is expandable
        if (!model.isExpandable(rowIndex)) {
            return;
        }
        // Check has children
        if (!model.hasChildren(rowIndex)) {
            return;
        }
        row.setHasChildren(true);
        // Always add children if CLIENT mode or row is expanded
        boolean addChildren = (mode == ExpandMode.CLIENT) || (expanded != null && expanded.contains(row.getRowKey()));
        if (!addChildren) {
            return;
        }
        // Get actual child count
        int children = model.getChildCount(rowIndex);
        if (children == 0) {
            // Could be there are no children even though hasChildren returned true
            row.setHasChildren(false);
            return;
        }
        // Render mode: keep rows that have been expanded (only if table editable)
        if (!forUpdate && editable) {
            addPrevExpandedRow(row.getRowKey());
        }
        // Add children by recursing into each child row
        for (int i = 0; i < children; i++) {
            // Child index path = this row's path plus the child's position
            List<Integer> nextRow = new ArrayList<>(row.getRowIndex());
            nextRow.add(i);
            // Create wrapper for the child and recurse
            Object key = model.getRowKey(nextRow);
            RowIdWrapper wrapper = new RowIdWrapper(nextRow, key, row);
            calcChildrenRowIds(rows, wrapper, model, row, expanded, mode, forUpdate, editable);
        }
    }
}
public class JCompositeRowsSelectorPanel { /** * GEN - LAST : event _ jButtonRemoveActionPerformed */ private void jTreeGraph1MouseClicked ( java . awt . event . MouseEvent evt ) // GEN - FIRST : event _ jTreeGraph1MouseClicked { } }
// GEN - HEADEREND : event _ jTreeGraph1MouseClicked if ( evt . getClickCount ( ) == 2 ) { TreePath [ ] paths = jTreeGraph1 . getSelectionPaths ( ) ; if ( paths != null && paths . length == 1 ) { if ( paths [ 0 ] . getPath ( ) . length == 3 ) { addItemsToComposite ( paths ) ; } } }
public class vpntrafficaction {
    /**
     * Use this API to update vpntrafficaction.
     *
     * @param client the NITRO service connection to issue the update through
     * @param resource the vpntrafficaction carrying the desired field values;
     *            {@code name} identifies the action to update
     * @return the server's response to the update request
     * @throws Exception if the update request fails
     */
    public static base_response update(nitro_service client, vpntrafficaction resource) throws Exception {
        // Copy only the writable fields onto a fresh object — presumably so the
        // update payload omits read-only attributes (NITRO SDK convention;
        // TODO confirm against other *action resources).
        vpntrafficaction updateresource = new vpntrafficaction();
        updateresource.name = resource.name;
        updateresource.apptimeout = resource.apptimeout;
        updateresource.sso = resource.sso;
        updateresource.formssoaction = resource.formssoaction;
        updateresource.fta = resource.fta;
        updateresource.wanscaler = resource.wanscaler;
        updateresource.kcdaccount = resource.kcdaccount;
        updateresource.samlssoprofile = resource.samlssoprofile;
        return updateresource.update_resource(client);
    }
}
public class AbstractParentCommandNode { /** * Returns the template ' s first child node that matches the given condition . */ @ Nullable public SoyNode firstChildThatMatches ( Predicate < SoyNode > condition ) { } }
int firstChildIndex = 0 ; while ( firstChildIndex < numChildren ( ) && ! condition . test ( getChild ( firstChildIndex ) ) ) { firstChildIndex ++ ; } if ( firstChildIndex < numChildren ( ) ) { return getChild ( firstChildIndex ) ; } return null ;
public class ApiErrorExtractor {
    /**
     * Determines whether the given Throwable, or any Throwable in its cause
     * chain, is an IO error ({@link IOException} or {@link IOError}).
     *
     * @param throwable the Throwable to check
     * @return true if the Throwable is a result of an IO error
     */
    public boolean ioError(Throwable throwable) {
        // Walk the cause chain iteratively instead of recursing.
        for (Throwable current = throwable; current != null; current = current.getCause()) {
            if (current instanceof IOException || current instanceof IOError) {
                return true;
            }
        }
        return false;
    }
}
public class Composite { /** * Add a { @ link Paintable } to this composite . * @ param p * the < code > Paintable < / code > to add to this composite */ public void addChild ( Paintable p ) { } }
if ( null == p ) { throw new IllegalArgumentException ( "Please provide a paintable." ) ; } if ( this . equals ( p ) ) { throw new IllegalArgumentException ( "Cannot add itself as a child." ) ; } if ( children . contains ( p ) ) { return ; } // nothing changes , no exception if ( p instanceof Composite && ( ( Composite ) p ) . contains ( this ) ) { throw new IllegalArgumentException ( "Cannot add this Paintable (circular reference)." ) ; } children . add ( p ) ;
public class DBAdapter {
    /**
     * Adds a JSON string to the DB.
     *
     * @param obj the JSON to record
     * @param table the table to insert into
     * @return the number of rows in the table, or DB_OUT_OF_MEMORY_ERROR /
     *         DB_UPDATE_ERROR
     */
    synchronized int storeObject(JSONObject obj, Table table) {
        // Refuse the write entirely when device storage is below the threshold.
        if (!this.belowMemThreshold()) {
            Logger.v("There is not enough space left on the device to store data, data discarded");
            return DB_OUT_OF_MEMORY_ERROR;
        }
        final String tableName = table.getName();
        Cursor cursor = null;
        int count = DB_UPDATE_ERROR;
        // noinspection TryFinallyCanBeTryWithResources
        try {
            final SQLiteDatabase db = dbHelper.getWritableDatabase();
            final ContentValues cv = new ContentValues();
            cv.put(KEY_DATA, obj.toString());
            cv.put(KEY_CREATED_AT, System.currentTimeMillis());
            db.insert(tableName, null, cv);
            // Return the post-insert row count as the success value.
            cursor = db.rawQuery("SELECT COUNT(*) FROM " + tableName, null);
            cursor.moveToFirst();
            count = cursor.getInt(0);
        } catch (final SQLiteException e) {
            getConfigLogger().verbose("Error adding data to table " + tableName + " Recreating DB");
            // Close and null the cursor before dropping the database so the
            // finally block does not touch it again.
            if (cursor != null) {
                cursor.close();
                cursor = null;
            }
            dbHelper.deleteDatabase();
        } finally {
            if (cursor != null) {
                cursor.close();
            }
            dbHelper.close();
        }
        return count;
    }
}
public class NaetherImpl { /** * / * ( non - Javadoc ) * @ see com . tobedevoured . naether . api . Naether # getDependenciesNotation ( ) */ public Set < String > getDependenciesNotation ( ) { } }
Set < String > notations = new HashSet < String > ( ) ; for ( Dependency dependency : currentDependencies ( ) ) { notations . add ( Notation . generate ( dependency ) ) ; } return notations ;
public class SessionImpl { /** * { @ inheritDoc } */ public String getNamespaceURI ( String prefix ) throws NamespaceException , RepositoryException { } }
String uri = null ; // look in session first if ( namespaces . size ( ) > 0 ) { uri = namespaces . get ( prefix ) ; if ( uri != null ) { return uri ; } } return workspace . getNamespaceRegistry ( ) . getURI ( prefix ) ;
public class Http {
    /**
     * Executes a PATCH request with the default connection and read timeouts.
     *
     * @param uri url of resource.
     * @param content content to be posted.
     * @return {@link Patch} object.
     */
    public static Patch patch(String uri, String content) {
        // NOTE(review): getBytes() uses the platform default charset — the
        // byte[] overload's expectation should be confirmed (UTF-8?).
        return patch(uri, content.getBytes(), CONNECTION_TIMEOUT, READ_TIMEOUT);
    }
}
public class JDK8TriggerBuilder {
    /**
     * Use a <code>TriggerKey</code> with the given name and default group to
     * identify the Trigger. If none of the 'withIdentity' methods are set on
     * the JDK8TriggerBuilder, then a random, unique TriggerKey will be
     * generated.
     *
     * @param name the name element for the Trigger's TriggerKey
     * @return the updated JDK8TriggerBuilder
     * @see TriggerKey
     * @see ITrigger#getKey()
     */
    @Nonnull
    public JDK8TriggerBuilder<T> withIdentity(final String name) {
        // A null group selects the default group for the key.
        m_aTriggerKey = new TriggerKey(name, null);
        return this;
    }
}
public class DateRangeValidationMatcher { /** * Converts the supplied date to it ' s calendar representation . The { @ code datePattern } is * used for parsing the date . * @ param date the date to parse * @ param datePattern the date format to use when parsing the date * @ return the calendar representation */ protected Calendar toCalender ( String date , String datePattern ) { } }
SimpleDateFormat dateFormat = new SimpleDateFormat ( datePattern ) ; Calendar cal = Calendar . getInstance ( ) ; try { cal . setTime ( dateFormat . parse ( date ) ) ; } catch ( ParseException e ) { throw new CitrusRuntimeException ( String . format ( "Error parsing date '%s' using pattern '%s'" , date , datePattern ) , e ) ; } return cal ;
public class CsvProcessor { /** * Build and return a header string made up of quoted column names . * @ param appendLineTermination * Set to true to add the newline to the end of the line . */ public String buildHeaderLine ( boolean appendLineTermination ) { } }
checkEntityConfig ( ) ; StringBuilder sb = new StringBuilder ( ) ; boolean first = true ; for ( ColumnInfo < ? > columnInfo : allColumnInfos ) { if ( first ) { first = false ; } else { sb . append ( columnSeparator ) ; } String header = columnInfo . getColumnName ( ) ; // need to protect the column if it contains a quote if ( header . indexOf ( columnQuote ) >= 0 ) { writeQuoted ( sb , header ) ; continue ; } sb . append ( columnQuote ) ; sb . append ( header ) ; sb . append ( columnQuote ) ; } if ( appendLineTermination ) { sb . append ( lineTermination ) ; } return sb . toString ( ) ;
public class CmsDriverManager { /** * Helper method for finding the ' best ' URL name to use for a new URL name mapping . < p > * Since the name given as a parameter may be already used , this method will try to append numeric suffixes * to the name to find a mapping name which is not used . < p > * @ param dbc the current database context * @ param name the name of the mapping * @ param structureId the structure id to which the name is mapped * @ return the best name which was found for the new mapping * @ throws CmsDataAccessException if something goes wrong */ protected String findBestNameForUrlNameMapping ( CmsDbContext dbc , String name , CmsUUID structureId ) throws CmsDataAccessException { } }
List < CmsUrlNameMappingEntry > entriesStartingWithName = getVfsDriver ( dbc ) . readUrlNameMappingEntries ( dbc , false , CmsUrlNameMappingFilter . ALL . filterNamePattern ( name + "%" ) . filterRejectStructureId ( structureId ) ) ; Set < String > usedNames = new HashSet < String > ( ) ; for ( CmsUrlNameMappingEntry entry : entriesStartingWithName ) { usedNames . add ( entry . getName ( ) ) ; } int counter = 0 ; String numberedName ; do { numberedName = getNumberedName ( name , counter ) ; counter += 1 ; } while ( usedNames . contains ( numberedName ) ) ; return numberedName ;