signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class DateRangeChooser { /** * Returns the selected start date . This may be null if there is no active selection or if the
* selected date range has no start date .
* @ return Starting date of range , or null . */
public Date getStartDate ( ) { } } | DateRange range = getSelectedRange ( ) ; return range == null ? null : range . getStartDate ( ) ; |
public class ConnectionPartition { /** * This method is a replacement for finalize ( ) but avoids all its pitfalls ( see Joshua Bloch et . all ) .
* Keeps a handle on the connection . If the application called closed , then it means that the handle gets pushed back to the connection
* pool and thus we get a strong reference again . If the application forgot to call close ( ) and subsequently lost the strong reference to it ,
* the handle becomes eligible to garbage connection and thus the the finalizeReferent method kicks in to safely close off the database
* handle . Note that we do not return the connectionHandle back to the pool since that is not possible ( for otherwise the GC would not
* have kicked in ) , but we merely safely release the database internal handle and update our counters instead .
* @ param connectionHandle handle to watch */
protected void trackConnectionFinalizer ( ConnectionHandle connectionHandle ) { } } | if ( ! this . disableTracking ) { // assert ! connectionHandle . getPool ( ) . getFinalizableRefs ( ) . containsKey ( connectionHandle ) : " Already tracking this handle " ;
Connection con = connectionHandle . getInternalConnection ( ) ; if ( con != null && con instanceof Proxy && Proxy . getInvocationHandler ( con ) instanceof MemorizeTransactionProxy ) { try { // if this is a proxy , get the correct target so that when we call close we ' re actually calling close on the database
// handle and not a proxy - based close .
con = ( Connection ) Proxy . getInvocationHandler ( con ) . invoke ( con , ConnectionHandle . class . getMethod ( "getProxyTarget" ) , null ) ; } catch ( Throwable t ) { logger . error ( "Error while attempting to track internal db connection" , t ) ; // should never happen
} } final Connection internalDBConnection = con ; final BoneCP pool = connectionHandle . getPool ( ) ; connectionHandle . getPool ( ) . getFinalizableRefs ( ) . put ( internalDBConnection , new FinalizableWeakReference < ConnectionHandle > ( connectionHandle , connectionHandle . getPool ( ) . getFinalizableRefQueue ( ) ) { @ SuppressWarnings ( "synthetic-access" ) public void finalizeReferent ( ) { try { pool . getFinalizableRefs ( ) . remove ( internalDBConnection ) ; if ( internalDBConnection != null && ! internalDBConnection . isClosed ( ) ) { // safety !
logger . warn ( "BoneCP detected an unclosed connection " + ConnectionPartition . this . poolName + "and will now attempt to close it for you. " + "You should be closing this connection in your application - enable connectionWatch for additional debugging assistance or set disableConnectionTracking to true to disable this feature entirely." ) ; internalDBConnection . close ( ) ; updateCreatedConnections ( - 1 ) ; } } catch ( Throwable t ) { logger . error ( "Error while closing off internal db connection" , t ) ; } } } ) ; } |
public class Matrix4x3f { /** * Apply a rotation transformation to this matrix to make < code > - z < / code > point along < code > dir < / code > .
* If < code > M < / code > is < code > this < / code > matrix and < code > L < / code > the lookalong rotation matrix ,
* then the new matrix will be < code > M * L < / code > . So when transforming a
* vector < code > v < / code > with the new matrix by using < code > M * L * v < / code > , the
* lookalong rotation transformation will be applied first !
* This is equivalent to calling
* { @ link # lookAt ( float , float , float , float , float , float , float , float , float ) lookAt ( ) }
* with < code > eye = ( 0 , 0 , 0 ) < / code > and < code > center = dir < / code > .
* In order to set the matrix to a lookalong transformation without post - multiplying it ,
* use { @ link # setLookAlong ( float , float , float , float , float , float ) setLookAlong ( ) }
* @ see # lookAt ( float , float , float , float , float , float , float , float , float )
* @ see # setLookAlong ( float , float , float , float , float , float )
* @ param dirX
* the x - coordinate of the direction to look along
* @ param dirY
* the y - coordinate of the direction to look along
* @ param dirZ
* the z - coordinate of the direction to look along
* @ param upX
* the x - coordinate of the up vector
* @ param upY
* the y - coordinate of the up vector
* @ param upZ
* the z - coordinate of the up vector
* @ return this */
public Matrix4x3f lookAlong ( float dirX , float dirY , float dirZ , float upX , float upY , float upZ ) { } } | return lookAlong ( dirX , dirY , dirZ , upX , upY , upZ , this ) ; |
public class OmsTrentoP { /** * Initializating the array .
* The array is the net . If there is a FeatureCollection extract values from
* it . The Array is order following the ID .
* oss : if the FeatureCillection is null a IllegalArgumentException is throw
* in { @ link OmsTrentoP # verifyParameter ( ) } .
* @ param isAreaNotAllDry it is true if there is only a percentage of the input area dry .
* @ throws IllegalArgumentException
* if the FeatureCollection hasn ' t the correct parameters . */
private void setNetworkPipes ( boolean isAreaNotAllDry ) throws Exception { } } | int length = inPipes . size ( ) ; networkPipes = new Pipe [ length ] ; SimpleFeatureIterator stationsIter = inPipes . features ( ) ; boolean existOut = false ; int tmpOutIndex = 0 ; try { int t = 0 ; while ( stationsIter . hasNext ( ) ) { SimpleFeature feature = stationsIter . next ( ) ; try { /* * extract the value of the ID which is the position ( minus
* 1 ) in the array . */
Number field = ( ( Number ) feature . getAttribute ( TrentoPFeatureType . ID_STR ) ) ; if ( field == null ) { pm . errorMessage ( msg . message ( "trentoP.error.number" ) + TrentoPFeatureType . ID_STR ) ; throw new IllegalArgumentException ( msg . message ( "trentoP.error.number" ) + TrentoPFeatureType . ID_STR ) ; } if ( field . equals ( pOutPipe ) ) { tmpOutIndex = t ; existOut = true ; } networkPipes [ t ] = new Pipe ( feature , pMode , isAreaNotAllDry , pm ) ; t ++ ; } catch ( NullPointerException e ) { pm . errorMessage ( msg . message ( "trentop.illegalNet" ) ) ; throw new IllegalArgumentException ( msg . message ( "trentop.illegalNet" ) ) ; } } } finally { stationsIter . close ( ) ; } if ( ! existOut ) { } // set the id where drain of the outlet .
networkPipes [ tmpOutIndex ] . setIdPipeWhereDrain ( 0 ) ; networkPipes [ tmpOutIndex ] . setIndexPipeWhereDrain ( - 1 ) ; // start to construct the net .
int numberOfPoint = networkPipes [ tmpOutIndex ] . point . length - 1 ; findIdThatDrainsIntoIndex ( tmpOutIndex , networkPipes [ tmpOutIndex ] . point [ 0 ] ) ; findIdThatDrainsIntoIndex ( tmpOutIndex , networkPipes [ tmpOutIndex ] . point [ numberOfPoint ] ) ; List < Integer > missingId = new ArrayList < Integer > ( ) ; for ( Pipe pipe : networkPipes ) { if ( pipe . getIdPipeWhereDrain ( ) == null && pipe . getId ( ) != pOutPipe ) { missingId . add ( pipe . getId ( ) ) ; } } if ( missingId . size ( ) > 0 ) { String errorMsg = "One of the following pipes doesn't have a connected pipe towards the outlet: " + Arrays . toString ( missingId . toArray ( new Integer [ 0 ] ) ) ; pm . errorMessage ( msg . message ( errorMsg ) ) ; throw new IllegalArgumentException ( errorMsg ) ; } verifyNet ( networkPipes , pm ) ; |
public class LdapUtils { /** * Iterate through all the values of the specified Attribute calling back to
* the specified callbackHandler .
* @ param attribute the Attribute to work with ; not < code > null < / code > .
* @ param callbackHandler the callbackHandler ; not < code > null < / code > .
* @ since 1.3 */
public static void iterateAttributeValues ( Attribute attribute , AttributeValueCallbackHandler callbackHandler ) { } } | Assert . notNull ( attribute , "Attribute must not be null" ) ; Assert . notNull ( callbackHandler , "callbackHandler must not be null" ) ; if ( attribute instanceof Iterable ) { int i = 0 ; for ( Object obj : ( Iterable ) attribute ) { handleAttributeValue ( attribute . getID ( ) , obj , i , callbackHandler ) ; i ++ ; } } else { for ( int i = 0 ; i < attribute . size ( ) ; i ++ ) { try { handleAttributeValue ( attribute . getID ( ) , attribute . get ( i ) , i , callbackHandler ) ; } catch ( javax . naming . NamingException e ) { throw convertLdapException ( e ) ; } } } |
public class BMPCProxy { /** * Creates a new HAR attached to the proxy .
* @ param initialPageRef Name of the first pageRef that should be used by
* the HAR . If " null " , default to " Page 1"
* @ param captureHeaders Enables capturing of HTTP Headers
* @ param captureContent Enables capturing of HTTP Response Content ( body )
* @ param captureBinaryContent Enabled capturing of HTTP Response
* Binary Content ( in bse64 encoding )
* @ return JsonObject HAR response if this proxy was previously collecting
* another HAR , effectively considering that concluded .
* " null " otherwise . */
public JsonObject newHar ( String initialPageRef , boolean captureHeaders , boolean captureContent , boolean captureBinaryContent ) { } } | try { // Request BMP to create a new HAR for this Proxy
HttpPut request = new HttpPut ( requestURIBuilder ( ) . setPath ( proxyURIPath ( ) + "/har" ) . build ( ) ) ; // Add form parameters to the request
applyFormParamsToHttpRequest ( request , new BasicNameValuePair ( "initialPageRef" , initialPageRef ) , new BasicNameValuePair ( "captureHeaders" , Boolean . toString ( captureHeaders ) ) , new BasicNameValuePair ( "captureContent" , Boolean . toString ( captureContent ) ) , new BasicNameValuePair ( "captureBinaryContent" , Boolean . toString ( captureBinaryContent ) ) ) ; // Execute request
CloseableHttpResponse response = HTTPclient . execute ( request ) ; // Parse response into JSON
JsonObject previousHar = httpResponseToJsonObject ( response ) ; // Close HTTP Response
response . close ( ) ; return previousHar ; } catch ( Exception e ) { throw new BMPCUnableToCreateHarException ( e ) ; } |
public class AbstractAggregatorImpl { /** * Returns the name for this aggregator
* This method is called during aggregator intialization . Subclasses may
* override this method to initialize the aggregator using a different
* name . Use the public { @ link IAggregator # getName ( ) } method
* to get the name of an initialized aggregator .
* @ param configMap
* A Map having key - value pairs denoting configuration settings for the aggregator servlet
* @ return The aggregator name */
protected String getAggregatorName ( Map < String , String > configMap ) { } } | // trim leading and trailing ' / '
String alias = ( String ) configMap . get ( "alias" ) ; // $ NON - NLS - 1 $
while ( alias . charAt ( 0 ) == '/' ) alias = alias . substring ( 1 ) ; while ( alias . charAt ( alias . length ( ) - 1 ) == '/' ) alias = alias . substring ( 0 , alias . length ( ) - 1 ) ; return alias ; |
public class AmazonEC2Client { /** * Enables a virtual private gateway ( VGW ) to propagate routes to the specified route table of a VPC .
* @ param enableVgwRoutePropagationRequest
* Contains the parameters for EnableVgwRoutePropagation .
* @ return Result of the EnableVgwRoutePropagation operation returned by the service .
* @ sample AmazonEC2 . EnableVgwRoutePropagation
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / ec2-2016-11-15 / EnableVgwRoutePropagation " target = " _ top " > AWS
* API Documentation < / a > */
@ Override public EnableVgwRoutePropagationResult enableVgwRoutePropagation ( EnableVgwRoutePropagationRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeEnableVgwRoutePropagation ( request ) ; |
public class Update { /** * A key - value map that contains the parameters associated with the update .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setParams ( java . util . Collection ) } or { @ link # withParams ( java . util . Collection ) } if you want to override the
* existing values .
* @ param params
* A key - value map that contains the parameters associated with the update .
* @ return Returns a reference to this object so that method calls can be chained together . */
public Update withParams ( UpdateParam ... params ) { } } | if ( this . params == null ) { setParams ( new java . util . ArrayList < UpdateParam > ( params . length ) ) ; } for ( UpdateParam ele : params ) { this . params . add ( ele ) ; } return this ; |
public class ParameterImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
@ Override public NotificationChain eInverseRemove ( InternalEObject otherEnd , int featureID , NotificationChain msgs ) { } } | switch ( featureID ) { case BpsimPackage . PARAMETER__PARAMETER_VALUE_GROUP : return ( ( InternalEList < ? > ) getParameterValueGroup ( ) ) . basicRemove ( otherEnd , msgs ) ; case BpsimPackage . PARAMETER__PARAMETER_VALUE : return ( ( InternalEList < ? > ) getParameterValue ( ) ) . basicRemove ( otherEnd , msgs ) ; } return super . eInverseRemove ( otherEnd , featureID , msgs ) ; |
public class InMemoryDocumentSessionOperations { /** * Tracks the entity .
* @ param entityType Entity class
* @ param id Id of document
* @ param document raw entity
* @ param metadata raw document metadata
* @ param noTracking no tracking
* @ return entity */
public Object trackEntity ( Class entityType , String id , ObjectNode document , ObjectNode metadata , boolean noTracking ) { } } | noTracking = this . noTracking || noTracking ; // if noTracking is session - wide then we want to override the passed argument
if ( StringUtils . isEmpty ( id ) ) { return deserializeFromTransformer ( entityType , null , document ) ; } DocumentInfo docInfo = documentsById . getValue ( id ) ; if ( docInfo != null ) { // the local instance may have been changed , we adhere to the current Unit of Work
// instance , and return that , ignoring anything new .
if ( docInfo . getEntity ( ) == null ) { docInfo . setEntity ( entityToJson . convertToEntity ( entityType , id , document ) ) ; } if ( ! noTracking ) { includedDocumentsById . remove ( id ) ; documentsByEntity . put ( docInfo . getEntity ( ) , docInfo ) ; } return docInfo . getEntity ( ) ; } docInfo = includedDocumentsById . get ( id ) ; if ( docInfo != null ) { if ( docInfo . getEntity ( ) == null ) { docInfo . setEntity ( entityToJson . convertToEntity ( entityType , id , document ) ) ; } if ( ! noTracking ) { includedDocumentsById . remove ( id ) ; documentsById . add ( docInfo ) ; documentsByEntity . put ( docInfo . getEntity ( ) , docInfo ) ; } return docInfo . getEntity ( ) ; } Object entity = entityToJson . convertToEntity ( entityType , id , document ) ; String changeVector = metadata . get ( Constants . Documents . Metadata . CHANGE_VECTOR ) . asText ( ) ; if ( changeVector == null ) { throw new IllegalStateException ( "Document " + id + " must have Change Vector" ) ; } if ( ! noTracking ) { DocumentInfo newDocumentInfo = new DocumentInfo ( ) ; newDocumentInfo . setId ( id ) ; newDocumentInfo . setDocument ( document ) ; newDocumentInfo . setMetadata ( metadata ) ; newDocumentInfo . setEntity ( entity ) ; newDocumentInfo . setChangeVector ( changeVector ) ; documentsById . add ( newDocumentInfo ) ; documentsByEntity . put ( entity , newDocumentInfo ) ; } return entity ; |
public class RepositoryUtils { /** * Converts method signature to human readable string .
* @ param type root type ( method may be called not from declaring class )
* @ param method method to print
* @ return string representation for method */
public static String methodToString ( final Class < ? > type , final Method method ) { } } | final StringBuilder res = new StringBuilder ( ) ; res . append ( type . getSimpleName ( ) ) . append ( '#' ) . append ( method . getName ( ) ) . append ( '(' ) ; int i = 0 ; for ( Class < ? > param : method . getParameterTypes ( ) ) { if ( i > 0 ) { res . append ( ", " ) ; } final Type generic = method . getGenericParameterTypes ( ) [ i ] ; if ( generic instanceof TypeVariable ) { // using generic name , because its simpler to search visually in code
res . append ( '<' ) . append ( ( ( TypeVariable ) generic ) . getName ( ) ) . append ( '>' ) ; } else { res . append ( param . getSimpleName ( ) ) ; } i ++ ; } res . append ( ')' ) ; return res . toString ( ) ; |
public class Partitioner { /** * Get low water mark :
* ( 1 ) Use { @ link ConfigurationKeys # SOURCE _ QUERYBASED _ START _ VALUE } iff it is a full dump ( or watermark override is enabled )
* ( 2 ) Otherwise use previous watermark ( fallback to { @ link ConfigurationKeys # SOURCE _ QUERYBASED _ START _ VALUE } iff previous watermark is unavailable )
* @ param extractType Extract type
* @ param watermarkType Watermark type
* @ param previousWatermark Previous water mark
* @ param deltaForNextWatermark delta number for next water mark
* @ return low water mark in { @ link Partitioner # WATERMARKTIMEFORMAT } */
@ VisibleForTesting protected long getLowWatermark ( ExtractType extractType , WatermarkType watermarkType , long previousWatermark , int deltaForNextWatermark ) { } } | long lowWatermark = ConfigurationKeys . DEFAULT_WATERMARK_VALUE ; if ( this . isFullDump ( ) || this . isWatermarkOverride ( ) ) { String timeZone = this . state . getProp ( ConfigurationKeys . SOURCE_TIMEZONE , ConfigurationKeys . DEFAULT_SOURCE_TIMEZONE ) ; /* * SOURCE _ QUERYBASED _ START _ VALUE could be :
* - a simple string , e . g . " 12345"
* - a timestamp string , e . g . " 2014010100000"
* - a string with a time directive , e . g . " CURRENTDAY - X " , " CURRENTHOUR - X " , ( X is a number ) */
lowWatermark = Utils . getLongWithCurrentDate ( this . state . getProp ( ConfigurationKeys . SOURCE_QUERYBASED_START_VALUE ) , timeZone ) ; LOG . info ( "Overriding low water mark with the given start value: " + lowWatermark ) ; } else { if ( isSnapshot ( extractType ) ) { lowWatermark = this . getSnapshotLowWatermark ( watermarkType , previousWatermark , deltaForNextWatermark ) ; } else { lowWatermark = this . getAppendLowWatermark ( watermarkType , previousWatermark , deltaForNextWatermark ) ; } } return ( lowWatermark == 0 ? ConfigurationKeys . DEFAULT_WATERMARK_VALUE : lowWatermark ) ; |
public class OldNgramExtractor { /** * This was the method found in the < i > com . cybozu . labs . langdetect . Detector < / i > class , it was used to extract
* grams from the to - analyze text .
* NOTE : although it adds the first ngram with space , it does not add the last n - gram with space . example : " foo " gives " fo " but not " oo " ! .
* It is not clear yet whether this is desired ( and why ) or a bug .
* TODO replace this algorithm with a simpler , faster one that uses less memory : only by position shifting . also , the returned list size
* can be computed before making it ( based on text length and number of n - grams ) . */
@ NotNull @ Deprecated public static List < String > extractNGrams ( @ NotNull CharSequence text , @ Nullable Filter filter ) { } } | List < String > list = new ArrayList < > ( ) ; NGram ngram = new NGram ( ) ; for ( int i = 0 ; i < text . length ( ) ; ++ i ) { ngram . addChar ( text . charAt ( i ) ) ; for ( int n = 1 ; n <= NGram . N_GRAM ; ++ n ) { String w = ngram . get ( n ) ; if ( w != null ) { // TODO this null check is ugly
if ( filter == null || filter . use ( w ) ) { list . add ( w ) ; } } } } return list ; |
public class MembershipTypeHandlerImpl { /** * Gets membership type from cache . */
private MembershipType getFromCache ( String name ) { } } | return ( MembershipType ) cache . get ( name , CacheType . MEMBERSHIPTYPE ) ; |
public class UniformCrossover { /** * Has a probability < i > crossoverRate < / i > of performing the crossover where the operator will select randomly which parent donates the gene . < br >
* One of the parent may be favored if the bias is different than 0.5
* Otherwise , returns the genes of a random parent .
* @ return The crossover result . See { @ link CrossoverResult } . */
@ Override public CrossoverResult crossover ( ) { } } | // select the parents
double [ ] [ ] parents = parentSelection . selectParents ( ) ; double [ ] resultGenes = parents [ 0 ] ; boolean isModified = false ; if ( rng . nextDouble ( ) < crossoverRate ) { // Crossover
resultGenes = new double [ parents [ 0 ] . length ] ; for ( int i = 0 ; i < resultGenes . length ; ++ i ) { resultGenes [ i ] = ( ( rng . nextDouble ( ) < parentBiasFactor ) ? parents [ 0 ] : parents [ 1 ] ) [ i ] ; } isModified = true ; } return new CrossoverResult ( isModified , resultGenes ) ; |
public class ElementBase { /** * Returns true if parentClass can be a parent of childClass .
* @ param childClass The child class .
* @ param parentClass The parent class .
* @ return True if parentClass can be a parent of childClass . */
public static boolean canAcceptParent ( Class < ? extends ElementBase > childClass , Class < ? extends ElementBase > parentClass ) { } } | return allowedParentClasses . isRelated ( childClass , parentClass ) ; |
public class InternalXbaseParser { /** * InternalXbase . g : 333:1 : entryRuleOpCompare : ruleOpCompare EOF ; */
public final void entryRuleOpCompare ( ) throws RecognitionException { } } | try { // InternalXbase . g : 334:1 : ( ruleOpCompare EOF )
// InternalXbase . g : 335:1 : ruleOpCompare EOF
{ if ( state . backtracking == 0 ) { before ( grammarAccess . getOpCompareRule ( ) ) ; } pushFollow ( FOLLOW_1 ) ; ruleOpCompare ( ) ; state . _fsp -- ; if ( state . failed ) return ; if ( state . backtracking == 0 ) { after ( grammarAccess . getOpCompareRule ( ) ) ; } match ( input , EOF , FOLLOW_2 ) ; if ( state . failed ) return ; } } catch ( RecognitionException re ) { reportError ( re ) ; recover ( input , re ) ; } finally { } return ; |
public class MemoryRemoteTable { /** * Do a remote action .
* Not implemented .
* @ param strCommand Command to perform remotely .
* @ param properties Properties for this command ( optional ) .
* @ return boolean success . */
public Object doRemoteAction ( String strCommand , Map < String , Object > properties ) throws DBException , RemoteException { } } | return null ; // Not supported |
public class MutableArray { /** * Sets a Dictionary object at the given index .
* @ param index the index . This value must not exceed the bounds of the array .
* @ param value the Dictionary object
* @ return The self object */
@ NonNull @ Override public MutableArray setDictionary ( int index , Dictionary value ) { } } | return setValue ( index , value ) ; |
public class Util { /** * setEquals determines whether two string sets are identical .
* @ param a the first set .
* @ param b the second set .
* @ return whether a equals to b . */
public static boolean setEquals ( List < String > a , List < String > b ) { } } | if ( a == null ) { a = new ArrayList < > ( ) ; } if ( b == null ) { b = new ArrayList < > ( ) ; } if ( a . size ( ) != b . size ( ) ) { return false ; } Collections . sort ( a ) ; Collections . sort ( b ) ; for ( int i = 0 ; i < a . size ( ) ; i ++ ) { if ( ! a . get ( i ) . equals ( b . get ( i ) ) ) { return false ; } } return true ; |
public class OperatorContext { /** * Returns how much revocable memory will be revoked by the operator */
public long requestMemoryRevoking ( ) { } } | long revokedMemory = 0L ; Runnable listener = null ; synchronized ( this ) { if ( ! isMemoryRevokingRequested ( ) && operatorMemoryContext . getRevocableMemory ( ) > 0 ) { memoryRevokingRequested = true ; revokedMemory = operatorMemoryContext . getRevocableMemory ( ) ; listener = memoryRevocationRequestListener ; } } if ( listener != null ) { runListener ( listener ) ; } return revokedMemory ; |
public class ReferenceParam { /** * Returns a new param containing the same value as this param , but with the type copnverted
* to { @ link QuantityParam } . This is useful if you are using reference parameters and want to handle
* chained parameters of different types in a single method .
* See < a href = " http : / / jamesagnew . github . io / hapi - fhir / doc _ rest _ operations . html # dynamic _ chains " > Dynamic Chains < / a >
* in the HAPI FHIR documentation for an example of how to use this method . */
public QuantityParam toQuantityParam ( FhirContext theContext ) { } } | QuantityParam retVal = new QuantityParam ( ) ; retVal . setValueAsQueryToken ( theContext , null , null , getValueAsQueryToken ( theContext ) ) ; return retVal ; |
public class RedirectedReadResultEntry { /** * Handles an exception that was caught . If this is the first exception ever caught , and it is eligible for retries ,
* then this method will invoke the retryGetEntry that was passed through the constructor to get a new entry .
* If that succeeds , the new entry is then used to serve up the result .
* The new entry will only be accepted if it is not a RedirectedReadResultEntry ( since that is likely to get us in the
* same situation again ) .
* @ param ex The exception to inspect .
* @ return True if the exception was handled properly and the base entry swapped , false otherwise . */
private boolean handle ( Throwable ex ) { } } | ex = Exceptions . unwrap ( ex ) ; if ( this . secondEntry == null && isRetryable ( ex ) ) { // This is the first attempt and we caught a retry - eligible exception ; issue the query for the new entry .
CompletableReadResultEntry newEntry = this . retryGetEntry . apply ( getStreamSegmentOffset ( ) , this . firstEntry . getRequestedReadLength ( ) , this . redirectedSegmentId ) ; if ( ! ( newEntry instanceof RedirectedReadResultEntry ) ) { // Request the content for the new entry ( if that fails , we do not change any state ) .
newEntry . requestContent ( this . timer == null ? DEFAULT_TIMEOUT : timer . getRemaining ( ) ) ; assert newEntry . getStreamSegmentOffset ( ) == this . adjustedOffset : "new entry's StreamSegmentOffset does not match the adjusted offset of this entry" ; assert newEntry . getRequestedReadLength ( ) == this . firstEntry . getRequestedReadLength ( ) : "new entry does not have the same RequestedReadLength" ; // After all checks are done , update the internal state to use the new entry .
newEntry . setCompletionCallback ( this . firstEntry . getCompletionCallback ( ) ) ; this . secondEntry = newEntry ; setOutcomeAfterSecondEntry ( ) ; return true ; } } return false ; |
public class Rowtime { /** * Sets a built - in timestamp extractor that converts an existing { @ link Long } or
* { @ link Types # SQL _ TIMESTAMP } field into the rowtime attribute .
* @ param fieldName The field to convert into a rowtime attribute . */
public Rowtime timestampsFromField ( String fieldName ) { } } | internalProperties . putString ( ROWTIME_TIMESTAMPS_TYPE , ROWTIME_TIMESTAMPS_TYPE_VALUE_FROM_FIELD ) ; internalProperties . putString ( ROWTIME_TIMESTAMPS_FROM , fieldName ) ; return this ; |
public class UniversalProjectReader { /** * This could be a self - extracting archive . If we understand the format , expand
* it and check the content for files we can read .
* @ param stream schedule data
* @ return ProjectFile instance */
private ProjectFile handleDosExeFile ( InputStream stream ) throws Exception { } } | File file = InputStreamHelper . writeStreamToTempFile ( stream , ".tmp" ) ; InputStream is = null ; try { is = new FileInputStream ( file ) ; if ( is . available ( ) > 1350 ) { StreamHelper . skip ( is , 1024 ) ; // Bytes at offset 1024
byte [ ] data = new byte [ 2 ] ; is . read ( data ) ; if ( matchesFingerprint ( data , WINDOWS_NE_EXE_FINGERPRINT ) ) { StreamHelper . skip ( is , 286 ) ; // Bytes at offset 1312
data = new byte [ 34 ] ; is . read ( data ) ; if ( matchesFingerprint ( data , PRX_FINGERPRINT ) ) { is . close ( ) ; is = null ; return readProjectFile ( new P3PRXFileReader ( ) , file ) ; } } if ( matchesFingerprint ( data , STX_FINGERPRINT ) ) { StreamHelper . skip ( is , 31742 ) ; // Bytes at offset 32768
data = new byte [ 4 ] ; is . read ( data ) ; if ( matchesFingerprint ( data , PRX3_FINGERPRINT ) ) { is . close ( ) ; is = null ; return readProjectFile ( new SureTrakSTXFileReader ( ) , file ) ; } } } return null ; } finally { StreamHelper . closeQuietly ( is ) ; FileHelper . deleteQuietly ( file ) ; } |
public class JKFactory { /** * Dump beans names . */
public static void dumpBeansNames ( ) { } } | DefaultListableBeanFactory f = ( DefaultListableBeanFactory ) context . getBeanFactory ( ) ; String [ ] beanDefinitionNames = f . getBeanDefinitionNames ( ) ; for ( String name : beanDefinitionNames ) { JK . print ( name , " for class :" , f . getBean ( name ) . getClass ( ) . getName ( ) ) ; } |
public class DirectionUtil { /** * Returns which of the eight compass directions is associated with the specified angle theta .
* < em > Note : < / em > that the angle supplied is assumed to increase clockwise around the origin
* ( which screen angles do ) rather than counter - clockwise around the origin ( which cartesian
* angles do ) and < code > NORTH < / code > is considered to point toward the top of the screen . */
public static int getDirection ( double theta ) { } } | theta = ( ( theta + Math . PI ) * 4 ) / Math . PI ; return ( int ) ( Math . round ( theta ) + WEST ) % 8 ; |
public class EdmondsMaximumMatching { /** * Augment all ancestors in the tree of vertex ' v ' .
* @ param v the leaf to augment from */
private void augment ( int v ) { } } | int n = buildPath ( path , 0 , v , NIL ) ; for ( int i = 2 ; i < n ; i += 2 ) { matching . match ( path [ i ] , path [ i - 1 ] ) ; } |
public class Curies { /** * Resolves a link - relation type ( curied or full rel ) and returns the curied form , or
* the unchanged rel , if no matching CURI is registered .
* @ param rel link - relation type
* @ return curied link - relation type */
public String resolve ( final String rel ) { } } | final Optional < CuriTemplate > curiTemplate = matchingCuriTemplateFor ( curies , rel ) ; return curiTemplate . map ( t -> t . curiedRelFrom ( rel ) ) . orElse ( rel ) ; |
public class FairScheduler { /** * Update a job ' s locality level and locality wait variables given that that
* it has just launched a map task on a given task tracker . */
private void updateLastMapLocalityLevel ( JobInProgress job , Task mapTaskLaunched , TaskTrackerStatus tracker ) { } } | JobInfo info = infos . get ( job ) ; LocalityLevel localityLevel = localManager . taskToLocalityLevel ( job , mapTaskLaunched , tracker ) ; info . lastMapLocalityLevel = localityLevel ; info . timeWaitedForLocalMap = 0 ; |
public class MBeanServerService { /** * { @ inheritDoc } */
public synchronized void start ( final StartContext context ) throws StartException { } } | // If the platform MBeanServer was set up to be the PluggableMBeanServer , use that otherwise create a new one and delegate
MBeanServer platform = ManagementFactory . getPlatformMBeanServer ( ) ; PluggableMBeanServerImpl pluggable = platform instanceof PluggableMBeanServerImpl ? ( PluggableMBeanServerImpl ) platform : new PluggableMBeanServerImpl ( platform , null ) ; MBeanServerDelegate delegate = platform instanceof PluggableMBeanServerImpl ? ( ( PluggableMBeanServerImpl ) platform ) . getMBeanServerDelegate ( ) : null ; pluggable . setAuditLogger ( auditLoggerInfo ) ; pluggable . setAuthorizer ( authorizer ) ; pluggable . setSecurityIdentitySupplier ( securityIdentitySupplier ) ; pluggable . setJmxEffect ( jmxEffect ) ; authorizer . setNonFacadeMBeansSensitive ( coreMBeanSensitivity ) ; if ( resolvedDomainName != null || expressionsDomainName != null ) { // TODO make these configurable
ConfiguredDomains configuredDomains = new ConfiguredDomains ( resolvedDomainName , expressionsDomainName ) ; showModelPlugin = new ModelControllerMBeanServerPlugin ( pluggable , configuredDomains , modelControllerValue . getValue ( ) , notificationRegistryValue . getValue ( ) , delegate , legacyWithProperPropertyFormat , processType , managementModelProviderValue . getValue ( ) , isMasterHc ) ; pluggable . addPlugin ( showModelPlugin ) ; } mBeanServer = pluggable ; |
public class DefaultGroovyMethods { /** * Get runtime groovydoc
* @ param holder the groovydoc hold
* @ return runtime groovydoc
* @ since 2.6.0 */
public static groovy . lang . groovydoc . Groovydoc getGroovydoc ( AnnotatedElement holder ) { } } | Groovydoc groovydocAnnotation = holder . < Groovydoc > getAnnotation ( Groovydoc . class ) ; return null == groovydocAnnotation ? EMPTY_GROOVYDOC : new groovy . lang . groovydoc . Groovydoc ( groovydocAnnotation . value ( ) , holder ) ; |
public class PluralRanges { /** * { @ inheritDoc }
* @ deprecated This API is ICU internal only .
* @ hide draft / provisional / internal are hidden on Android */
@ Override @ Deprecated public PluralRanges cloneAsThawed ( ) { } } | PluralRanges result = new PluralRanges ( ) ; result . explicit = explicit . clone ( ) ; result . matrix = matrix . clone ( ) ; return result ; |
public class ServiceDirectoryImpl { /** * Get the ServiceDirectoryManagerFactory .
* It looks up the configuration " com . cisco . oss . foundation . directory . manager . factory . provider " .
* If the configuration is null or the provider instantiation fails , it will instantiate the DefaultServiceDirectoryManagerFactory .
* @ return
* the ServiceDirectoryManagerFactory instance . */
protected ServiceDirectoryManagerFactory getServiceDirectoryManagerFactory ( ) throws ServiceException { } } | if ( isShutdown ) { throw new ServiceException ( ErrorCode . SERVICE_DIRECTORY_IS_SHUTDOWN ) ; } if ( directoryManagerFactory == null ) { // should not allow to return a null
// TODO , make directoryManagerFactory is immutable .
// TODO . remove the initialize and reinit method in ServiceDirectoryManagerFactory and ServiceDirectory
throw new ServiceException ( ErrorCode . SERVICE_DIRECTORY_NULL_ARGUMENT_ERROR , ErrorCode . SERVICE_DIRECTORY_NULL_ARGUMENT_ERROR . getMessageTemplate ( ) , "ServiceDirectoryManagerFactory" ) ; } return directoryManagerFactory ; |
public class Node { /** * Return the number of free < code > Entry < / code > s in this node .
* @ return The number of free < code > Entry < / code > s in this node .
* @ see Entry */
protected int numFreeEntries ( ) { } } | int res = 0 ; for ( int i = 0 ; i < entries . length ; i ++ ) { Entry entry = entries [ i ] ; if ( entry . isEmpty ( ) ) { res ++ ; } } assert ( NUMBER_ENTRIES == entries . length ) ; return res ; |
public class logical_disk { /** * < pre >
* Use this operation to get logical disks .
* < / pre > */
public static logical_disk [ ] get ( nitro_service client ) throws Exception { } } | logical_disk resource = new logical_disk ( ) ; resource . validate ( "get" ) ; return ( logical_disk [ ] ) resource . get_resources ( client ) ; |
public class EventManger {
    /**
     * Perform an action and wait for an event.
     * The event is signaled with {@link #signalEvent(Object, Object)}.
     *
     * @param eventKey the event key, must not be null.
     * @param timeout the timeout to wait for the event in milliseconds.
     * @param action the action to perform prior to waiting for the event, must not be null.
     * @return the event value, may be null (also null when the wait times out
     *         before the event is signaled).
     * @throws InterruptedException if interrupted while waiting for the event.
     * @throws E if the action throws.
     */
    public R performActionAndWaitForEvent(K eventKey, long timeout, Callback<E> action) throws InterruptedException, E {
        final Reference<R> reference = new Reference<>();
        // Register the reference first so a signal arriving during the action is not lost.
        events.put(eventKey, reference);
        try {
            synchronized (reference) {
                // Run the action while holding the reference's monitor: the signaler
                // must acquire the same monitor to notify, so it cannot slip in
                // between the action and the wait below.
                action.action();
                // NOTE(review): a single wait() may also return spuriously or on
                // timeout, in which case eventResult is still unset -- callers must
                // tolerate a null return; confirm this is the intended contract.
                reference.wait(timeout);
            }
            return reference.eventResult;
        } finally {
            // Always deregister, whether signaled, timed out, or interrupted.
            events.remove(eventKey);
        }
    }
}
public class Output { /** * Note : some algorithms MUST redefine this method to return other model categories */
public ModelCategory getModelCategory ( ) { } } | if ( isSupervised ( ) ) return ( isClassifier ( ) ? ( nclasses ( ) > 2 ? ModelCategory . Multinomial : ModelCategory . Binomial ) : ModelCategory . Regression ) ; return ModelCategory . Unknown ; |
public class ConBox { /** * Makes sure that the two interactions are members of the same pathway .
* @ return non - generative constraint */
public static Constraint inSamePathway ( ) { } } | String s1 = "Interaction/stepProcessOf/pathwayOrderOf" ; String s2 = "Interaction/pathwayComponentOf" ; return new OR ( new MappedConst ( new Field ( s1 , s1 , Field . Operation . INTERSECT ) , 0 , 1 ) , new MappedConst ( new Field ( s2 , s2 , Field . Operation . INTERSECT ) , 0 , 1 ) ) ; |
public class JDBC4CallableStatement { /** * Sets the designated parameter to the given java . sql . SQLXML object . */
@ Override public void setSQLXML ( String parameterName , SQLXML xmlObject ) throws SQLException { } } | checkClosed ( ) ; throw SQLError . noSupport ( ) ; |
public class ZipResourceLoader {
    /**
     * Lists the names of all non-directory entries under {@code path} inside
     * the given zip/jar file, relative to {@code path}.
     *
     * @param file the zip/jar file to scan
     * @param path the directory prefix inside the archive (without trailing slash)
     * @return the relative entry names, or null if the archive cannot be read
     */
    private static String[] listZipPath(final File file, String path) {
        final String prefix = path + "/";
        final ArrayList<String> names = new ArrayList<>();
        try (JarInputStream jarStream = new JarInputStream(new FileInputStream(file))) {
            for (ZipEntry entry = jarStream.getNextEntry(); entry != null; entry = jarStream.getNextEntry()) {
                final String name = entry.getName();
                // Keep files (not directories) located under the requested prefix.
                if (name.startsWith(prefix) && !name.endsWith("/")) {
                    names.add(name.substring(path.length() + 1));
                }
            }
        } catch (IOException e) {
            // Unreadable archive: signal failure with null (matches caller contract).
            return null;
        }
        return names.toArray(new String[names.size()]);
    }
}
public class AbstractH2Connector { /** * Create a backup file . The file is a ZIP file .
* @ param fDestFile
* Destination filename . May not be < code > null < / code > .
* @ return { @ link ESuccess } */
@ Nonnull public final ESuccess createBackup ( @ Nonnull final File fDestFile ) { } } | ValueEnforcer . notNull ( fDestFile , "DestFile" ) ; LOGGER . info ( "Backing up database '" + getDatabaseName ( ) + "' to " + fDestFile ) ; final DBExecutor aExecutor = new DBExecutor ( this ) ; return aExecutor . executeStatement ( "BACKUP TO '" + fDestFile . getAbsolutePath ( ) + "'" ) ; |
public class CollectionUtil { /** * 获取Collection的最后一个元素 , 如果collection为空返回null . */
public static < T > T getLast ( Collection < T > collection ) { } } | if ( isEmpty ( collection ) ) { return null ; } // 当类型List时 , 直接取得最后一个元素 .
if ( collection instanceof List ) { List < T > list = ( List < T > ) collection ; return list . get ( list . size ( ) - 1 ) ; } return Iterators . getLast ( collection . iterator ( ) ) ; |
public class PutIntegrationResponseResult { /** * A key - value map specifying response parameters that are passed to the method response from the back end . The key
* is a method response header parameter name and the mapped value is an integration response header value , a static
* value enclosed within a pair of single quotes , or a JSON expression from the integration response body . The
* mapping key must match the pattern of < code > method . response . header . { name } < / code > , where < code > name < / code > is a
* valid and unique header name . The mapped non - static value must match the pattern of
* < code > integration . response . header . { name } < / code > or < code > integration . response . body . { JSON - expression } < / code > ,
* where < code > name < / code > is a valid and unique response header name and < code > JSON - expression < / code > is a valid
* JSON expression without the < code > $ < / code > prefix .
* @ param responseParameters
* A key - value map specifying response parameters that are passed to the method response from the back end .
* The key is a method response header parameter name and the mapped value is an integration response header
* value , a static value enclosed within a pair of single quotes , or a JSON expression from the integration
* response body . The mapping key must match the pattern of < code > method . response . header . { name } < / code > , where
* < code > name < / code > is a valid and unique header name . The mapped non - static value must match the pattern of
* < code > integration . response . header . { name } < / code > or
* < code > integration . response . body . { JSON - expression } < / code > , where < code > name < / code > is a valid and unique
* response header name and < code > JSON - expression < / code > is a valid JSON expression without the
* < code > $ < / code > prefix .
* @ return Returns a reference to this object so that method calls can be chained together . */
public PutIntegrationResponseResult withResponseParameters ( java . util . Map < String , String > responseParameters ) { } } | setResponseParameters ( responseParameters ) ; return this ; |
public class ScheduleTaskImpl { /** * translate a urlString and a port definition to a URL Object
* @ param url URL String
* @ param port URL Port Definition
* @ return returns a URL Object
* @ throws MalformedURLException */
private static URL toURL ( String url , int port ) throws MalformedURLException { } } | URL u = HTTPUtil . toURL ( url , true ) ; if ( port == - 1 ) return u ; return new URL ( u . getProtocol ( ) , u . getHost ( ) , port , u . getFile ( ) ) ; |
public class XmlConfigurationSource { /** * Creates an instance of { @ link XmlConfigurationSource } .
* @ throws ConfigurationException when fails to create the { @ link XMLConfiguration }
* @ throws IOException when fails to read from the { @ code reader }
* @ throws NullPointerException if the path to the xml file is null */
public static XmlConfigurationSource create ( Reader reader ) throws ConfigurationException , IOException { } } | return new XmlConfigurationSource ( createConfiguration ( reader ) , DEFAULT_PRIORITY ) ; |
public class MercatorProjection { /** * Converts a latitude coordinate ( in degrees ) to a pixel Y coordinate at a certain scale .
* @ param latitude the latitude coordinate that should be converted .
* @ param scaleFactor the scale factor at which the coordinate should be converted .
* @ return the pixel Y coordinate of the latitude value . */
public static double latitudeToPixelYWithScaleFactor ( double latitude , double scaleFactor , int tileSize ) { } } | double sinLatitude = Math . sin ( latitude * ( Math . PI / 180 ) ) ; long mapSize = getMapSizeWithScaleFactor ( scaleFactor , tileSize ) ; // FIXME improve this formula so that it works correctly without the clipping
double pixelY = ( 0.5 - Math . log ( ( 1 + sinLatitude ) / ( 1 - sinLatitude ) ) / ( 4 * Math . PI ) ) * mapSize ; return Math . min ( Math . max ( 0 , pixelY ) , mapSize ) ; |
public class X509ProxyCertPathValidator { /** * Validates the specified certification path using the specified algorithm parameter set .
* The < code > CertPath < / code > specified must be of a type that is supported by the validation algorithm , otherwise
* an < code > InvalidAlgorithmParameterException < / code > will be thrown . For example , a < code > CertPathValidator < / code >
* that implements the PKIX algorithm validates < code > CertPath < / code > objects of type X . 509.
* @ param certPath the < code > CertPath < / code > to be validated
* @ param params the algorithm parameters
* @ return the result of the validation algorithm
* @ throws java . security . cert . CertPathValidatorException
* if the < code > CertPath < / code > does not validate
* @ throws java . security . InvalidAlgorithmParameterException
* if the specified parameters or the type of the
* specified < code > CertPath < / code > are inappropriate for this < code > CertPathValidator < / code > */
@ SuppressWarnings ( "unchecked" ) public CertPathValidatorResult engineValidate ( CertPath certPath , CertPathParameters params ) throws CertPathValidatorException , InvalidAlgorithmParameterException { } } | if ( certPath == null ) { throw new IllegalArgumentException ( "Certificate path cannot be null" ) ; } List list = certPath . getCertificates ( ) ; if ( list . size ( ) < 1 ) { throw new IllegalArgumentException ( "Certificate path cannot be empty" ) ; } parseParameters ( params ) ; // find the root trust anchor . Validate signatures and see if the
// chain ends in one of the trust root certificates
CertPath trustedCertPath = TrustedCertPathFinder . findTrustedCertPath ( this . keyStore , certPath ) ; // rest of the validation
return validate ( trustedCertPath ) ; |
public class JPAIssues {
    /**
     * Implements the visitor to find @Entity classes that have both generated
     * @Ids and have implemented hashCode/equals. Also looks for eager
     * one-to-many join fetches, as those lead to 1+n queries.
     *
     * @param clsContext the context object of the currently parsed class
     */
    @Override
    public void visitClassContext(ClassContext clsContext) {
        try {
            cls = clsContext.getJavaClass();
            // catalogClass populates the per-class flags read below
            // (isEntity, hasHCEquals, hasId, hasGeneratedValue, hasEagerOneToMany,
            // hasFetch, transactionalMethods) as side effects.
            catalogClass(cls);
            if (isEntity) {
                if (hasHCEquals && hasId && hasGeneratedValue) {
                    bugReporter.reportBug(new BugInstance(this, BugType.JPAI_HC_EQUALS_ON_MANAGED_ENTITY.name(), LOW_PRIORITY).addClass(cls));
                }
                if (hasEagerOneToMany && !hasFetch) {
                    bugReporter.reportBug(new BugInstance(this, BugType.JPAI_INEFFICIENT_EAGER_FETCH.name(), LOW_PRIORITY).addClass(cls));
                }
            }
            // Only walk method bytecode when there are transactional methods to inspect.
            if (!transactionalMethods.isEmpty()) {
                stack = new OpcodeStack();
                super.visitClassContext(clsContext);
            }
        } finally {
            // Release per-class state so it cannot leak into the next class visit.
            transactionalMethods = null;
            stack = null;
        }
    }
}
public class ClasspathListingProvider {
    /**
     * JBoss returns URLs with the vfszip and vfsfile protocol for resources,
     * and the org.reflections library doesn't recognize them. This is more a
     * bug inside the reflections library, but we can write a small workaround
     * for a quick fix on our side.
     */
    private Set<URL> filterURLs(final Set<URL> urls) {
        final Set<URL> results = new HashSet<URL>(urls.size());
        for (final URL url : urls) {
            String cleanURL = url.toString();
            // Fix JBoss URLs. Note: URL.getProtocol() returns the scheme WITHOUT
            // the trailing colon ("vfszip", not "vfszip:"), so the comparison must
            // not include it -- the previous startsWith("vfszip:") check could
            // never match and the workaround was dead code.
            final String protocol = url.getProtocol();
            if ("vfszip".equals(protocol)) {
                cleanURL = cleanURL.replaceFirst("vfszip:", "file:");
            } else if ("vfsfile".equals(protocol)) {
                cleanURL = cleanURL.replaceFirst("vfsfile:", "file:");
            }
            cleanURL = cleanURL.replaceFirst("\\.jar/", ".jar!/");
            try {
                results.add(new URL(cleanURL));
            } catch (final MalformedURLException ex) {
                // Shouldn't happen, but we can't do more to fix this URL.
            }
        }
        return results;
    }
}
public class ConnectionFactory {
    /**
     * Updates the database schema by loading the upgrade script for the version
     * specified. The intended use is that if the current schema version is 2.9
     * then we would call updateSchema(conn, "2.9"). This would load the
     * upgrade_2.9.sql file and execute it against the database. The upgrade
     * script must update the 'version' in the properties table.
     *
     * @param conn the database connection object
     * @param appExpectedVersion the schema version that the application expects
     * @param currentDbVersion the current schema version of the database
     * @throws DatabaseException thrown if there is an exception upgrading the
     *         database schema
     */
    private void updateSchema(Connection conn, DependencyVersion appExpectedVersion, DependencyVersion currentDbVersion) throws DatabaseException {
        // Only a local file-backed H2 database can be upgraded in place by
        // running the bundled SQL upgrade script.
        if (connectionString.startsWith("jdbc:h2:file:")) {
            LOGGER.debug("Updating database structure");
            // Resource name is derived from the CURRENT db version, e.g. upgrade_2.9.sql.
            final String updateFile = String.format(DB_STRUCTURE_UPDATE_RESOURCE, currentDbVersion.toString());
            try (InputStream is = FileUtils.getResourceAsStream(updateFile)) {
                if (is == null) {
                    throw new DatabaseException(String.format("Unable to load update file '%s'", updateFile));
                }
                final String dbStructureUpdate = IOUtils.toString(is, StandardCharsets.UTF_8);
                Statement statement = null;
                try {
                    statement = conn.createStatement();
                    statement.execute(dbStructureUpdate);
                } catch (SQLException ex) {
                    throw new DatabaseException(String.format("Unable to upgrade the database schema from %s to %s",
                            currentDbVersion.toString(), appExpectedVersion.toString()), ex);
                } finally {
                    DBUtils.closeStatement(statement);
                }
            } catch (IOException ex) {
                final String msg = String.format("Upgrade SQL file does not exist: %s", updateFile);
                throw new DatabaseException(msg, ex);
            }
        } else {
            // Non-upgradable (e.g. remote) database: compare the major.minor
            // version components to decide how to proceed.
            final int e0 = Integer.parseInt(appExpectedVersion.getVersionParts().get(0));
            final int c0 = Integer.parseInt(currentDbVersion.getVersionParts().get(0));
            final int e1 = Integer.parseInt(appExpectedVersion.getVersionParts().get(1));
            final int c1 = Integer.parseInt(currentDbVersion.getVersionParts().get(1));
            // CSOFF: EmptyBlock
            if (e0 == c0 && e1 < c1) {
                // The database is NEWER than the app expects: warn and disable
                // auto-update so the app does not try to modify a newer schema.
                LOGGER.warn("A new version of dependency-check is available; consider upgrading");
                settings.setBoolean(Settings.KEYS.AUTO_UPDATE, false);
            } else if (e0 == c0 && e1 == c1) {
                // Versions match exactly -- nothing to do. Not sure how we got
                // here, but kept just in case.
            } else {
                // The database is OLDER than required and cannot be upgraded here.
                LOGGER.error("The database schema must be upgraded to use this version of dependency-check. Please see {} for more information.", UPGRADE_HELP_URL);
                throw new DatabaseException("Database schema is out of date");
            }
            // CSON: EmptyBlock
        }
    }
}
public class LocalContentUriThumbnailFetchProducer { /** * stored thumbnails . */
private @ Nullable EncodedImage getThumbnail ( ResizeOptions resizeOptions , int imageId ) throws IOException { } } | int thumbnailKind = getThumbnailKind ( resizeOptions ) ; if ( thumbnailKind == NO_THUMBNAIL ) { return null ; } Cursor thumbnailCursor = null ; try { thumbnailCursor = MediaStore . Images . Thumbnails . queryMiniThumbnail ( mContentResolver , imageId , thumbnailKind , THUMBNAIL_PROJECTION ) ; if ( thumbnailCursor == null ) { return null ; } thumbnailCursor . moveToFirst ( ) ; if ( thumbnailCursor . getCount ( ) > 0 ) { final String thumbnailUri = thumbnailCursor . getString ( thumbnailCursor . getColumnIndex ( MediaStore . Images . Thumbnails . DATA ) ) ; if ( new File ( thumbnailUri ) . exists ( ) ) { return getEncodedImage ( new FileInputStream ( thumbnailUri ) , getLength ( thumbnailUri ) ) ; } } } finally { if ( thumbnailCursor != null ) { thumbnailCursor . close ( ) ; } } return null ; |
public class XmlConfigurationPersister { /** * { @ inheritDoc } */
@ Override public PersistenceResource store ( final ModelNode model , Set < PathAddress > affectedAddresses ) throws ConfigurationPersistenceException { } } | return new FilePersistenceResource ( model , fileName , this ) ; |
public class TxDistributionInterceptor {
    /**
     * Handles a transactional write command. If we are within one transaction
     * we won't do any replication, as replication is only performed at commit
     * time. If the operation didn't originate locally we won't do any
     * replication either.
     */
    private Object handleTxWriteCommand(InvocationContext ctx, AbstractDataWriteCommand command, Object key) throws Throwable {
        try {
            if (!ctx.isOriginLocal()) {
                LocalizedCacheTopology cacheTopology = checkTopologyId(command);
                // Ignore any remote command when we aren't the owner of its segment.
                if (!cacheTopology.isSegmentWriteOwner(command.getSegment())) {
                    return null;
                }
            }
            CacheEntry entry = ctx.lookupEntry(command.getKey());
            if (entry == null) {
                // Entry not yet in the invocation context; decide whether a remote
                // lookup of the previous value is required before wrapping it.
                if (isLocalModeForced(command) || command.hasAnyFlag(FlagBitSets.SKIP_REMOTE_LOOKUP) || !needsPreviousValue(ctx, command)) {
                    // In transactional mode we always need the entry wrapped, even
                    // with a null previous value.
                    entryFactory.wrapExternalEntry(ctx, key, null, false, true);
                } else {
                    // We need to retrieve the value remotely regardless of load type;
                    // in transactional mode all operations execute on origin. Also,
                    // operations that need the value on backup (delta write) must do
                    // the remote lookup even on non-origin nodes.
                    Object result = asyncInvokeNext(ctx, command, remoteGetSingleKey(ctx, command, command.getKey(), true));
                    return makeStage(result).andFinally(ctx, command,
                            (rCtx, rCommand, rv, t) -> updateMatcherForRetry((WriteCommand) rCommand));
                }
            }
            // Entry already wrapped (or wrapped above); continue down the chain,
            // always updating the matcher so the command can be safely retried.
            return invokeNextAndFinally(ctx, command,
                    (rCtx, rCommand, rv, t) -> updateMatcherForRetry((WriteCommand) rCommand));
        } catch (Throwable t) {
            updateMatcherForRetry(command);
            throw t;
        }
    }
}
public class BoxException { /** * Gets the server response as a BoxError .
* @ return the response as a BoxError , or null if the response cannot be converted . */
public BoxError getAsBoxError ( ) { } } | try { BoxError error = new BoxError ( ) ; error . createFromJson ( getResponse ( ) ) ; return error ; } catch ( Exception e ) { return null ; } |
public class ShareResourcesImpl { /** * Delete a share .
* It mirrors to the following Smartsheet REST API method :
* DELETE / workspaces / { workspaceId } / shares / { shareId }
* DELETE / sheets / { sheetId } / shares / { shareId }
* DELETE / sights / { sheetId } / shares / { shareId }
* DELETE / reports / { reportId } / shares / { shareId }
* Exceptions :
* InvalidRequestException : if there is any problem with the REST API request
* AuthorizationException : if there is any problem with the REST API authorization ( access token )
* ResourceNotFoundException : if the resource can not be found
* ServiceUnavailableException : if the REST API service is not available ( possibly due to rate limiting )
* SmartsheetRestException : if there is any other REST API related error occurred during the operation
* SmartsheetException : if there is any other error occurred during the operation
* @ param objectId the ID of the object to share
* @ param shareId the ID of the share to delete
* @ throws SmartsheetException the smartsheet exception */
public void deleteShare ( long objectId , String shareId ) throws SmartsheetException { } } | this . deleteResource ( getMasterResourceType ( ) + "/" + objectId + "/shares/" + shareId , Share . class ) ; |
public class PropertiesEscape { /** * Perform a ( configurable ) Java Properties Key < strong > escape < / strong > operation on a < tt > String < / tt > input ,
* writing results to a < tt > Writer < / tt > .
* This method will perform an escape operation according to the specified
* { @ link org . unbescape . properties . PropertiesKeyEscapeLevel } argument value .
* All other < tt > String < / tt > / < tt > Writer < / tt > - based < tt > escapePropertiesKey * ( . . . ) < / tt > methods call this one with
* preconfigured < tt > level < / tt > values .
* This method is < strong > thread - safe < / strong > .
* @ param text the < tt > String < / tt > to be escaped .
* @ param writer the < tt > java . io . Writer < / tt > to which the escaped result will be written . Nothing will
* be written at all to this writer if input is < tt > null < / tt > .
* @ param level the escape level to be applied , see { @ link org . unbescape . properties . PropertiesKeyEscapeLevel } .
* @ throws IOException if an input / output exception occurs */
public static void escapePropertiesKey ( final String text , final Writer writer , final PropertiesKeyEscapeLevel level ) throws IOException { } } | if ( writer == null ) { throw new IllegalArgumentException ( "Argument 'writer' cannot be null" ) ; } if ( level == null ) { throw new IllegalArgumentException ( "The 'level' argument cannot be null" ) ; } PropertiesKeyEscapeUtil . escape ( new InternalStringReader ( text ) , writer , level ) ; |
public class Hermes { /** * For a given interface and locale , retrieves the GWT i18n interface as a
* dynamic proxy for use on the server - side . If no locale is given , the
* default properties file will be loaded . This method caches proxy classes
* that it has created so it is safe to call multiple times . Locale IDs
* are case - sensitive ( i . e . en _ us is not the same as en _ US ) .
* @ param clazz The GWT i18n interface to get the proxy for .
* @ param localeStr The locale ID string for the locale being requested .
* @ return A dynamic proxy representing the given GWT i18n interface and
* locale .
* @ throws IOException If an error occurs finding , opening , or reading the
* GWT properties file associated with the given interface . */
@ SuppressWarnings ( "unchecked" ) public static < T > T get ( Class < T > clazz , String localeStr ) throws IOException { } } | if ( clazz == null ) { throw new IllegalArgumentException ( "Class cannot be null." ) ; } ULocale locale = null ; if ( localeStr != null && ! localeStr . isEmpty ( ) ) { locale = ULocale . createCanonical ( localeStr ) ; } LocaleMapKey key = new LocaleMapKey ( clazz . getName ( ) , locale ) ; T proxy = ( T ) cache . get ( key ) ; if ( proxy == null ) { proxy = createProxy ( clazz , locale ) ; cache . put ( key , proxy ) ; } return proxy ; |
public class CommerceDiscountPersistenceImpl { /** * Returns the first commerce discount in the ordered set where uuid = & # 63 ; .
* @ param uuid the uuid
* @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > )
* @ return the first matching commerce discount
* @ throws NoSuchDiscountException if a matching commerce discount could not be found */
@ Override public CommerceDiscount findByUuid_First ( String uuid , OrderByComparator < CommerceDiscount > orderByComparator ) throws NoSuchDiscountException { } } | CommerceDiscount commerceDiscount = fetchByUuid_First ( uuid , orderByComparator ) ; if ( commerceDiscount != null ) { return commerceDiscount ; } StringBundler msg = new StringBundler ( 4 ) ; msg . append ( _NO_SUCH_ENTITY_WITH_KEY ) ; msg . append ( "uuid=" ) ; msg . append ( uuid ) ; msg . append ( "}" ) ; throw new NoSuchDiscountException ( msg . toString ( ) ) ; |
public class SessionImpl { /** * This creates the first meta schema in an empty keyspace which has not been initialised yet
* @ param tx */
private void initialiseMetaConcepts ( TransactionOLTP tx ) { } } | VertexElement type = tx . addTypeVertex ( Schema . MetaSchema . THING . getId ( ) , Schema . MetaSchema . THING . getLabel ( ) , Schema . BaseType . TYPE ) ; VertexElement entityType = tx . addTypeVertex ( Schema . MetaSchema . ENTITY . getId ( ) , Schema . MetaSchema . ENTITY . getLabel ( ) , Schema . BaseType . ENTITY_TYPE ) ; VertexElement relationType = tx . addTypeVertex ( Schema . MetaSchema . RELATION . getId ( ) , Schema . MetaSchema . RELATION . getLabel ( ) , Schema . BaseType . RELATION_TYPE ) ; VertexElement resourceType = tx . addTypeVertex ( Schema . MetaSchema . ATTRIBUTE . getId ( ) , Schema . MetaSchema . ATTRIBUTE . getLabel ( ) , Schema . BaseType . ATTRIBUTE_TYPE ) ; tx . addTypeVertex ( Schema . MetaSchema . ROLE . getId ( ) , Schema . MetaSchema . ROLE . getLabel ( ) , Schema . BaseType . ROLE ) ; tx . addTypeVertex ( Schema . MetaSchema . RULE . getId ( ) , Schema . MetaSchema . RULE . getLabel ( ) , Schema . BaseType . RULE ) ; relationType . property ( Schema . VertexProperty . IS_ABSTRACT , true ) ; resourceType . property ( Schema . VertexProperty . IS_ABSTRACT , true ) ; entityType . property ( Schema . VertexProperty . IS_ABSTRACT , true ) ; relationType . addEdge ( type , Schema . EdgeLabel . SUB ) ; resourceType . addEdge ( type , Schema . EdgeLabel . SUB ) ; entityType . addEdge ( type , Schema . EdgeLabel . SUB ) ; |
public class ConfigElement { /** * Sometimes , the JACL representation of a null or empty value includes
* quotation marks . Calling this method will parse away the extra JACL
* syntax and return a real or null value
* @ param value
* an unchecked input value
* @ return the real value described by the input value */
public static String getValue ( String value ) { } } | if ( value == null ) { return null ; } String v = removeQuotes ( value . trim ( ) ) . trim ( ) ; if ( v . isEmpty ( ) ) { return null ; } return v ; |
public class LumberjackClient { /** * Creates a socket using TLS protocol and connects it
* to the specified host and port .
* @ throws IOException
* @ throws UnknownHostException */
public void connect ( String hostName , int port ) throws UnknownHostException , IOException { } } | if ( ! sslHelper . isSocketAvailable ( ) ) { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEventEnabled ( ) ) Tr . event ( tc , "creating/recreating socket connection to " + hostName + ":" + port ) ; SSLSocket socket = sslHelper . createSocket ( hostName , port ) ; in = new BufferedInputStream ( socket . getInputStream ( ) ) ; out = new BufferedOutputStream ( socket . getOutputStream ( ) ) ; // seqNumber must only be reset to 1 when connection is initialized ( as in this case )
seqNumber = 1 ; // new connection - reset lastUsedTime
lastUsedTime = System . currentTimeMillis ( ) ; } |
public class Description { /** * Returns a new builder for { @ link Description } s . */
public static Builder builder ( JCTree tree , String name , @ Nullable String link , SeverityLevel severity , String message ) { } } | return new Builder ( tree , name , link , severity , message ) ; |
public class Gradient { /** * Split a span into two by adding a knot in the middle .
* @ param n the span index */
public void splitSpan ( int n ) { } } | int x = ( xKnots [ n ] + xKnots [ n + 1 ] ) / 2 ; addKnot ( x , getColor ( x / 256.0f ) , knotTypes [ n ] ) ; rebuildGradient ( ) ; |
public class FutureImpl { /** * { @ inheritDoc } */
public R get ( long timeout , TimeUnit unit ) throws InterruptedException , ExecutionException , TimeoutException { } } | long startTime = System . currentTimeMillis ( ) ; long timeoutMillis = TimeUnit . MILLISECONDS . convert ( timeout , unit ) ; synchronized ( this . sync ) { for ( ; ; ) { if ( this . isDone ) { if ( this . isCancelled ) { throw new CancellationException ( ) ; } else if ( this . failure != null ) { throw new ExecutionException ( this . failure ) ; } else if ( this . result != null ) { return this . result ; } } else if ( System . currentTimeMillis ( ) - startTime > timeoutMillis ) { throw new TimeoutException ( ) ; } this . sync . wait ( timeoutMillis ) ; } } |
public class AnalyzedTokenReadings { /** * Used to configure the internal variable for lemma equality .
* @ return true if all { @ link AnalyzedToken } lemmas are the same .
* @ since 2.5 */
private boolean areLemmasSame ( ) { } } | String previousLemma = anTokReadings [ 0 ] . getLemma ( ) ; if ( previousLemma == null ) { for ( AnalyzedToken element : anTokReadings ) { if ( element . getLemma ( ) != null ) { return false ; } } return true ; } for ( AnalyzedToken element : anTokReadings ) { if ( ! previousLemma . equals ( element . getLemma ( ) ) ) { return false ; } } return true ; |
public class ContinuousDistributions {
    /**
     * Returns the z score (inverse standard normal CDF) of a given p-value,
     * using Acklam's rational approximation with separate formulas for the
     * lower tail, the central region, and the upper tail.
     * Partially ported from
     * http://home.online.no/~pjacklam/notes/invnorm/impl/karimov/StatUtil.java;
     * other implementations at
     * http://home.online.no/~pjacklam/notes/invnorm/index.html#Java
     *
     * @param p the probability in [0, 1]
     * @return the z score; +/-infinity at the endpoints, NaN for invalid input
     */
    public static double gaussInverseCdf(double p) {
        // Break-points separating the tail and central approximations.
        final double P_LOW = 0.02425D;
        final double P_HIGH = 1.0D - P_LOW;
        // Acklam's rational-approximation coefficients.
        final double[] ICDF_A = {-3.969683028665376e+01, 2.209460984245205e+02, -2.759285104469687e+02, 1.383577518672690e+02, -3.066479806614716e+01, 2.506628277459239e+00};
        final double[] ICDF_B = {-5.447609879822406e+01, 1.615858368580409e+02, -1.556989798598866e+02, 6.680131188771972e+01, -1.328068155288572e+01};
        final double[] ICDF_C = {-7.784894002430293e-03, -3.223964580411365e-01, -2.400758277161838e+00, -2.549732539343734e+00, 4.374664141464968e+00, 2.938163982698783e+00};
        final double[] ICDF_D = {7.784695709041462e-03, 3.224671290700398e-01, 2.445134137142996e+00, 3.754408661907416e+00};

        final double result;
        if (p == 0) {
            result = Double.NEGATIVE_INFINITY;
        } else if (p == 1) {
            result = Double.POSITIVE_INFINITY;
        } else if (Double.isNaN(p) || p < 0 || p > 1) {
            result = Double.NaN;
        } else if (p < P_LOW) {
            // Rational approximation for the lower tail.
            final double t = Math.sqrt(-2 * Math.log(p));
            result = (((((ICDF_C[0] * t + ICDF_C[1]) * t + ICDF_C[2]) * t + ICDF_C[3]) * t + ICDF_C[4]) * t + ICDF_C[5])
                    / ((((ICDF_D[0] * t + ICDF_D[1]) * t + ICDF_D[2]) * t + ICDF_D[3]) * t + 1);
        } else if (P_HIGH < p) {
            // Rational approximation for the upper tail (mirror of the lower).
            final double t = Math.sqrt(-2 * Math.log(1 - p));
            result = -(((((ICDF_C[0] * t + ICDF_C[1]) * t + ICDF_C[2]) * t + ICDF_C[3]) * t + ICDF_C[4]) * t + ICDF_C[5])
                    / ((((ICDF_D[0] * t + ICDF_D[1]) * t + ICDF_D[2]) * t + ICDF_D[3]) * t + 1);
        } else {
            // Rational approximation for the central region.
            final double q = p - 0.5D;
            final double r = q * q;
            result = (((((ICDF_A[0] * r + ICDF_A[1]) * r + ICDF_A[2]) * r + ICDF_A[3]) * r + ICDF_A[4]) * r + ICDF_A[5]) * q
                    / (((((ICDF_B[0] * r + ICDF_B[1]) * r + ICDF_B[2]) * r + ICDF_B[3]) * r + ICDF_B[4]) * r + 1);
        }
        return result;
    }
}
public class TransformerHandlerImpl { /** * Report the end of a CDATA section .
* @ throws SAXException The application may raise an exception .
* @ see # startCDATA */
public void endCDATA ( ) throws SAXException { } } | if ( DEBUG ) System . out . println ( "TransformerHandlerImpl#endCDATA" ) ; if ( null != m_lexicalHandler ) { m_lexicalHandler . endCDATA ( ) ; } |
public class Config { /** * Returns the { @ link PNCounterConfig } for the given name , creating one
* if necessary and adding it to the collection of known configurations .
* The configuration is found by matching the configuration name
* pattern to the provided { @ code name } without the partition qualifier
* ( the part of the name after { @ code ' @ ' } ) .
* If no configuration matches , it will create one by cloning the
* { @ code " default " } configuration and add it to the configuration
* collection .
* This method is intended to easily and fluently create and add
* configurations more specific than the default configuration without
* explicitly adding it by invoking
* { @ link # addPNCounterConfig ( PNCounterConfig ) } .
* Because it adds new configurations if they are not already present ,
* this method is intended to be used before this config is used to
* create a hazelcast instance . Afterwards , newly added configurations
* may be ignored .
* @ param name name of the PN counter config
* @ return the PN counter configuration
* @ throws ConfigurationException if ambiguous configurations are found
* @ see StringPartitioningStrategy # getBaseName ( java . lang . String )
* @ see # setConfigPatternMatcher ( ConfigPatternMatcher )
* @ see # getConfigPatternMatcher ( ) */
public PNCounterConfig getPNCounterConfig ( String name ) { } } | return ConfigUtils . getConfig ( configPatternMatcher , pnCounterConfigs , name , PNCounterConfig . class ) ; |
public class UpdateDefaultBranchRequestMarshaller {

    /**
     * Marshall the given parameter object into the protocol marshaller.
     *
     * @param updateDefaultBranchRequest the request to marshall; must not be null
     * @param protocolMarshaller the marshaller receiving the field values
     * @throws SdkClientException if the request is null or any field fails to marshall
     */
    public void marshall(UpdateDefaultBranchRequest updateDefaultBranchRequest, ProtocolMarshaller protocolMarshaller) {
        // Fail fast on a null request rather than NPE-ing mid-marshall.
        if (updateDefaultBranchRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Each field is written using its pre-built marshalling binding.
            protocolMarshaller.marshall(updateDefaultBranchRequest.getRepositoryName(), REPOSITORYNAME_BINDING);
            protocolMarshaller.marshall(updateDefaultBranchRequest.getDefaultBranchName(), DEFAULTBRANCHNAME_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure in the SDK's client exception, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class Hierarchy { /** * Find a method in given class .
* @ param javaClass
* the class
* @ param methodName
* the name of the method
* @ param methodSig
* the signature of the method
* @ return the JavaClassAndMethod , or null if no such method exists in the
* class */
@ Deprecated public static @ CheckForNull JavaClassAndMethod findConcreteMethod ( JavaClass javaClass , String methodName , String methodSig ) { } } | if ( DEBUG_METHOD_LOOKUP ) { System . out . println ( "Check " + javaClass . getClassName ( ) ) ; } Method [ ] methodList = javaClass . getMethods ( ) ; for ( Method method : methodList ) { if ( method . getName ( ) . equals ( methodName ) && method . getSignature ( ) . equals ( methodSig ) && accessFlagsAreConcrete ( method . getAccessFlags ( ) ) ) { JavaClassAndMethod m = new JavaClassAndMethod ( javaClass , method ) ; return m ; } } if ( DEBUG_METHOD_LOOKUP ) { System . out . println ( "\t==> NOT FOUND" ) ; } return null ; |
public class CmsStaticExportManager {

    /**
     * Returns <code>true</code> if the given VFS resource that is located under the
     * given site root should be transported through a secure channel.<p>
     *
     * @param cms the current users OpenCms context
     * @param vfsName the VFS resource name to check
     * @param siteRoot the site root where the VFS resource should be read
     * @return <code>true</code> if the given VFS resource should be transported through a secure channel
     * @see #isSecureLink(CmsObject, String)
     */
    public boolean isSecureLink(CmsObject cms, String vfsName, String siteRoot) {
        // Delegates to the 4-argument overload with a hard-coded false flag.
        // NOTE(review): the meaning of the boolean is not visible here — confirm
        // against the isSecureLink(CmsObject, String, String, boolean) declaration.
        return isSecureLink(cms, vfsName, siteRoot, false);
    }
}
public class AppServiceEnvironmentsInner { /** * Get all multi - role pools .
* Get all multi - role pools .
* @ param resourceGroupName Name of the resource group to which the resource belongs .
* @ param name Name of the App Service Environment .
* @ param serviceCallback the async ServiceCallback to handle successful and failed responses .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the { @ link ServiceFuture } object */
public ServiceFuture < List < WorkerPoolResourceInner > > listMultiRolePoolsAsync ( final String resourceGroupName , final String name , final ListOperationCallback < WorkerPoolResourceInner > serviceCallback ) { } } | return AzureServiceFuture . fromPageResponse ( listMultiRolePoolsSinglePageAsync ( resourceGroupName , name ) , new Func1 < String , Observable < ServiceResponse < Page < WorkerPoolResourceInner > > > > ( ) { @ Override public Observable < ServiceResponse < Page < WorkerPoolResourceInner > > > call ( String nextPageLink ) { return listMultiRolePoolsNextSinglePageAsync ( nextPageLink ) ; } } , serviceCallback ) ; |
public class LabelService { /** * Deletes the label with the given key .
* @ param key The key of the label to delete
* @ return This object */
public LabelService delete ( String key ) { } } | HTTP . DELETE ( String . format ( "/v2/labels/%s.json" , encode ( key ) ) ) ; return this ; |
public class AndroidEventBuilderHelper { /** * Checks whether or not the device is currently plugged in and charging , or null if unknown .
* @ param ctx Android application context
* @ return whether or not the device is currently plugged in and charging , or null if unknown */
protected static Boolean isCharging ( Context ctx ) { } } | try { Intent intent = ctx . registerReceiver ( null , new IntentFilter ( Intent . ACTION_BATTERY_CHANGED ) ) ; if ( intent == null ) { return null ; } int plugged = intent . getIntExtra ( BatteryManager . EXTRA_PLUGGED , - 1 ) ; return plugged == BatteryManager . BATTERY_PLUGGED_AC || plugged == BatteryManager . BATTERY_PLUGGED_USB ; } catch ( Exception e ) { Log . e ( TAG , "Error getting device charging state." , e ) ; return null ; } |
public class AuthorizationCodeHandler {

    /**
     * Handles the OAuth/OIDC authorization code: validates the response state,
     * then calls the token endpoint to exchange the code for tokens, and
     * optionally retrieves userinfo.
     *
     * @param req the current servlet request
     * @param res the current servlet response
     * @param authzCode the authorization code received from the provider
     * @param responseState the state value returned by the provider, checked against the original request
     * @param clientConfig the converged client configuration for this OIDC client
     * @return the authentication result; a 401 result on HTTPS/SSL/state failures,
     *         otherwise the result of token validation (plus userinfo when configured)
     */
    public ProviderAuthenticationResult handleAuthorizationCode(HttpServletRequest req, HttpServletResponse res, String authzCode, String responseState, ConvergedClientConfig clientConfig) {
        String clientId = clientConfig.getClientId();
        OidcClientRequest oidcClientRequest = (OidcClientRequest) req.getAttribute(ClientConstants.ATTRIB_OIDC_CLIENT_REQUEST);
        ProviderAuthenticationResult oidcResult = null;
        // State verification returns non-null only on failure.
        oidcResult = authenticatorUtil.verifyResponseState(req, res, responseState, clientConfig);
        if (oidcResult != null) {
            return oidcResult; // only if something bad happened, otherwise proceed to exchange auth code for tokens.
        }
        // Enforce HTTPS on the token endpoint if the config requires it.
        if (!OIDCClientAuthenticatorUtil.checkHttpsRequirement(clientConfig, clientConfig.getTokenEndpointUrl())) {
            Tr.error(tc, "OIDC_CLIENT_URL_PROTOCOL_NOT_HTTPS", clientConfig.getTokenEndpointUrl());
            oidcResult = new ProviderAuthenticationResult(AuthResult.SEND_401, HttpServletResponse.SC_UNAUTHORIZED);
            return oidcResult;
        }
        // Enforce HTTPS on the redirect URL as well.
        String redirect_url = authenticatorUtil.setRedirectUrlIfNotDefined(req, clientConfig);
        if (!OIDCClientAuthenticatorUtil.checkHttpsRequirement(clientConfig, redirect_url)) {
            Tr.error(tc, "OIDC_CLIENT_URL_PROTOCOL_NOT_HTTPS", redirect_url);
            oidcResult = new ProviderAuthenticationResult(AuthResult.SEND_401, HttpServletResponse.SC_UNAUTHORIZED);
            return oidcResult;
        }
        // Build the SSL socket factory used for the token endpoint call.
        SSLSocketFactory sslSocketFactory = null;
        try {
            sslSocketFactory = getSSLSocketFactory(clientConfig.getTokenEndpointUrl(), clientConfig.getSSLConfigurationName(), clientId);
        } catch (SSLException e) {
            Tr.error(tc, "OIDC_CLIENT_HTTPS_WITH_SSLCONTEXT_NULL", new Object[] { e.getMessage() != null ? e.getMessage() : "invalid ssl context", clientConfig.getClientId() });
            return new ProviderAuthenticationResult(AuthResult.SEND_401, HttpServletResponse.SC_UNAUTHORIZED);
        }
        // go get the tokens and validate them.
        try {
            String url = clientConfig.getTokenEndpointUrl();
            if (url == null || url.length() == 0) {
                throw new MalformedURLException("MalformedURLException");
            }
            HashMap<String, String> tokens = oidcClientUtil.getTokensFromAuthzCode(url, clientId, clientConfig.getClientSecret(), redirect_url, authzCode, clientConfig.getGrantType(), sslSocketFactory, clientConfig.isHostNameVerificationEnabled(), clientConfig.getTokenEndpointAuthMethod(), OIDCClientAuthenticatorUtil.getResources(clientConfig), clientConfig.getTokenRequestParams(), clientConfig.getUseSystemPropertiesForHttpClientConnections());
            oidcClientRequest.setTokenType(ClientConstants.TYPE_ID_TOKEN);
            // this has a LOT of dependencies.
            oidcResult = jose4jUtil.createResultWithJose4J(responseState, tokens, clientConfig, oidcClientRequest);
            // if tokens were valid, go get the userinfo if configured to do so, and update the authentication result to include it.
            UserInfoHelper uih = new UserInfoHelper(clientConfig);
            if (uih.willRetrieveUserInfo()) {
                OidcTokenImplBase idToken = null;
                if (oidcResult.getCustomProperties() != null) {
                    idToken = (OidcTokenImplBase) oidcResult.getCustomProperties().get(Constants.ID_TOKEN_OBJECT);
                }
                String subjFromIdToken = null;
                if (idToken != null) {
                    subjFromIdToken = idToken.getSubject();
                }
                // Userinfo is only fetched when a subject could be extracted from the ID token.
                if (subjFromIdToken != null) {
                    uih.getUserInfo(oidcResult, sslSocketFactory, tokens.get(Constants.ACCESS_TOKEN), subjFromIdToken);
                }
            }
        } catch (BadPostRequestException e) {
            // Token endpoint rejected the request; surface the provider's error as JSON.
            Tr.error(tc, "OIDC_CLIENT_TOKEN_REQUEST_FAILURE", new Object[] { e.getErrorMessage(), clientId, clientConfig.getTokenEndpointUrl() });
            sendErrorJSON(res, e.getStatusCode(), "invalid_request", e.getErrorMessage());
            oidcResult = new ProviderAuthenticationResult(AuthResult.FAILURE, e.getStatusCode());
        } catch (Exception e) {
            // Any other failure during the exchange results in a 401.
            Tr.error(tc, "OIDC_CLIENT_TOKEN_REQUEST_FAILURE", new Object[] { e.getLocalizedMessage(), clientId, clientConfig.getTokenEndpointUrl() });
            oidcResult = new ProviderAuthenticationResult(AuthResult.SEND_401, HttpServletResponse.SC_UNAUTHORIZED);
        }
        return oidcResult;
    }
}
public class FixedRedirectCookieAuthenticator { /** * Processes the login request .
* @ param request
* The current request .
* @ param response
* The current response . */
protected void login ( final Request request , final Response response ) { } } | // Login detected
final Form form = new Form ( request . getEntity ( ) ) ; final Parameter identifier = form . getFirst ( this . getIdentifierFormName ( ) ) ; final Parameter secret = form . getFirst ( this . getSecretFormName ( ) ) ; // Set credentials
final ChallengeResponse cr = new ChallengeResponse ( this . getScheme ( ) , identifier != null ? identifier . getValue ( ) : null , secret != null ? secret . getValue ( ) : null ) ; request . setChallengeResponse ( cr ) ; this . log . info ( "calling attemptRedirect after login" ) ; // Attempt to redirect
this . attemptRedirect ( request , response , form ) ; |
public class BlockMasterInfo {

    /**
     * Creates a new instance of {@link BlockMasterInfo} from a proto representation.
     *
     * @param info the proto representation of a block master information
     * @return the instance, populated field-by-field from the proto via the fluent setters
     */
    public static BlockMasterInfo fromProto(alluxio.grpc.BlockMasterInfo info) {
        // Copies every field from the proto; the chained setter result is the
        // fully-populated instance.
        return new BlockMasterInfo().setCapacityBytes(info.getCapacityBytes()).setCapacityBytesOnTiers(info.getCapacityBytesOnTiersMap()).setFreeBytes(info.getFreeBytes()).setLiveWorkerNum(info.getLiveWorkerNum()).setLostWorkerNum(info.getLostWorkerNum()).setUsedBytes(info.getUsedBytes()).setUsedBytesOnTiers(info.getUsedBytesOnTiersMap());
    }
}
public class AdminKeymatchAction { public static OptionalEntity < KeyMatch > getEntity ( final CreateForm form , final String username , final long currentTime ) { } } | switch ( form . crudMode ) { case CrudMode . CREATE : return OptionalEntity . of ( new KeyMatch ( ) ) . map ( entity -> { entity . setCreatedBy ( username ) ; entity . setCreatedTime ( currentTime ) ; return entity ; } ) ; case CrudMode . EDIT : if ( form instanceof EditForm ) { return ComponentUtil . getComponent ( KeyMatchService . class ) . getKeyMatch ( ( ( EditForm ) form ) . id ) ; } break ; default : break ; } return OptionalEntity . empty ( ) ; |
public class CheckEJBAppConfigHelper { /** * Checks whether validation messages should be logged or not . < p >
* This is determined by checking the following :
* < ul >
* < li > checkEJBApplicationConfiguration system property
* < li > Metadata trace component is set to the debug or lower level
* < li > EJBContainer trace component is set to the debug or lower level
* < li > Injection trace component is set to the debug or lower level
* < li > development mode server configuration
* < / ul >
* If any of these conditions are true , then this method will return true .
* Otherwise it will return false . < p >
* In general , an application should be allowed to start and run as long
* as configuration issues found are not likely to cause the application
* to behave unexpectedly or result in data integrity issues . < p >
* Those configuration issues that may occur additional performance
* overhead to determine should use this method to determine if
* they should be checked for and logged as a warning . < p >
* @ return true if the system property is set to true OR if
* < code > tc . isDebugEnabled ( ) < / code > is true OR development mode server */
public static boolean isValidationLoggable ( ) { } } | if ( svCheckEJBAppConfig || svDevelopmentMode || ( TraceComponent . isAnyTracingEnabled ( ) && ( metadataTc . isDebugEnabled ( ) || ejbTc . isDebugEnabled ( ) || injTc . isDebugEnabled ( ) ) ) ) { return true ; } return false ; |
public class CmsContentService {

    /**
     * Reads the content definition for the given resource and locale.<p>
     *
     * @param file the resource file
     * @param content the XML content
     * @param entityId the entity id
     * @param clientId the container element client id if available
     * @param locale the content locale
     * @param newLocale if the locale content should be created as new
     * @param mainLocale the main language to copy in case the element language node does not exist yet
     * @param editedLocaleEntity the edited locale entity
     * @param settingPresets the presets for settings
     * @return the content definition
     * @throws CmsException if something goes wrong
     */
    private CmsContentDefinition readContentDefinition(CmsFile file, CmsXmlContent content, String entityId, String clientId, Locale locale, boolean newLocale, Locale mainLocale, CmsEntity editedLocaleEntity, Map<String, String> settingPresets) throws CmsException {
        long timer = 0;
        if (LOG.isDebugEnabled()) {
            timer = System.currentTimeMillis();
        }
        CmsObject cms = getCmsObject();
        // Fall back to the best matching locale when the requested one is not available for this file.
        List<Locale> availableLocalesList = OpenCms.getLocaleManager().getAvailableLocales(cms, file);
        if (!availableLocalesList.contains(locale)) {
            availableLocalesList.retainAll(content.getLocales());
            List<Locale> defaultLocales = OpenCms.getLocaleManager().getDefaultLocales(cms, file);
            Locale replacementLocale = OpenCms.getLocaleManager().getBestMatchingLocale(locale, defaultLocales, availableLocalesList);
            LOG.info("Can't edit locale " + locale + " of file " + file.getRootPath() + " because it is not configured as available locale. Using locale " + replacementLocale + " instead.");
            locale = replacementLocale;
            entityId = CmsContentDefinition.uuidToEntityId(file.getStructureId(), locale.toString());
        }
        if (CmsStringUtil.isEmptyOrWhitespaceOnly(entityId)) {
            entityId = CmsContentDefinition.uuidToEntityId(file.getStructureId(), locale.toString());
        }
        // Auto-correct the XML document if needed and re-initialize it.
        boolean performedAutoCorrection = checkAutoCorrection(cms, content);
        if (performedAutoCorrection) {
            content.initDocument();
        }
        if (LOG.isDebugEnabled()) {
            LOG.debug(Messages.get().getBundle().key(Messages.LOG_TAKE_UNMARSHALING_TIME_1, "" + (System.currentTimeMillis() - timer)));
        }
        // Collect type/attribute/tab metadata for the editor.
        CmsContentTypeVisitor visitor = new CmsContentTypeVisitor(cms, file, locale);
        if (LOG.isDebugEnabled()) {
            timer = System.currentTimeMillis();
        }
        visitor.visitTypes(content.getContentDefinition(), getWorkplaceLocale(cms));
        if (LOG.isDebugEnabled()) {
            LOG.debug(Messages.get().getBundle().key(Messages.LOG_TAKE_VISITING_TYPES_TIME_1, "" + (System.currentTimeMillis() - timer)));
        }
        CmsEntity entity = null;
        Map<String, String> syncValues = new HashMap<String, String>();
        Collection<String> skipPaths = new HashSet<String>();
        evaluateSyncLocaleValues(content, syncValues, skipPaths);
        if (content.hasLocale(locale) && newLocale) {
            // a new locale is requested, so remove the present one
            content.removeLocale(locale);
        }
        if (!content.hasLocale(locale)) {
            // Create the locale, copying from the main locale when available.
            if ((mainLocale != null) && content.hasLocale(mainLocale)) {
                content.copyLocale(mainLocale, locale);
            } else {
                content.addLocale(cms, locale);
            }
            // sync the locale values
            if (!visitor.getLocaleSynchronizations().isEmpty() && (content.getLocales().size() > 1)) {
                for (Locale contentLocale : content.getLocales()) {
                    if (!contentLocale.equals(locale)) {
                        content.synchronizeLocaleIndependentValues(cms, skipPaths, contentLocale);
                    }
                }
            }
        }
        Element element = content.getLocaleNode(locale);
        if (LOG.isDebugEnabled()) {
            timer = System.currentTimeMillis();
        }
        entity = readEntity(content, element, locale, entityId, "", getTypeUri(content.getContentDefinition()), visitor, false, editedLocaleEntity);
        if (LOG.isDebugEnabled()) {
            LOG.debug(Messages.get().getBundle().key(Messages.LOG_TAKE_READING_ENTITY_TIME_1, "" + (System.currentTimeMillis() - timer)));
        }
        List<String> contentLocales = new ArrayList<String>();
        for (Locale contentLocale : content.getLocales()) {
            contentLocales.add(contentLocale.toString());
        }
        Locale workplaceLocale = OpenCms.getWorkplaceManager().getWorkplaceLocale(cms);
        TreeMap<String, String> availableLocales = new TreeMap<String, String>();
        for (Locale availableLocale : OpenCms.getLocaleManager().getAvailableLocales(cms, file)) {
            availableLocales.put(availableLocale.toString(), availableLocale.getDisplayName(workplaceLocale));
        }
        // Prefer the gallery search title over the raw title property when available.
        String title = cms.readPropertyObject(file, CmsPropertyDefinition.PROPERTY_TITLE, false).getValue();
        try {
            CmsGallerySearchResult searchResult = CmsGallerySearch.searchById(cms, file.getStructureId(), locale);
            title = searchResult.getTitle();
        } catch (CmsException e) {
            LOG.warn(e.getLocalizedMessage(), e);
        }
        String typeName = OpenCms.getResourceManager().getResourceType(file.getTypeId()).getTypeName();
        boolean autoUnlock = OpenCms.getWorkplaceManager().shouldAcaciaUnlock();
        Map<String, CmsEntity> entities = new HashMap<String, CmsEntity>();
        entities.put(entityId, entity);
        Map<String, CmsAttributeConfiguration> attrConfig = visitor.getAttributeConfigurations();
        Map<String, CmsType> types = visitor.getTypes();
        List<CmsTabInfo> tabInfos = visitor.getTabInfos();
        // When editing inside a container page, merge formatter settings into the editor model.
        if (clientId != null) {
            CmsContainerElementBean containerElement = getSessionCache().getCacheContainerElement(clientId);
            I_CmsFormatterBean formatter = getFormatterForElement(containerElement);
            if ((formatter != null) && formatter.isAllowsSettingsInEditor() && (formatter.getSettings() != null) && !formatter.getSettings().isEmpty()) {
                Map<String, CmsXmlContentProperty> settingsConfig = OpenCms.getADEManager().getFormatterSettings(cms, formatter, containerElement.getResource(), locale, getRequest());
                // Lazily unmarshal the container element's content only when macro resolution needs it.
                com.google.common.base.Supplier<CmsXmlContent> contentSupplier = Suppliers.memoize(() -> {
                    try {
                        return CmsXmlContentFactory.unmarshal(cms, cms.readFile(containerElement.getResource()));
                    } catch (CmsException e) {
                        LOG.error(e.getLocalizedMessage(), e);
                        return null;
                    }
                });
                settingsConfig = CmsXmlContentPropertyHelper.resolveMacrosForPropertyInfo(cms, null, containerElement.getResource(), contentSupplier, CmsElementUtil.createStringTemplateSource(formatter, contentSupplier), settingsConfig);
                CmsMessages messages = OpenCms.getWorkplaceManager().getMessages(m_workplaceLocale);
                List<I_CmsFormatterBean> nestedFormatters = formatter.hasNestedFormatterSettings() ? OpenCms.getADEManager().getNestedFormatters(cms, containerElement.getResource(), locale, getRequest()) : Collections.emptyList();
                String firstContentAttributeName = types.get(entity.getTypeName()).getAttributeNames().iterator().next();
                List<String> addedVisibleAttrs = addSettingsAttributes(attrConfig, settingsConfig, nestedFormatters, messages, locale, settingPresets);
                addSettingsTypes(entity.getTypeName(), types, settingsConfig, nestedFormatters);
                if (editedLocaleEntity != null) {
                    transferSettingValues(editedLocaleEntity, entity);
                } else {
                    addSettingsValues(entity, containerElement, nestedFormatters);
                }
                // Ensure there is a content tab, and add a settings tab when visible settings were added.
                if (tabInfos.isEmpty()) {
                    tabInfos.add(new CmsTabInfo(Messages.get().getBundle(workplaceLocale).key(Messages.GUI_CONTENT_TAB_LABEL_0), "content", firstContentAttributeName.substring(entity.getTypeName().length() + 1), false, null));
                }
                if (addedVisibleAttrs.size() > 0) {
                    tabInfos.add(new CmsTabInfo(Messages.get().getBundle(workplaceLocale).key(Messages.GUI_SETTINGS_TAB_LABEL_0), CmsContentDefinition.SETTINGS_TAB_ID, CmsFileUtil.removeLeadingSeparator(addedVisibleAttrs.iterator().next()), false, Messages.get().getBundle(workplaceLocale).key(Messages.GUI_SETTINGS_TAB_DESCRIPTION_0)));
                }
            }
        }
        return new CmsContentDefinition(entityId, entities, visitor.getAttributeConfigurations(), visitor.getWidgetConfigurations(), visitor.getComplexWidgetData(), visitor.getTypes(), visitor.getTabInfos(), locale.toString(), contentLocales, availableLocales, visitor.getLocaleSynchronizations(), syncValues, skipPaths, title, cms.getSitePath(file), typeName, CmsIconUtil.getIconClasses(CmsIconUtil.getDisplayType(cms, file), file.getName(), false), performedAutoCorrection, autoUnlock, getChangeHandlerScopes(content.getContentDefinition()));
    }
}
public class CassandraExecutor {

    /**
     * Deletes the specified properties if {@code deletingPropNames} is not null or empty,
     * otherwise deletes the whole record.
     *
     * @param targetClass the entity class mapped to the Cassandra table
     * @param deletingPropNames the properties to delete; null/empty means delete the entire record
     * @param ids the id value(s) identifying the record(s) to delete
     * @return the result set of the executed delete
     */
    @SafeVarargs
    public final ResultSet delete(final Class<?> targetClass, final Collection<String> deletingPropNames, final Object... ids) {
        // Convert the raw id values into a condition and delegate to the condition-based overload.
        return delete(targetClass, deletingPropNames, ids2Cond(targetClass, ids));
    }
}
public class SlidingEventTimeWindows { /** * Creates a new { @ code SlidingEventTimeWindows } { @ link WindowAssigner } that assigns
* elements to sliding time windows based on the element timestamp .
* @ param size The size of the generated windows .
* @ param slide The slide interval of the generated windows .
* @ return The time policy . */
public static SlidingEventTimeWindows of ( Time size , Time slide ) { } } | return new SlidingEventTimeWindows ( size . toMilliseconds ( ) , slide . toMilliseconds ( ) , 0 ) ; |
public class CoreStatusResponse { /** * Returns the date the server ( core module ) was last reloaded . < p >
* If either the date or time property is < code > null < / code > ( e . g . on Asterisk prior to 1.6.3 ) this method
* returns < code > null < / code > .
* @ param tz the time zone of the Asterisk server , < code > null < / code > to use the default time zone .
* @ return the date the server ( core module ) was last reloaded or < code > null < / code > if not available .
* @ see # getCoreReloadDate ( )
* @ see # getCoreReloadTime ( ) */
public Date getCoreReloadDateTimeAsDate ( TimeZone tz ) { } } | if ( coreReloadDate == null || coreReloadTime == null ) { return null ; } return DateUtil . parseDateTime ( coreReloadDate + " " + coreReloadTime , tz ) ; |
public class QueryObjectsRequestMarshaller {

    /**
     * Marshall the given parameter object into the protocol marshaller.
     *
     * @param queryObjectsRequest the request to marshall; must not be null
     * @param protocolMarshaller the marshaller receiving the field values
     * @throws SdkClientException if the request is null or any field fails to marshall
     */
    public void marshall(QueryObjectsRequest queryObjectsRequest, ProtocolMarshaller protocolMarshaller) {
        // Fail fast on a null request rather than NPE-ing mid-marshall.
        if (queryObjectsRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Each field is written using its pre-built marshalling binding.
            protocolMarshaller.marshall(queryObjectsRequest.getPipelineId(), PIPELINEID_BINDING);
            protocolMarshaller.marshall(queryObjectsRequest.getQuery(), QUERY_BINDING);
            protocolMarshaller.marshall(queryObjectsRequest.getSphere(), SPHERE_BINDING);
            protocolMarshaller.marshall(queryObjectsRequest.getMarker(), MARKER_BINDING);
            protocolMarshaller.marshall(queryObjectsRequest.getLimit(), LIMIT_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure in the SDK's client exception, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class RhythmicalContextConfig { @ Override protected void processAnnotations ( Set < WebXml > fragments , boolean handlesTypesOnly , Map < String , JavaClassCacheEntry > javaClassCache ) { } } | if ( isAnnotationHandlingDetect ( ) ) { super . processAnnotations ( fragments , handlesTypesOnly , javaClassCache ) ; } |
public class ControlBeanContextSupport {

    /**
     * Serialize all serializable children (unless this BeanContext has a peer).
     * Any children which are not serializable will not be present upon
     * deserialization. Also serialize any BeanContextMembership listeners which
     * are serializable.
     * The write order (default fields, then children/count, then listener count,
     * then listeners) must match the corresponding readObject.
     *
     * @param out ObjectOutputStream to serialize to.
     * @throws IOException if writing to the stream fails
     */
    private synchronized void writeObject(ObjectOutputStream out) throws IOException {
        // todo: for multithreaded usage this block needs to be synchronized
        out.defaultWriteObject();
        // spec: only write children if not using a peer
        if (this.equals(getPeer())) {
            writeChildren(out);
        } else {
            // No peer-less children serialized: write a zero child count placeholder.
            out.writeInt(0);
        }
        // write event handlers: first count the serializable listeners...
        int serializable = 0;
        for (BeanContextMembershipListener listener : _bcMembershipListeners) {
            if (listener instanceof Serializable) serializable++;
        }
        out.writeInt(serializable);
        // ...then write only those listeners that are serializable.
        if (serializable > 0) {
            for (BeanContextMembershipListener listener : _bcMembershipListeners) {
                if (listener instanceof Serializable) {
                    out.writeObject(listener);
                }
            }
        }
        // end synchronized block
    }
}
public class WAudioRenderer {

    /**
     * Paints the given WAudio as a {@code ui:audio} XML element, including its
     * attributes, preload/controls configuration and one {@code ui:src} child
     * per audio URL.
     *
     * @param component the WAudio to paint.
     * @param renderContext the RenderContext to paint to.
     */
    @Override
    public void doRender(final WComponent component, final WebXmlRenderContext renderContext) {
        WAudio audioComponent = (WAudio) component;
        XmlStringBuilder xml = renderContext.getWriter();
        Audio[] audio = audioComponent.getAudio();
        // Nothing to render without at least one audio track.
        if (audio == null || audio.length == 0) {
            return;
        }
        WAudio.Controls controls = audioComponent.getControls();
        int duration = audio[0].getDuration();
        // Check for alternative text
        String alternativeText = audioComponent.getAltText();
        if (alternativeText == null) {
            LOG.warn("Audio should have a description.");
            alternativeText = null; // NOTE(review): dead store — alternativeText is already null here
        } else {
            alternativeText = I18nUtilities.format(null, alternativeText);
        }
        xml.appendTagOpen("ui:audio");
        xml.appendAttribute("id", component.getId());
        xml.appendOptionalAttribute("class", component.getHtmlClass());
        xml.appendOptionalAttribute("track", component.isTracking(), "true");
        xml.appendOptionalAttribute("alt", alternativeText);
        xml.appendOptionalAttribute("autoplay", audioComponent.isAutoplay(), "true");
        xml.appendOptionalAttribute("mediagroup", audioComponent.getMediaGroup());
        xml.appendOptionalAttribute("loop", audioComponent.isLoop(), "true");
        xml.appendOptionalAttribute("hidden", audioComponent.isHidden(), "true");
        xml.appendOptionalAttribute("disabled", audioComponent.isDisabled(), "true");
        xml.appendOptionalAttribute("toolTip", audioComponent.getToolTip());
        xml.appendOptionalAttribute("duration", duration > 0, duration);
        // AUTO is the default preload mode, so no attribute is emitted for it.
        switch (audioComponent.getPreload()) {
            case NONE:
                xml.appendAttribute("preload", "none");
                break;
            case META_DATA:
                xml.appendAttribute("preload", "metadata");
                break;
            case AUTO:
            default:
                break;
        }
        // NATIVE controls emit no attribute; everything else is mapped explicitly.
        if (controls != null && !WAudio.Controls.NATIVE.equals(controls)) {
            switch (controls) {
                case NONE:
                    xml.appendAttribute("controls", "none");
                    break;
                case ALL:
                    xml.appendAttribute("controls", "all");
                    break;
                case PLAY_PAUSE:
                    xml.appendAttribute("controls", "play");
                    break;
                case DEFAULT:
                    xml.appendAttribute("controls", "default");
                    break;
                default:
                    LOG.error("Unknown control type: " + controls);
            }
        }
        xml.appendClose();
        // One ui:src element per audio URL, paired with its MIME type.
        String[] urls = audioComponent.getAudioUrls();
        for (int i = 0; i < urls.length; i++) {
            xml.appendTagOpen("ui:src");
            xml.appendUrlAttribute("uri", urls[i]);
            xml.appendOptionalAttribute("type", audio[i].getMimeType());
            xml.appendEnd();
        }
        xml.appendEndTag("ui:audio");
    }
}
public class DatabaseManager { /** * Used by in - process connections and by Servlet */
public static Session newSession ( String type , String path , String user , String password , HsqlProperties props , int timeZoneSeconds ) { } } | Database db = getDatabase ( type , path , props ) ; if ( db == null ) { return null ; } return db . connect ( user , password , timeZoneSeconds ) ; |
public class Http { /** * Executes a PATCH request .
* @ param url url of resource .
* @ param content content to be posted .
* @ param connectTimeout connection timeout in milliseconds .
* @ param readTimeout read timeout in milliseconds .
* @ return { @ link Patch } object . */
public static Patch patch ( String url , byte [ ] content , int connectTimeout , int readTimeout ) { } } | try { return new Patch ( url , content , connectTimeout , readTimeout ) ; } catch ( Exception e ) { throw new HttpException ( "Failed URL: " + url , e ) ; } |
public class AbstractConfig { /** * { @ inheritDoc } */
@ SuppressWarnings ( "unchecked" ) @ Override public < T > Optional < T > getOptionalValue ( String propertyName , Class < T > propertyType ) { } } | assertNotClosed ( ) ; SourcedValue sourced = getSourcedValue ( propertyName , propertyType ) ; T value = null ; if ( sourced != null ) { value = ( T ) sourced . getValue ( ) ; } Optional < T > optional = Optional . ofNullable ( value ) ; return optional ; |
public class DAOValidatorHelper {

    /**
     * Full-match pattern for "word" characters: letters, digits and underscore
     * ({@code \w}). Compiled once and cached instead of being recompiled on
     * every call.
     */
    private static final Pattern ALPHANUMERIC_PATTERN = Pattern.compile("\\w+");

    /**
     * Tests whether a string contains only alphanumeric characters.
     * Note: {@code \w} also accepts the underscore character.
     *
     * @param text the string to test
     * @return {@code true} if {@code text} is non-null, non-blank and consists
     *         entirely of word characters; {@code false} otherwise
     */
    public static boolean isAlphaNumericString(String text) {
        // matches() anchors on the whole input, so any whitespace or symbol
        // anywhere in the string makes the test fail.
        return (text != null)
                && (text.trim().length() > 0)
                && ALPHANUMERIC_PATTERN.matcher(text).matches();
    }
}
public class Range { /** * This method determines if the given { @ code value } is within this { @ link Range } from { @ link # getMin ( ) minimum } to
* { @ link # getMax ( ) maximum } .
* @ param value is the vale to check .
* @ return { @ code true } if contained ( { @ link # getMin ( ) minimum } & lt ; = { @ code value } & lt ; = { @ link # getMax ( ) maximum } ) . */
public boolean isContained ( V value ) { } } | NlsNullPointerException . checkNotNull ( "value" , value ) ; Comparator < ? super V > comparator = getComparator ( ) ; int delta ; if ( this . min != null ) { delta = comparator . compare ( value , this . min ) ; if ( delta < 0 ) { // value < min
return false ; } } if ( this . max != null ) { delta = comparator . compare ( value , this . max ) ; if ( delta > 0 ) { // value > max
return false ; } } return true ; |
public class RequestMessage { /** * @ see javax . servlet . ServletRequest # getServerPort ( ) */
@ Override public int getServerPort ( ) { } } | int port = this . request . getVirtualPort ( ) ; if ( - 1 == port && null != this . request . getHeader ( "Host" ) ) { // if Host is present , default to scheme
if ( "HTTP" . equalsIgnoreCase ( this . request . getScheme ( ) ) ) { port = 80 ; } else { port = 443 ; } } // if still not found , use the socket information
if ( - 1 == port ) { port = this . connection . getLocalPort ( ) ; } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "getServerPort: " + port ) ; } return port ; |
public class IndexAVL { /** * Return the first node equal to the rowdata object .
* The rowdata has the same column mapping as this table .
* @ param session session object
* @ param store store object
* @ param rowdata array containing table row data
* @ return iterator */
@ Override public RowIterator findFirstRow ( Session session , PersistentStore store , Object [ ] rowdata ) { } } | NodeAVL node = findNode ( session , store , rowdata , colIndex , colIndex . length ) ; return getIterator ( session , store , node ) ; |
public class AbstrCFMLScriptTransformer {

    /**
     * Reads a switch statement from the source cursor. (Original German doc:
     * "Liest ein switch Statement ein".)
     *
     * Grammar handled: {@code switch ( <expr> ) { case... [default...] case... } }
     * — note that case blocks are accepted both before AND after the optional
     * default block. Each parsing step advances the shared source cursor in
     * {@code data.srcCode}, so statement order here is significant.
     *
     * @param data parser state holding the source cursor
     * @return the parsed switch Statement, or {@code null} when the cursor is
     *         not positioned at a {@code switch(} token
     * @throws TemplateException on malformed switch syntax
     */
    private final Switch switchStatement(Data data) throws TemplateException {
        // Not a switch statement at all: leave the cursor untouched and bail out.
        if (!data.srcCode.forwardIfCurrent("switch", '(')) return null;
        // Remember the start position for the AST node.
        Position line = data.srcCode.getPosition();
        comments(data);
        // The switch selector expression.
        Expression expr = super.expression(data);
        comments(data);
        // end )
        if (!data.srcCode.forwardIfCurrent(')')) throw new TemplateException(data.srcCode, "switch statement must end with a [)]");
        comments(data);
        if (!data.srcCode.forwardIfCurrent('{')) throw new TemplateException(data.srcCode, "switch statement must have a starting [{]");
        Switch swit = new Switch(expr, line, null);
        // cases
        // Node child = null;
        comments(data);
        // Consume all case blocks preceding the default block.
        while (caseStatement(data, swit)) {
            comments(data);
        }
        // default
        if (defaultStatement(data, swit)) {
            comments(data);
        }
        // Case blocks may also appear after the default block.
        while (caseStatement(data, swit)) {
            comments(data);
        }
        if (!data.srcCode.forwardIfCurrent('}')) throw new TemplateException(data.srcCode, "invalid construct in switch statement");
        swit.setEnd(data.srcCode.getPosition());
        return swit;
    }
}
public class CmsContainerConfigurationParser {

    /**
     * Parses a single inheritance configuration from an XML content node and
     * stores the result in {@code m_currentConfigurationGroup} keyed by the
     * configuration name.<p>
     *
     * Reads, in order: the required NAME, an optional ORDERKEY list, VISIBLE /
     * HIDDEN element lists (merged into one visibility map), and NEWELEMENT
     * entries (key + element URI + properties). Entries with blank values are
     * silently skipped.
     *
     * @param location the node from which to read the single configuration
     */
    protected void parseSingleConfiguration(I_CmsXmlContentValueLocation location) {
        I_CmsXmlContentValueLocation nameLoc = location.getSubValue(N_NAME);
        // A configuration without a name node is ignored entirely.
        if (nameLoc == null) {
            return;
        }
        String name = nameLoc.asString(m_cms);
        if (CmsStringUtil.isEmptyOrWhitespaceOnly(name)) {
            return;
        }
        // Ordering stays null (not just empty) when no ORDERKEY nodes exist.
        List<String> ordering = null;
        List<I_CmsXmlContentValueLocation> orderKeyLocs = location.getSubValues(N_ORDERKEY);
        if (orderKeyLocs != null) {
            ordering = new ArrayList<String>();
            for (I_CmsXmlContentValueLocation orderKeyLoc : orderKeyLocs) {
                String orderKey = orderKeyLoc.asString(m_cms);
                if (CmsStringUtil.isNotEmptyOrWhitespaceOnly(orderKey)) {
                    ordering.add(orderKey.trim());
                }
            }
        }
        // Visibility map: TRUE for VISIBLE entries, FALSE for HIDDEN entries.
        Map<String, Boolean> visibilities = new HashMap<String, Boolean>();
        List<I_CmsXmlContentValueLocation> visibleLocs = location.getSubValues(N_VISIBLE);
        for (I_CmsXmlContentValueLocation visibleLoc : visibleLocs) {
            String visibleStr = visibleLoc.asString(m_cms);
            if (CmsStringUtil.isNotEmptyOrWhitespaceOnly(visibleStr)) {
                visibilities.put(visibleStr.trim(), Boolean.TRUE);
            }
        }
        List<I_CmsXmlContentValueLocation> hiddenLocs = location.getSubValues(N_HIDDEN);
        for (I_CmsXmlContentValueLocation hiddenLoc : hiddenLocs) {
            String hiddenStr = hiddenLoc.asString(m_cms);
            if (CmsStringUtil.isNotEmptyOrWhitespaceOnly(hiddenStr)) {
                visibilities.put(hiddenStr.trim(), Boolean.FALSE);
            }
        }
        // New element definitions: map of key -> element bean built from the
        // element's structure id and its property settings.
        List<I_CmsXmlContentValueLocation> newElementLocs = location.getSubValues(N_NEWELEMENT);
        Map<String, CmsContainerElementBean> newElementBeans = new HashMap<String, CmsContainerElementBean>();
        for (I_CmsXmlContentValueLocation elementLoc : newElementLocs) {
            I_CmsXmlContentValueLocation keyLoc = elementLoc.getSubValue(N_KEY);
            String key = keyLoc.asString(m_cms).trim();
            I_CmsXmlContentValueLocation actualElementLoc = elementLoc.getSubValue(N_ELEMENT);
            I_CmsXmlContentValueLocation uriLoc = actualElementLoc.getSubValue(N_URI);
            CmsUUID structureId = uriLoc.asId(m_cms);
            // Entries whose URI does not resolve to a structure id are skipped.
            if (structureId != null) {
                Map<String, String> settings = CmsXmlContentPropertyHelper.readProperties(m_cms, actualElementLoc);
                CmsContainerElementBean newElementBean = new CmsContainerElementBean(
                    structureId,
                    CmsUUID.getNullUUID(),
                    settings,
                    false);
                newElementBeans.put(key, newElementBean);
            }
        }
        CmsContainerConfiguration config = new CmsContainerConfiguration(ordering, visibilities, newElementBeans);
        m_currentConfigurationGroup.put(name, config);
    }
}
public class IndyGuardsFiltersAndSignatures {

    /**
     * Guard to check if the provided Object has exactly the same class as the
     * provided Class (identity comparison, so subclasses do not match). This
     * method will return false if the Object is null.
     */
    public static boolean sameClass(Class c, Object o) {
        return (o != null) && (o.getClass() == c);
    }
}
public class Pail { /** * returns if formats are same */
private boolean checkCombineValidity ( Pail p , CopyArgs args ) throws IOException { } } | if ( args . force ) return true ; PailSpec mine = getSpec ( ) ; PailSpec other = p . getSpec ( ) ; PailStructure structure = mine . getStructure ( ) ; boolean typesSame = structure . getType ( ) . equals ( other . getStructure ( ) . getType ( ) ) ; // can always append into a " raw " pail
if ( ! structure . getType ( ) . equals ( new byte [ 0 ] . getClass ( ) ) && ! typesSame ) throw new IllegalArgumentException ( "Cannot combine two pails of different types unless target pail is raw" ) ; // check that structure will be maintained
for ( String name : p . getUserFileNames ( ) ) { checkValidStructure ( name ) ; } return mine . getName ( ) . equals ( other . getName ( ) ) && mine . getArgs ( ) . equals ( other . getArgs ( ) ) ; |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.