signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class Scanner { /** * Find classpath elements whose path is a prefix of another classpath element , and record the nesting . * @ param classpathElts * the classpath elements * @ param log * the log */ private void findNestedClasspathElements ( final List < SimpleEntry < String , ClasspathElement > > classpathElts , final LogNode log ) { } }
// Sort classpath elements into lexicographic order CollectionUtils . sortIfNotEmpty ( classpathElts , new Comparator < SimpleEntry < String , ClasspathElement > > ( ) { @ Override public int compare ( final SimpleEntry < String , ClasspathElement > o1 , final SimpleEntry < String , ClasspathElement > o2 ) { return o1 . getKey ( ) . compareTo ( o2 . getKey ( ) ) ; } } ) ; // Find any nesting of elements within other elements for ( int i = 0 ; i < classpathElts . size ( ) ; i ++ ) { // See if each classpath element is a prefix of any others ( if so , they will immediately follow // in lexicographic order ) final SimpleEntry < String , ClasspathElement > ei = classpathElts . get ( i ) ; final String basePath = ei . getKey ( ) ; final int basePathLen = basePath . length ( ) ; for ( int j = i + 1 ; j < classpathElts . size ( ) ; j ++ ) { final SimpleEntry < String , ClasspathElement > ej = classpathElts . get ( j ) ; final String comparePath = ej . getKey ( ) ; final int comparePathLen = comparePath . length ( ) ; boolean foundNestedClasspathRoot = false ; if ( comparePath . startsWith ( basePath ) && comparePathLen > basePathLen ) { // Require a separator after the prefix final char nextChar = comparePath . charAt ( basePathLen ) ; if ( nextChar == '/' || nextChar == '!' ) { // basePath is a path prefix of comparePath . Ensure that the nested classpath does // not contain another ' ! ' zip - separator ( since classpath scanning does not recurse // to jars - within - jars unless they are explicitly listed on the classpath ) final String nestedClasspathRelativePath = comparePath . substring ( basePathLen + 1 ) ; if ( nestedClasspathRelativePath . indexOf ( '!' ) < 0 ) { // Found a nested classpath root foundNestedClasspathRoot = true ; // Store link from prefix element to nested elements final ClasspathElement baseElement = ei . getValue ( ) ; if ( baseElement . nestedClasspathRootPrefixes == null ) { baseElement . 
nestedClasspathRootPrefixes = new ArrayList < > ( ) ; } baseElement . nestedClasspathRootPrefixes . add ( nestedClasspathRelativePath + "/" ) ; if ( log != null ) { log . log ( basePath + " is a prefix of the nested element " + comparePath ) ; } } } } if ( ! foundNestedClasspathRoot ) { // After the first non - match , there can be no more prefix matches in the sorted order break ; } } }
public class GregorianCalendar { /** * If the supplied millis is in daylight savings time ( DST ) and is the result of an invalid * wall clock then adjust the DST offset to ensure sensible behavior . * < p > When transitioning into DST , i . e . when the clocks spring forward ( usually by one hour ) * there is a wall clock period that is invalid , it literally doesn ' t exist . e . g . If clocks * go forward one hour at 02:00 on 9th March 2014 ( standard time ) then the wall time of * 02:00-02:59:59.999 is not a valid . The wall clock jumps straight from 01:59:59.999 to * 03:00 . The following table shows the relationship between the time in millis , the standard * time and the wall time at the point of transitioning into DST . As can be seen there is no * 02:00 in the wall time . * < pre > * Time In Millis - . . . . . x + 1h . . . . . x + 2h . . . . . x + 3h * Standard Time - . . . . . 01:00 . . . . . 02:00 . . . . . 03:00 . . . . . * Wall Time - . . . . . 01:00 . . . . . 03:00 . . . . . 04:00 . . . . . * 02:00 missing * < / pre > * < p > The calendar fields represent wall time . If the user sets the fields on the calendar so * that it is in that invalid period then this code attempts to do something sensible . It * treats 02 : MM : SS . SSS as if it is { @ code 01 : MM : SS . SSS + 1 hour } . That makes sense from both * the input calendar fields perspective and from the time in millis perspective . Of course the * result of that is that when the time is formatted in that time zone that the time is * actually 03 : MM : SS . SSS . * < pre > * Wall Time - . . . . . 01:00 . . . . . < b > 02:00 . . . . . < / b > 03:00 . . . . . 04:00 . . . . . * Time In Millis - . . . . . x + 1h . . . . . < b > x + 2h . . . . . < / b > x + 2h . . . . . x + 3h . . . . . * < / pre > * < p > The way that works is as follows . First the standard time is calculated and the DST * offset is determined . 
Then if the time is in DST ( the DST offset is not 0 ) but it was not in * DST an hour earlier ( or however long the DST offset is ) then it must be in that invalid * period , in which case set the DST offset to 0 . That is then subtracted from the time in * millis to produce the correct result . The following diagram illustrates the process . * < pre > * Standard Time - . . . . . 01:00 . . . . . 02:00 . . . . . 03:00 . . . . . 04:00 . . . . . * Time In Millis - . . . . . x + 1h . . . . . x + 2h . . . . . x + 3h . . . . . x + 4h . . . . . * DST Offset - . . . . . 0h . . . . . 1h . . . . . 1h . . . . . 1h . . . . . * Adjusted DST - . . . . . 0h . . . . . < b > 0h < / b > . . . . . 1h . . . . . 1h . . . . . * Adjusted Time - . . . . . x + 1h . . . . . x + 2h . . . . . < b > x + 2h < / b > . . . . . < b > x + 3h < / b > . . . . . * < / pre > * @ return the adjusted DST offset . */ private int adjustDstOffsetForInvalidWallClock ( long standardTimeInZone , TimeZone zone , int dstOffset ) { } }
if ( dstOffset != 0 ) { // If applying the DST offset produces a time that is outside DST then it must be // an invalid wall clock so clear the DST offset to avoid that happening . if ( ! zone . inDaylightTime ( new Date ( standardTimeInZone - dstOffset ) ) ) { dstOffset = 0 ; } } return dstOffset ;
public class AbstractSequenceClassifier { /** * Loads a classifier from the file specified . If the file ' s name ends in . gz , * uses a GZIPInputStream , else uses a regular FileInputStream . This method * closes the File when done . * @ param file * Loads a classifier from this file . * @ param props * Properties in this object will be used to overwrite those * specified in the serialized classifier * @ throws IOException * If there are problems accessing the input stream * @ throws ClassCastException * If there are problems interpreting the serialized data * @ throws ClassNotFoundException * If there are problems interpreting the serialized data */ public void loadClassifier ( File file , Properties props ) throws ClassCastException , IOException , ClassNotFoundException { } }
Timing . startDoing ( "Loading classifier from " + file . getAbsolutePath ( ) ) ; BufferedInputStream bis ; if ( file . getName ( ) . endsWith ( ".gz" ) ) { bis = new BufferedInputStream ( new GZIPInputStream ( new FileInputStream ( file ) ) ) ; } else { bis = new BufferedInputStream ( new FileInputStream ( file ) ) ; } loadClassifier ( bis , props ) ; bis . close ( ) ; Timing . endDoing ( ) ;
public class MessageAttributeValue { /** * Not implemented . Reserved for future use . * @ param binaryListValues * Not implemented . Reserved for future use . * @ return Returns a reference to this object so that method calls can be chained together . */ public MessageAttributeValue withBinaryListValues ( java . util . Collection < java . nio . ByteBuffer > binaryListValues ) { } }
setBinaryListValues ( binaryListValues ) ; return this ;
public class MtasMaximumExpandSpans { /** * Go to next start position . * @ return true , if successful * @ throws IOException Signals that an I / O exception has occurred . */ private boolean goToNextStartPosition ( ) throws IOException { } }
int basicStartPosition ; int basicEndPosition ; if ( docId == - 1 || docId == NO_MORE_DOCS ) { throw new IOException ( "no document" ) ; } else { while ( ( basicStartPosition = subSpans . nextStartPosition ( ) ) != NO_MORE_POSITIONS ) { basicEndPosition = subSpans . endPosition ( ) ; startPosition = Math . max ( minPosition , ( basicStartPosition - query . maximumLeft ) ) ; endPosition = Math . min ( maxPosition + 1 , ( basicEndPosition + query . maximumRight ) ) ; if ( startPosition <= ( basicStartPosition - query . minimumLeft ) && endPosition >= ( basicEndPosition + query . minimumRight ) ) { return true ; } } return false ; }
public class OAbstractFile { /** * ( non - Javadoc ) * @ see com . orientechnologies . orient . core . storage . fs . OFileAAA # delete ( ) */ public void delete ( ) throws IOException { } }
close ( ) ; if ( osFile != null ) { boolean deleted = osFile . delete ( ) ; while ( ! deleted ) { OMemoryWatchDog . freeMemory ( 100 ) ; deleted = osFile . delete ( ) ; } }
public class BinaryTreeSoftAddressableHeap { /** * { @ inheritDoc } */ @ Override public SoftHandle < K , V > findMin ( ) { } }
if ( size == 0 ) { throw new NoSuchElementException ( ) ; } return rootList . head . suffixMin . root . cHead ;
public class CliTool { /** * Parse and execute one of the commands . * @ param args Command line arguments ( command and options ) . * @ param commands A list of commands . */ protected static void main ( String [ ] args , CliTool ... commands ) { } }
if ( commands . length == 1 ) { main ( args , commands [ 0 ] ) ; } else { JCommander jc = new JCommander ( ) ; for ( CliTool command : commands ) { jc . addCommand ( command ) ; } jc . addConverterFactory ( new CustomParameterConverters ( ) ) ; jc . setProgramName ( "" ) ; ExitStatus exitStatus = ExitStatus . SUCCESS ; try { jc . parse ( args ) ; final String commandName = jc . getParsedCommand ( ) ; if ( commandName == null ) { helpDisplayCommandOptions ( System . err , jc ) ; } else { List < Object > objects = jc . getCommands ( ) . get ( commandName ) . getObjects ( ) ; if ( objects . size ( ) != 1 ) { throw new RuntimeException ( ) ; } CliTool command = CliTool . class . cast ( objects . get ( 0 ) ) ; exitStatus = command . call ( ) ; if ( command . callSystemExit ) { System . exit ( exitStatus . code ) ; } } } catch ( ExitStatusException e ) { System . err . println ( e . getMessage ( ) ) ; if ( e . getCause ( ) != null ) { e . getCause ( ) . printStackTrace ( System . err ) ; } exitStatus = e . exitStatus ; } catch ( MissingCommandException e ) { System . err . println ( "Invalid argument: " + e ) ; System . err . println ( ) ; helpDisplayCommandOptions ( System . err , jc ) ; exitStatus = ExitStatus . ERROR_INVALID_ARGUMENTS ; } catch ( ParameterException e ) { System . err . println ( "Invalid argument: " + e . getMessage ( ) ) ; System . err . println ( ) ; if ( jc . getParsedCommand ( ) == null ) { helpDisplayCommandOptions ( System . err , jc ) ; } else { helpDisplayCommandOptions ( System . err , jc . getParsedCommand ( ) , jc ) ; } exitStatus = ExitStatus . ERROR_INVALID_ARGUMENTS ; } catch ( Throwable t ) { System . err . println ( "An unhandled exception occurred. Stack trace below." ) ; t . printStackTrace ( System . err ) ; exitStatus = ExitStatus . ERROR_OTHER ; } }
public class GqlQuery { /** * Use { @ link # getNamedBindingsMap ( ) } instead . */ @ java . lang . Deprecated public java . util . Map < java . lang . String , com . google . datastore . v1 . GqlQueryParameter > getNamedBindings ( ) { } }
return getNamedBindingsMap ( ) ;
public class DataUnitBuilder { /** * Returns the application layer service of a given protocol data unit . * @ param apdu application layer protocol data unit , requires < code > apdu . length < / code > & gt ; 1 * @ return APDU service code */ public static int getAPDUService ( final byte [ ] apdu ) { } }
if ( apdu . length < 2 ) throw new KNXIllegalArgumentException ( "getting APDU service from [0x" + toHex ( apdu , "" ) + "], APCI length < 2" ) ; // high 4 bits of APCI final int apci4 = ( apdu [ 0 ] & 0x03 ) << 2 | ( apdu [ 1 ] & 0xC0 ) >> 6 ; // lowest 6 bits of APCI final int apci6 = apdu [ 1 ] & 0x3f ; // group value codes // group read if ( apci4 == 0 ) { if ( apci6 == 0 ) return 0 ; } // group response else if ( apci4 == 1 ) return 0x40 ; // group write else if ( apci4 == 2 ) return 0x80 ; // individual address codes else if ( apci4 == 3 || apci4 == 4 || apci4 == 5 ) { if ( apci6 == 0 ) return apci4 << 6 ; } // ADC read code else if ( apci4 == 6 ) return apci4 << 6 ; else if ( apci4 == 7 ) { // extended memory r / w services use the same 4 MSB as the ADC response code if ( apdu . length > 5 || apci6 > 0x30 ) return apci4 << 6 | apci6 ; // ADC response code return apci4 << 6 ; } // memory codes else if ( apci4 == 8 || apci4 == 9 || apci4 == 10 ) return apci4 << 6 ; // the rest else return apci4 << 6 | apci6 ; // unknown codes final int code = apci4 << 6 | apci6 ; logger . warn ( "unknown APCI service code 0x" + Integer . toHexString ( code ) ) ; return code ;
public class ComponentProxy { /** * { @ inheritDoc } */ public Object intercept ( Object object , Method method , Object [ ] args , MethodProxy proxy ) throws Throwable { } }
if ( isFinalizeMethod ( method ) ) { // swallow finalize call return null ; } else if ( isEqualsMethod ( method ) ) { return equals ( args [ 0 ] ) ? Boolean . TRUE : Boolean . FALSE ; } else if ( isHashCodeMethod ( method ) ) { return new Integer ( hashCode ( ) ) ; } else if ( isToStringMethod ( method ) ) { return toString ( ) ; } else if ( isGetOverwritesMethod ( method ) ) { return getOverwrites ( ) ; } else if ( isGetInjectionSourceMethod ( method ) ) { return getInjectionSource ( ) ; } return proxy . invokeSuper ( object , args ) ;
public class CompareFacesRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( CompareFacesRequest compareFacesRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( compareFacesRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( compareFacesRequest . getSourceImage ( ) , SOURCEIMAGE_BINDING ) ; protocolMarshaller . marshall ( compareFacesRequest . getTargetImage ( ) , TARGETIMAGE_BINDING ) ; protocolMarshaller . marshall ( compareFacesRequest . getSimilarityThreshold ( ) , SIMILARITYTHRESHOLD_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class Unmarshaller { /** * Unmarshals the embedded fields of this entity . * @ throws Throwable * propagated */ private void unmarshalEmbeddedFields ( ) throws Throwable { } }
for ( EmbeddedMetadata embeddedMetadata : entityMetadata . getEmbeddedMetadataCollection ( ) ) { if ( embeddedMetadata . getStorageStrategy ( ) == StorageStrategy . EXPLODED ) { unmarshalWithExplodedStrategy ( embeddedMetadata , entity ) ; } else { unmarshalWithImplodedStrategy ( embeddedMetadata , entity , nativeEntity ) ; } }
public class InputTextRenderer { /** * Add script to enable the input mask . If the mask attribute starts with { @ code { } } the value is expected to be a * JSON object ( and can for example be used to set a regular expression : { @ code { regex : ' [ 0-9 \ u0600 - \ u06FF ] * ' } } ) . * See https : / / github . com / RobinHerbots / Inputmask . * @ param context * @ param inputText * @ param fieldId * @ param rw * @ throws IOException */ protected void encodeMask ( FacesContext context , InputText inputText , String fieldId , ResponseWriter rw ) throws IOException { } }
if ( inputText . getMask ( ) != null && ! inputText . getMask ( ) . isEmpty ( ) ) { rw . startElement ( "script" , inputText ) ; rw . writeText ( "Inputmask(" , null ) ; if ( inputText . getMask ( ) . trim ( ) . startsWith ( "{" ) ) { rw . writeText ( inputText . getMask ( ) . trim ( ) , null ) ; } else { rw . writeText ( String . format ( "\"%s\"" , inputText . getMask ( ) . replace ( "\"" , "\\\"" ) ) , null ) ; } rw . writeText ( ").mask(document.getElementById(\"" , null ) ; rw . writeText ( fieldId , null ) ; rw . writeText ( "\"));" , null ) ; rw . endElement ( "script" ) ; }
public class ServerReplicationConfigurationMarshaller { /** * Marshall the given parameter object . */ public void marshall ( ServerReplicationConfiguration serverReplicationConfiguration , ProtocolMarshaller protocolMarshaller ) { } }
if ( serverReplicationConfiguration == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( serverReplicationConfiguration . getServer ( ) , SERVER_BINDING ) ; protocolMarshaller . marshall ( serverReplicationConfiguration . getServerReplicationParameters ( ) , SERVERREPLICATIONPARAMETERS_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class LambdaDslJsonArray { /** * Combine all the matchers using AND * @ param value Attribute example value * @ param rules Matching rules to apply */ public LambdaDslJsonArray and ( Object value , MatchingRule ... rules ) { } }
pactArray . and ( value , rules ) ; return this ;
public class ThriftCodecByteCodeGenerator { /** * Defines the code to inject data into the struct public fields . */ private void injectStructFields ( MethodDefinition read , LocalVariableDefinition instance , Map < Short , LocalVariableDefinition > structData ) { } }
for ( ThriftFieldMetadata field : metadata . getFields ( THRIFT_FIELD ) ) { injectField ( read , field , instance , structData . get ( field . getId ( ) ) ) ; }
public class Html5WebSocket { /** * { @ inheritDoc } */ @ Override public void send ( @ Nonnull final ArrayBuffer data ) throws IllegalStateException { } }
checkConnected ( ) ; _webSocket . send ( data ) ;
public class MediaSource { /** * Check if a matching rendition is found for any of the provided media formats and other media args . * The first match is returned . * @ param media Media * @ param asset Asset * @ param mediaArgs Media args * @ return true if a rendition was found */ private boolean resolveFirstMatchRenditions ( Media media , Asset asset , MediaArgs mediaArgs ) { } }
Rendition rendition = asset . getRendition ( mediaArgs ) ; if ( rendition != null ) { media . setRenditions ( ImmutableList . of ( rendition ) ) ; media . setUrl ( rendition . getUrl ( ) ) ; return true ; } return false ;
public class PersonDocumentRepositoryMongoImpl { /** * { @ inheritDoc } */ @ Override public Collection < PersonDocument > findByRootAndSurnameBeginsWith ( final RootDocument rootDocument , final String beginsWith ) { } }
logger . debug ( "Starting findByRootAndSurnameBeginsWith" ) ; if ( rootDocument == null ) { return Collections . emptyList ( ) ; } final Collection < PersonDocument > personDocuments = findByFileAndSurnameBeginsWith ( rootDocument . getFilename ( ) , beginsWith ) ; for ( final PersonDocument personDocument : personDocuments ) { final Person person = personDocument . getGedObject ( ) ; person . setParent ( rootDocument . getGedObject ( ) ) ; } logger . debug ( "Ending findByRootAndSurnameBeginsWith" ) ; return personDocuments ;
public class Path3d { /** * Replies the coordinates of this path in an array of * double precision floating - point numbers . * @ param transform is the transformation to apply to all the coordinates . * @ return the coordinates . */ @ Pure public double [ ] toDoubleArray ( Transform3D transform ) { } }
double [ ] clone = new double [ this . numCoordsProperty . get ( ) ] ; if ( transform == null ) { for ( int i = 0 ; i < this . numCoordsProperty . get ( ) ; ++ i ) { clone [ i ] = this . coordsProperty [ i ] . get ( ) ; } } else { Point3f p = new Point3f ( ) ; for ( int i = 0 ; i < clone . length ; ) { p . x = this . coordsProperty [ i ] . get ( ) ; p . y = this . coordsProperty [ i + 1 ] . get ( ) ; p . y = this . coordsProperty [ i + 2 ] . get ( ) ; transform . transform ( p ) ; clone [ i ++ ] = p . x ; clone [ i ++ ] = p . y ; clone [ i ++ ] = p . z ; } } return clone ;
public class ExecutionEntityManagerImpl { /** * UPDATE METHODS */ @ Override public void updateExecutionTenantIdForDeployment ( String deploymentId , String newTenantId ) { } }
executionDataManager . updateExecutionTenantIdForDeployment ( deploymentId , newTenantId ) ;
public class EngineDefaults { /** * A list of parameters specific to a particular cache node type . Each element in the list contains detailed * information about one parameter . * @ return A list of parameters specific to a particular cache node type . Each element in the list contains detailed * information about one parameter . */ public java . util . List < CacheNodeTypeSpecificParameter > getCacheNodeTypeSpecificParameters ( ) { } }
if ( cacheNodeTypeSpecificParameters == null ) { cacheNodeTypeSpecificParameters = new com . amazonaws . internal . SdkInternalList < CacheNodeTypeSpecificParameter > ( ) ; } return cacheNodeTypeSpecificParameters ;
public class TransactionalCache { /** * Put an instance into the cache . * @ param id * The id . * @ param value * The instance . * @ param mode * The mode . */ public void put ( Id id , Object value , Mode mode ) { } }
if ( Mode . WRITE . equals ( mode ) ) { writeCache . put ( id , value ) ; } readCache . put ( new CacheKey ( id ) , value ) ;
public class TextComponentUtil { /** * Eats whitespace lines after the given offset until it finds a non - whitespace line and * returns the start of the last whitespace line found , or the initial value if none was . */ public static int getDeepestWhiteSpaceLineStartAfter ( String script , int offset ) { } }
if ( offset < 0 ) { return offset ; } int i = offset ; while ( true ) { int lineStartAfter = getWhiteSpaceLineStartAfter ( script , i ) ; if ( lineStartAfter == - 1 ) { return i ; } else { i = lineStartAfter ; } }
public class AnnotationFilter { /** * { @ inheritDoc } * @ return { @ code true } if the specified { @ link Class # isAnnotationPresent ( Class ) annotation is present } for the given * { @ code type } , { @ code false } otherwise . */ @ Override public boolean accept ( Class < ? > type ) { } }
if ( type != null ) { if ( this . forceInheritence ) { Annotation annotation = this . annotationUtil . getTypeAnnotation ( type , this . annotationType ) ; return ( annotation != null ) ; } else { return type . isAnnotationPresent ( this . annotationType ) ; } } return false ;
public class AmazonCloudDirectoryClient { /** * Creates a < a > TypedLinkFacet < / a > . For more information , see < a href = * " https : / / docs . aws . amazon . com / clouddirectory / latest / developerguide / directory _ objects _ links . html # directory _ objects _ links _ typedlink " * > Typed Links < / a > . * @ param createTypedLinkFacetRequest * @ return Result of the CreateTypedLinkFacet operation returned by the service . * @ throws InternalServiceException * Indicates a problem that must be resolved by Amazon Web Services . This might be a transient error in * which case you can retry your request until it succeeds . Otherwise , go to the < a * href = " http : / / status . aws . amazon . com / " > AWS Service Health Dashboard < / a > site to see if there are any * operational issues with the service . * @ throws InvalidArnException * Indicates that the provided ARN value is not valid . * @ throws RetryableConflictException * Occurs when a conflict with a previous successful write is detected . For example , if a write operation * occurs on an object and then an attempt is made to read the object using “ SERIALIZABLE ” consistency , this * exception may result . This generally occurs when the previous write did not have time to propagate to the * host serving the current request . A retry ( with appropriate backoff logic ) is the recommended response to * this exception . * @ throws ValidationException * Indicates that your request is malformed in some manner . See the exception message . * @ throws LimitExceededException * Indicates that limits are exceeded . See < a * href = " https : / / docs . aws . amazon . com / clouddirectory / latest / developerguide / limits . html " > Limits < / a > for more * information . * @ throws AccessDeniedException * Access denied . Check your permissions . * @ throws ResourceNotFoundException * The specified resource could not be found . 
* @ throws FacetAlreadyExistsException * A facet with the same name already exists . * @ throws InvalidRuleException * Occurs when any of the rule parameter keys or values are invalid . * @ throws FacetValidationException * The < a > Facet < / a > that you provided was not well formed or could not be validated with the schema . * @ sample AmazonCloudDirectory . CreateTypedLinkFacet * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / clouddirectory - 2017-01-11 / CreateTypedLinkFacet " * target = " _ top " > AWS API Documentation < / a > */ @ Override public CreateTypedLinkFacetResult createTypedLinkFacet ( CreateTypedLinkFacetRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeCreateTypedLinkFacet ( request ) ;
public class Branch { /** * Enabled Strong matching check using chrome cookies . This method should be called before * Branch # getAutoInstance ( Context ) . < / p > * @ param cookieMatchDomain The domain for the url used to match the cookie ( eg . example . app . link ) * @ param delay Time in millisecond to wait for the strong match to check to finish before Branch init session is called . * Default time is 750 msec . */ public static void enableCookieBasedMatching ( String cookieMatchDomain , int delay ) { } }
cookieBasedMatchDomain_ = cookieMatchDomain ; BranchStrongMatchHelper . getInstance ( ) . setStrongMatchUrlHitDelay ( delay ) ;
public class ZipClient { /** * See if there are any assets under the specified directory in the zip */ @ Override protected boolean hasChildren ( final String relative ) throws IOException { } }
ZipFile zip = createZipFile ( ) ; if ( null == zip ) { return false ; } Enumeration < ? extends ZipEntry > entries = zip . entries ( ) ; while ( entries . hasMoreElements ( ) ) { ZipEntry entry = entries . nextElement ( ) ; if ( ( relative . equals ( "" ) ) || entry . getName ( ) . startsWith ( relative + File . separator ) ) { return true ; } } zip . close ( ) ; return false ;
public class MapUtil { /** * 获取Map指定key的值 , 并转换为Integer * @ param map Map * @ param key 键 * @ return 值 * @ since 4.0.6 */ public static Integer getInt ( Map < ? , ? > map , Object key ) { } }
return get ( map , key , Integer . class ) ;
public class StaticTraceInstrumentation { /** * Setup the lists of classes and jars to process based on the command * line arguments provided . */ public void processArguments ( String [ ] args ) throws IOException { } }
List < File > classFiles = new ArrayList < File > ( ) ; List < File > jarFiles = new ArrayList < File > ( ) ; String [ ] fileArgs = null ; for ( int i = 0 ; i < args . length ; i ++ ) { if ( args [ i ] . equalsIgnoreCase ( "--config" ) ) { configFileParser = new TraceConfigFileParser ( new File ( args [ ++ i ] ) ) ; configFileParser . parse ( ) ; InstrumentationOptions options = configFileParser . getInstrumentationOptions ( ) ; if ( options . getAddFFDC ( ) ) { setInstrumentWithFFDC ( true ) ; } setTraceType ( options . getTraceType ( ) ) ; setInstrumentationOptions ( options ) ; } else if ( args [ i ] . equalsIgnoreCase ( "--debug" ) || args [ i ] . equals ( "-d" ) ) { setDebug ( true ) ; } else if ( args [ i ] . equalsIgnoreCase ( "--tr" ) ) { setTraceType ( TraceType . TR ) ; } else if ( args [ i ] . equalsIgnoreCase ( "--websphere" ) ) { setTraceType ( TraceType . TR ) ; } else if ( args [ i ] . equalsIgnoreCase ( "--java-logging" ) ) { setTraceType ( TraceType . JAVA_LOGGING ) ; } else if ( args [ i ] . equalsIgnoreCase ( "--jsr47" ) ) { setTraceType ( TraceType . JAVA_LOGGING ) ; } else if ( args [ i ] . equalsIgnoreCase ( "--none" ) ) { setTraceType ( TraceType . NONE ) ; } else if ( args [ i ] . equalsIgnoreCase ( "--no-trace" ) ) { setTraceType ( TraceType . NONE ) ; } else if ( args [ i ] . equalsIgnoreCase ( "--ffdc" ) ) { setInstrumentWithFFDC ( true ) ; } else if ( args [ i ] . equalsIgnoreCase ( "--compute-frames" ) ) { setComputeFrames ( true ) ; } else { fileArgs = new String [ args . length - i ] ; System . arraycopy ( args , i , fileArgs , 0 , fileArgs . length ) ; break ; } } if ( fileArgs == null || fileArgs . length == 0 ) { throw new IllegalArgumentException ( "Empty file lists are illegal" ) ; } for ( int i = 0 ; i < fileArgs . length ; i ++ ) { File f = new File ( fileArgs [ i ] ) ; if ( ! f . exists ( ) ) { throw new IllegalArgumentException ( f + " does not exist" ) ; } else if ( f . isDirectory ( ) ) { classFiles . 
addAll ( getClassFiles ( f , null ) ) ; } else if ( f . getName ( ) . endsWith ( ".class" ) ) { classFiles . add ( f ) ; } else if ( f . getName ( ) . endsWith ( ".jar" ) ) { jarFiles . add ( f ) ; } else if ( f . getName ( ) . endsWith ( ".zip" ) ) { jarFiles . add ( f ) ; } else { System . err . println ( f + " is an unexpected file type; ignoring" ) ; } } setClassFiles ( classFiles ) ; setJarFiles ( jarFiles ) ;
public class ListPullRequestsResult { /** * The system - generated IDs of the pull requests . * @ param pullRequestIds * The system - generated IDs of the pull requests . */ public void setPullRequestIds ( java . util . Collection < String > pullRequestIds ) { } }
if ( pullRequestIds == null ) { this . pullRequestIds = null ; return ; } this . pullRequestIds = new java . util . ArrayList < String > ( pullRequestIds ) ;
public class Main { /** * Initializes logging , specifically by looking for log4j . xml or * log4j . properties file in DataCleaner ' s home directory . * @ return true if a logging configuration file was found , or false * otherwise */ protected static boolean initializeLogging ( ) { } }
try { // initial logging config , used before anything else { final URL url = Main . class . getResource ( "log4j-initial.xml" ) ; assert url != null ; DOMConfigurator . configure ( url ) ; } if ( ClassLoaderUtils . IS_WEB_START ) { final URL url = Main . class . getResource ( "log4j-jnlp.xml" ) ; assert url != null ; println ( "Using JNLP log configuration: " + url ) ; DOMConfigurator . configure ( url ) ; return true ; } final File dataCleanerHome = DataCleanerHome . getAsFile ( ) ; if ( initializeLoggingFromDirectory ( dataCleanerHome ) ) { return true ; } if ( initializeLoggingFromDirectory ( new File ( "." ) ) ) { return true ; } // fall back to default log4j . xml file in classpath final URL url = Main . class . getResource ( "log4j-default.xml" ) ; assert url != null ; println ( "Using default log configuration: " + url ) ; DOMConfigurator . configure ( url ) ; return false ; } catch ( final NoClassDefFoundError e ) { // can happen if log4j is not on the classpath println ( "Failed to initialize logging, class not found: " + e . getMessage ( ) ) ; return false ; }
public class ICULocaleService { /** * Convenience method for callers using locales . This returns the standard * Locale list , built from the Set of visible ids . */ public Locale [ ] getAvailableLocales ( ) { } }
// TODO make this wrap getAvailableULocales later Set < String > visIDs = getVisibleIDs ( ) ; Locale [ ] locales = new Locale [ visIDs . size ( ) ] ; int n = 0 ; for ( String id : visIDs ) { Locale loc = LocaleUtility . getLocaleFromName ( id ) ; locales [ n ++ ] = loc ; } return locales ;
public class XMLDataLoader { /** * Returns a mapping from all testcases to their results for a certain collectorName . * @ param collectorName The name of the collector for loading the Results * @ return Mapping from all testcases to their results */ public Map < String , Map < Date , Long > > getData ( final String collectorName ) { } }
final Map < String , Map < Date , Long > > map = new HashMap < > ( ) ; final Testcases testcases = data . getTestcases ( ) ; for ( final TestcaseType tct : testcases . getTestcase ( ) ) { final Map < Date , Long > measures = new HashMap < > ( ) ; final List < Datacollector > collectorMap = tct . getDatacollector ( ) ; Datacollector collector = null ; for ( final Datacollector dc : collectorMap ) { if ( dc . getName ( ) . equals ( collectorName ) ) { collector = dc ; } } if ( collector == null ) { LOG . error ( "Achtung: Datenkollektor " + collectorName + " nicht vorhanden" ) ; } else { for ( final Result s : collector . getResult ( ) ) { measures . put ( new Date ( s . getDate ( ) ) , ( long ) s . getValue ( ) ) ; } map . put ( tct . getName ( ) , measures ) ; } } return map ;
public class RestApiClient { /** * Gets the sessions . * @ return the sessions */ public SessionEntities getSessions ( ) { } }
SessionEntities sessionEntities = restClient . get ( "sessions" , SessionEntities . class , new HashMap < String , String > ( ) ) ; return sessionEntities ;
public class TypeUtil { /** * If either type is a double TypeMirror , a double TypeMirror is returned . * Otherwise , if either type is a float TypeMirror , a float TypeMirror is returned . * Otherwise , if either type is a long TypeMirror , a long TypeMirror is returned . * Otherwise , an int TypeMirror is returned . See jls - 5.6.2. * @ param type1 a numeric type * @ param type2 a numeric type * @ return the result of binary numeric promotion applied to type1 and type2 */ public TypeMirror binaryNumericPromotion ( TypeMirror type1 , TypeMirror type2 ) { } }
TypeKind t1 = type1 . getKind ( ) ; TypeKind t2 = type2 . getKind ( ) ; if ( t1 == TypeKind . DECLARED ) { t1 = javacTypes . unboxedType ( type1 ) . getKind ( ) ; } if ( t2 == TypeKind . DECLARED ) { t2 = javacTypes . unboxedType ( type2 ) . getKind ( ) ; } if ( t1 == TypeKind . DOUBLE || t2 == TypeKind . DOUBLE ) { return getDouble ( ) ; } else if ( t1 == TypeKind . FLOAT || t2 == TypeKind . FLOAT ) { return getFloat ( ) ; } else if ( t1 == TypeKind . LONG || t2 == TypeKind . LONG ) { return getLong ( ) ; } else { return getInt ( ) ; }
public class ValueNumberFrameModelingVisitor { /** * Load a static field . * @ param staticField * the field * @ param obj * the Instruction loading the field */ private void loadStaticField ( XField staticField , Instruction obj ) { } }
if ( RLE_DEBUG ) { System . out . println ( "[loadStaticField for field " + staticField + " in instruction " + handle ) ; } ValueNumberFrame frame = getFrame ( ) ; AvailableLoad availableLoad = new AvailableLoad ( staticField ) ; ValueNumber [ ] loadedValue = frame . getAvailableLoad ( availableLoad ) ; if ( loadedValue == null ) { // Make the load available int numWordsProduced = getNumWordsProduced ( obj ) ; loadedValue = getOutputValues ( EMPTY_INPUT_VALUE_LIST , numWordsProduced ) ; frame . addAvailableLoad ( availableLoad , loadedValue ) ; if ( RLE_DEBUG ) { System . out . println ( "[making load of " + staticField + " available]" ) ; } } else { if ( RLE_DEBUG ) { System . out . println ( "[found available load of " + staticField + "]" ) ; } } if ( VERIFY_INTEGRITY ) { checkConsumedAndProducedValues ( obj , EMPTY_INPUT_VALUE_LIST , loadedValue ) ; } pushOutputValues ( loadedValue ) ;
public class MapCollections { /** * Returns unmodifiable MapList . * @ param < K > * @ param < V > * @ param map * @ return */ public static final < K , V > MapList < K , V > unmodifiableMapList ( MapList < K , V > map ) { } }
return new UnmodifiableMapList ( Collections . unmodifiableMap ( map ) , map . getComparator ( ) ) ;
public class JKIOUtil { /** * Cop resources from jar to dir . * @ param sourceClassPath the source class path * @ param dest the dest */ public static void copResourcesFromJarToDir ( String sourceClassPath , File dest ) { } }
try { List < File > resourcesInnPackage = getResourcesInnPackage ( sourceClassPath ) ; for ( File file : resourcesInnPackage ) { JK . printBlock ( "Copying file: " + file . getName ( ) + " to folder " + dest . getAbsolutePath ( ) ) ; FileUtils . copyFileToDirectory ( file , dest ) ; } } catch ( IOException e ) { JK . throww ( e ) ; }
public class NonBlockingHashMap {
    /**
     * Core lock-free insert/update primitive shared by put, putIfAbsent,
     * replace, remove and the table-copy machinery. Attempts to set
     * key -&gt; putval, but only when the current value matches expVal
     * (NO_MATCH_OLD / MATCH_ANY are sentinel wildcards; TOMBSTONE stands for
     * a deleted value). Returns the prior value; putIfMatch only returns a
     * null if passed in an expected null.
     */
    private static final Object putIfMatch(final NonBlockingHashMap topmap, final Object[] kvs, final Object key, final Object putval, final Object expVal) {
        assert putval != null;
        assert !(putval instanceof Prime); // Primes are internal copy markers, never user values
        assert !(expVal instanceof Prime);
        final int fullhash = hash(key); // throws NullPointerException if key null
        final int len = len(kvs); // Count of key/value pairs, reads kvs.length
        final CHM chm = chm(kvs); // Reads kvs[0]
        final int[] hashes = hashes(kvs); // Reads kvs[1], read before kvs[0]
        int idx = fullhash & (len - 1);

        // --- Key-Claim stanza: spin till we can claim a Key (or force a resizing).
        int reprobe_cnt = 0;
        Object K = null, V = null;
        Object[] newkvs = null;
        while (true) { // Spin till we get a Key slot
            V = val(kvs, idx); // Get old value (before volatile read below!)
            K = key(kvs, idx); // Get current key
            if (K == null) { // Slot is free?
                // Found an empty Key slot - which means this Key has never been in
                // this table. No need to put a Tombstone - the Key is not here!
                if (putval == TOMBSTONE) return putval; // Not-now & never-been in this table
                // Claim the null key-slot
                if (CAS_key(kvs, idx, null, key)) { // Claim slot for Key
                    chm._slots.add(1); // Raise key-slots-used count
                    hashes[idx] = fullhash; // Memoize fullhash
                    break; // Got it!
                }
                // CAS to claim the key-slot failed. Java's boolean CAS destroys the
                // witness value, and re-reading cannot recover it (another thread
                // may overwrite after the CAS); apparent spurious failure is
                // avoided by never allowing Keys to change once set.
                K = key(kvs, idx); // CAS failed, get updated value
                assert K != null; // If keys[idx] is null, CAS shoulda worked
            }
            // Key slot was not null, there exists a Key here.
            // A volatile read is required to preserve happens-before on newly
            // inserted Keys - and it must happen before EACH key compare.
            newkvs = chm._newkvs; // VOLATILE READ before key compare
            if (keyeq(K, key, hashes, idx, fullhash)) break; // Got it!
            // get and put must share the same key-lookup logic, lest 'get' give
            // up looking too soon.
            // topmap._reprobes.add(1);
            if (++reprobe_cnt >= reprobe_limit(len) || // too many probes, or
                key == TOMBSTONE) { // found a TOMBSTONE key: no more keys in this table
                // We simply must have a new table to do a 'put'; a 'get' will also
                // go to the new table (if any). No free key slot can be claimed here.
                newkvs = chm.resize(topmap, kvs);
                if (expVal != null) topmap.help_copy(newkvs); // help along an existing copy
                return putIfMatch(topmap, newkvs, key, putval, expVal);
            }
            idx = (idx + 1) & (len - 1); // Reprobe!
        } // End of spinning till we get a Key slot

        // --- Found the proper Key slot; now update the matching Value slot.
        // Values move monotonically null -> not-null (deletes use TOMBSTONE),
        // so a null V here means the key was just inserted.
        if (putval == V) return V; // Fast cutout for no-change

        // Consider starting a table-copy: only checked once per fresh key-insert
        // (V == null), or when a Prime is seen while _newkvs still read null - a
        // rare reordering race; the resize call below forces the _newkvs read.
        if (newkvs == null &&
            ((V == null && chm.tableFull(reprobe_cnt, len)) ||
             V instanceof Prime))
            newkvs = chm.resize(topmap, kvs); // Force the new table copy to start

        // If we are moving to a new table: copy our slot and retry in the new table.
        if (newkvs != null)
            return putIfMatch(topmap, chm.copy_slot_and_check(topmap, kvs, idx, expVal), key, putval, expVal);

        // We are finally prepared to update the existing table.
        assert !(V instanceof Prime);

        // Must match old, and we do not? Then bail out now. Either V or expVal
        // may be TOMBSTONE; V may be null (never inserted); expVal may be null
        // when called from copy_slot.
        if (expVal != NO_MATCH_OLD && // Do we care about expected-Value at all?
            V != expVal && // No instant match already?
            (expVal != MATCH_ANY || V == TOMBSTONE || V == null) &&
            !(V == null && expVal == TOMBSTONE) && // Match on null/TOMBSTONE combo
            (expVal == null || !expVal.equals(V))) // Expensive equals check at the last
            return V; // Do not update!

        // Actually change the Value in the Key,Value pair
        if (CAS_val(kvs, idx, V, putval)) { // CAS succeeded - we did the update!
            // Both normal puts and table-copy call putIfMatch, but table-copy
            // (expVal == null) must not change the live k/v count.
            if (expVal != null) { // Adjust sizes - a striped counter
                if ((V == null || V == TOMBSTONE) && putval != TOMBSTONE) chm._size.add(1);
                if (!(V == null || V == TOMBSTONE) && putval == TOMBSTONE) chm._size.add(-1);
            }
        } else { // Else CAS failed
            V = val(kvs, idx); // Get new value
            // If a Prime'd value got installed, re-run the put on the new table;
            // otherwise we simply lost the CAS to another racing put.
            if (V instanceof Prime)
                return putIfMatch(topmap, chm.copy_slot_and_check(topmap, kvs, idx, expVal), key, putval, expVal);
        }
        // Win or lose the CAS, we are done. If we lost, "we won but another
        // thread immediately stomped our update with no chance of a reader reading".
        return (V == null && expVal != null) ? TOMBSTONE : V;
    }
}
public class PairLineNorm {
    /**
     * Sets the values of l1 and l2 to be equal to the values of the passed in
     * lines. The contents are copied; no reference to the arguments is retained.
     *
     * @param l1 new value for the first line
     * @param l2 new value for the second line
     */
    public void set(Vector3D_F64 l1, Vector3D_F64 l2) {
        this.l1.set(l1);
        this.l2.set(l2);
    }
}
public class Stopwatch { /** * stops the watch * @ return returns the current time or 0 if watch not was running */ public long stop ( ) { } }
if ( isRunning ) { long time = _time ( ) - start ; total += time ; count ++ ; isRunning = false ; return time ; } return 0 ;
public class EntityUtils { /** * extractIds . * @ param entities a { @ link java . util . Collection } object . * @ param < T > a T object . * @ return a { @ link java . util . List } object . */ public static < T extends Entity < ? > > List < ? > extractIds ( Collection < T > entities ) { } }
List < Object > idList = CollectUtils . newArrayList ( ) ; for ( Iterator < T > iter = entities . iterator ( ) ; iter . hasNext ( ) ; ) { Entity < ? > element = iter . next ( ) ; try { idList . add ( PropertyUtils . getProperty ( element , "id" ) ) ; } catch ( Exception e ) { logger . error ( "getProperty error" , e ) ; continue ; } } return idList ;
public class SipSessionImpl { /** * / * ( non - Javadoc ) * @ see org . mobicents . javax . servlet . sip . SipSessionExt # setBypassLoadBalancer ( boolean ) */ @ Override public void setBypassLoadBalancer ( boolean bypassLoadBalancer ) { } }
if ( logger . isDebugEnabled ( ) ) { logger . debug ( "setting bypassLoadBalancer: " + bypassLoadBalancer + " on the sip session " + key ) ; } this . bypassLoadBalancer = bypassLoadBalancer ;
public class JwkProviderBuilder { /** * Creates a { @ link JwkProvider } * @ return a newly created { @ link JwkProvider } */ public JwkProvider build ( ) { } }
JwkProvider urlProvider = new UrlJwkProvider ( url ) ; if ( this . rateLimited ) { urlProvider = new RateLimitedJwkProvider ( urlProvider , bucket ) ; } if ( this . cached ) { urlProvider = new GuavaCachedJwkProvider ( urlProvider , cacheSize , expiresIn , expiresUnit ) ; } return urlProvider ;
public class KMeansPlusPlusSeed { /** * Computes the distance between each data point and the given centroid . If * { @ code selectMin } is set to true , then this will only overwrite the * values in { @ code distances } if the new distance is smaller . Otherwise * the new distance will always be stored in { @ code distances } . * @ param distances An array of distances that need to be updated . * @ param selectMin Set to true a new distance must smaller than the * current values in { @ code distances } . * @ param dataPoints The set of data points . * @ param centroid The centroid to compare against . */ private static void computeDistances ( double [ ] distances , boolean selectMin , Matrix dataPoints , DoubleVector centroid ) { } }
for ( int i = 0 ; i < distances . length ; ++ i ) { double distance = Similarity . euclideanDistance ( centroid , dataPoints . getRowVector ( i ) ) ; if ( ! selectMin || selectMin && distance < distances [ i ] ) distances [ i ] = distance ; }
public class FeatureExtractor { /** * Given a list of templates , extracts all features for the given state * @ param configuration * @ return * @ throws Exception */ public static Object [ ] extractAllParseFeatures ( Configuration configuration , int length ) { } }
if ( length == 26 ) return extractBasicFeatures ( configuration , length ) ; else if ( length == 72 ) return extractExtendedFeatures ( configuration , length ) ; else return extractExtendedFeaturesWithBrownClusters ( configuration , length ) ;
public class Evaluation { /** * Evaluate the network , with optional metadata * @ param labels Data labels * @ param predictions Network predictions * @ param recordMetaData Optional ; may be null . If not null , should have size equal to the number of outcomes / guesses */ @ Override public void eval ( INDArray labels , INDArray predictions , INDArray mask , final List < ? extends Serializable > recordMetaData ) { } }
Triple < INDArray , INDArray , INDArray > p = BaseEvaluation . reshapeAndExtractNotMasked ( labels , predictions , mask , axis ) ; if ( p == null ) { // All values masked out ; no - op return ; } INDArray labels2d = p . getFirst ( ) ; INDArray predictions2d = p . getSecond ( ) ; INDArray maskArray = p . getThird ( ) ; Preconditions . checkState ( maskArray == null , "Per-output masking for Evaluation is not supported" ) ; // Check for NaNs in predictions - without this , evaulation could silently be intepreted as class 0 prediction due to argmax long count = Nd4j . getExecutioner ( ) . execAndReturn ( new MatchCondition ( predictions2d , Conditions . isNan ( ) ) ) . getFinalResult ( ) . longValue ( ) ; org . nd4j . base . Preconditions . checkState ( count == 0 , "Cannot perform evaluation with NaNs present in predictions:" + " %s NaNs present in predictions INDArray" , count ) ; // Add the number of rows to numRowCounter numRowCounter += labels2d . size ( 0 ) ; if ( labels2d . dataType ( ) != predictions2d . dataType ( ) ) labels2d = labels2d . castTo ( predictions2d . dataType ( ) ) ; // If confusion is null , then Evaluation was instantiated without providing the classes - > infer # classes from if ( confusion == null ) { int nClasses = labels2d . columns ( ) ; if ( nClasses == 1 ) nClasses = 2 ; // Binary ( single output variable ) case if ( labelsList == null || labelsList . isEmpty ( ) ) { labelsList = new ArrayList < > ( nClasses ) ; for ( int i = 0 ; i < nClasses ; i ++ ) labelsList . add ( String . valueOf ( i ) ) ; } createConfusion ( nClasses ) ; } // Length of real labels must be same as length of predicted labels if ( ! Arrays . equals ( labels2d . shape ( ) , predictions2d . shape ( ) ) ) { throw new IllegalArgumentException ( "Unable to evaluate. Predictions and labels arrays are not same shape." + " Predictions shape: " + Arrays . toString ( predictions2d . shape ( ) ) + ", Labels shape: " + Arrays . toString ( labels2d . 
shape ( ) ) ) ; } // For each row get the most probable label ( column ) from prediction and assign as guessMax // For each row get the column of the true label and assign as currMax final int nCols = labels2d . columns ( ) ; final int nRows = labels2d . rows ( ) ; if ( nCols == 1 ) { INDArray binaryGuesses = predictions2d . gt ( binaryDecisionThreshold == null ? 0.5 : binaryDecisionThreshold ) . castTo ( predictions . dataType ( ) ) ; INDArray notLabel = labels2d . rsub ( 1.0 ) ; // Invert entries ( assuming 1 and 0) INDArray notGuess = binaryGuesses . rsub ( 1.0 ) ; // tp : predicted = 1 , actual = 1 int tp = labels2d . mul ( binaryGuesses ) . castTo ( DataType . INT ) . sumNumber ( ) . intValue ( ) ; // fp : predicted = 1 , actual = 0 int fp = notLabel . mul ( binaryGuesses ) . castTo ( DataType . INT ) . sumNumber ( ) . intValue ( ) ; // fn : predicted = 0 , actual = 1 int fn = notGuess . mul ( labels2d ) . castTo ( DataType . INT ) . sumNumber ( ) . intValue ( ) ; int tn = nRows - tp - fp - fn ; confusion ( ) . add ( 1 , 1 , tp ) ; confusion ( ) . add ( 1 , 0 , fn ) ; confusion ( ) . add ( 0 , 1 , fp ) ; confusion ( ) . add ( 0 , 0 , tn ) ; truePositives . incrementCount ( 1 , tp ) ; falsePositives . incrementCount ( 1 , fp ) ; falseNegatives . incrementCount ( 1 , fn ) ; trueNegatives . incrementCount ( 1 , tn ) ; truePositives . incrementCount ( 0 , tn ) ; falsePositives . incrementCount ( 0 , fn ) ; falseNegatives . incrementCount ( 0 , fp ) ; trueNegatives . incrementCount ( 0 , tp ) ; if ( recordMetaData != null ) { for ( int i = 0 ; i < binaryGuesses . size ( 0 ) ; i ++ ) { if ( i >= recordMetaData . size ( ) ) break ; int actual = labels2d . getDouble ( 0 ) == 0.0 ? 0 : 1 ; int predicted = binaryGuesses . getDouble ( 0 ) == 0.0 ? 0 : 1 ; addToMetaConfusionMatrix ( actual , predicted , recordMetaData . 
get ( i ) ) ; } } } else { INDArray guessIndex ; if ( binaryDecisionThreshold != null ) { if ( nCols != 2 ) { throw new IllegalStateException ( "Binary decision threshold is set, but number of columns for " + "predictions is " + nCols + ". Binary decision threshold can only be used for binary " + "prediction cases" ) ; } INDArray pClass1 = predictions2d . getColumn ( 1 ) ; guessIndex = pClass1 . gt ( binaryDecisionThreshold ) ; } else if ( costArray != null ) { // With a cost array : do argmax ( cost * probability ) instead of just argmax ( probability ) guessIndex = Nd4j . argMax ( predictions2d . mulRowVector ( costArray . castTo ( predictions2d . dataType ( ) ) ) , 1 ) ; } else { // Standard case : argmax guessIndex = Nd4j . argMax ( predictions2d , 1 ) ; } INDArray realOutcomeIndex = Nd4j . argMax ( labels2d , 1 ) ; val nExamples = guessIndex . length ( ) ; for ( int i = 0 ; i < nExamples ; i ++ ) { int actual = ( int ) realOutcomeIndex . getDouble ( i ) ; int predicted = ( int ) guessIndex . getDouble ( i ) ; confusion ( ) . add ( actual , predicted ) ; if ( recordMetaData != null && recordMetaData . size ( ) > i ) { Object m = recordMetaData . get ( i ) ; addToMetaConfusionMatrix ( actual , predicted , m ) ; } // instead of looping through each label for confusion // matrix , instead infer those values by determining if true / false negative / positive , // then just add across matrix // if actual = = predicted , then it ' s a true positive , assign true negative to every other label if ( actual == predicted ) { truePositives . incrementCount ( actual , 1 ) ; for ( int col = 0 ; col < nCols ; col ++ ) { if ( col == actual ) { continue ; } trueNegatives . incrementCount ( col , 1 ) ; // all cols prior } } else { falsePositives . incrementCount ( predicted , 1 ) ; falseNegatives . 
incrementCount ( actual , 1 ) ; // first determine intervals for adding true negatives int lesserIndex , greaterIndex ; if ( actual < predicted ) { lesserIndex = actual ; greaterIndex = predicted ; } else { lesserIndex = predicted ; greaterIndex = actual ; } // now loop through intervals for ( int col = 0 ; col < lesserIndex ; col ++ ) { trueNegatives . incrementCount ( col , 1 ) ; // all cols prior } for ( int col = lesserIndex + 1 ; col < greaterIndex ; col ++ ) { trueNegatives . incrementCount ( col , 1 ) ; // all cols after } for ( int col = greaterIndex + 1 ; col < nCols ; col ++ ) { trueNegatives . incrementCount ( col , 1 ) ; // all cols after } } } } if ( nCols > 1 && topN > 1 ) { // Calculate top N accuracy // TODO : this could be more efficient INDArray realOutcomeIndex = Nd4j . argMax ( labels2d , 1 ) ; val nExamples = realOutcomeIndex . length ( ) ; for ( int i = 0 ; i < nExamples ; i ++ ) { int labelIdx = ( int ) realOutcomeIndex . getDouble ( i ) ; double prob = predictions2d . getDouble ( i , labelIdx ) ; INDArray row = predictions2d . getRow ( i ) ; int countGreaterThan = ( int ) Nd4j . getExecutioner ( ) . exec ( new MatchCondition ( row , Conditions . greaterThan ( prob ) ) ) . getDouble ( 0 ) ; if ( countGreaterThan < topN ) { // For example , for top 3 accuracy : can have at most 2 other probabilities larger topNCorrectCount ++ ; } topNTotalCount ++ ; } }
public class CmsDriverManager { /** * Adds an alias entry . < p > * @ param dbc the database context * @ param project the current project * @ param alias the alias to add * @ throws CmsException if something goes wrong */ public void addAlias ( CmsDbContext dbc , CmsProject project , CmsAlias alias ) throws CmsException { } }
I_CmsVfsDriver vfsDriver = getVfsDriver ( dbc ) ; vfsDriver . insertAlias ( dbc , project , alias ) ;
public class ModelsImpl { /** * Updates an explicit list item for a Pattern . Any entity . * @ param appId The application ID . * @ param versionId The version ID . * @ param entityId The Pattern . Any entity extractor ID . * @ param itemId The explicit list item ID . * @ param updateExplicitListItemOptionalParameter the object representing the optional parameters to be set before calling this API * @ param serviceCallback the async ServiceCallback to handle successful and failed responses . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the { @ link ServiceFuture } object */ public ServiceFuture < OperationStatus > updateExplicitListItemAsync ( UUID appId , String versionId , UUID entityId , long itemId , UpdateExplicitListItemOptionalParameter updateExplicitListItemOptionalParameter , final ServiceCallback < OperationStatus > serviceCallback ) { } }
return ServiceFuture . fromResponse ( updateExplicitListItemWithServiceResponseAsync ( appId , versionId , entityId , itemId , updateExplicitListItemOptionalParameter ) , serviceCallback ) ;
public class OptimizableHashSet { /** * Returns the maximum number of entries that can be filled before rehashing . * @ param n the size of the backing array . * @ param f the load factor . * @ return the maximum number of entries before rehashing . */ public static int maxFill ( int n , float f ) { } }
return Math . min ( ( int ) Math . ceil ( ( double ) ( ( float ) n * f ) ) , n - 1 ) ;
public class SbtDependencyResolver {
    /**
     * Resolves sbt dependencies by locating and parsing compile.xml reports.
     * Optionally runs "sbt compile" first (sbt.runPreStep), searches either a
     * configured target folder (sbt.targetFolder) or all target folders under
     * the project root, and returns the parsed projects either per-module or
     * aggregated (sbt.aggregateModules).
     */
    @Override
    protected ResolutionResult resolveDependencies(String projectFolder, String topLevelFolder, Set<String> bomFiles) {
        Collection<AgentProjectInfo> projects = new ArrayList<>();
        List<File> xmlFiles = new LinkedList<>();
        // run sbt compile if the user turned on the sbt.runPreStep flag
        if (sbtRunPreStep) {
            runPreStep(topLevelFolder);
        }
        // If sbt.targetFolder is not blank, search for compile.xml files under
        // that specific location; otherwise search under every target folder
        // found beneath the project root.
        if (StringUtils.isNotBlank(sbtTargetFolder)) {
            Path path = Paths.get(sbtTargetFolder);
            if (Files.exists(path)) {
                xmlFiles = findXmlReport(sbtTargetFolder, xmlFiles, new String[]{Constants.PATTERN + COMPILE_XML}, excludes);
            } else {
                logger.warn("The target folder path {} doesn't exist", sbtTargetFolder);
            }
        } else {
            Collection<String> targetFolders = findTargetFolders(topLevelFolder);
            if (!targetFolders.isEmpty()) {
                for (String targetPath : targetFolders) {
                    xmlFiles = findXmlReport(targetPath, xmlFiles, new String[]{Constants.PATTERN + COMPILE_XML}, excludes);
                }
            } else {
                logger.debug("Didn't find any target folder in {}", topLevelFolder);
            }
        }
        // If no compile.xml was found and sbt.runPreStep is off, warn the user
        // and ask them to enable the flag.
        if (xmlFiles.isEmpty() && !sbtRunPreStep) {
            logger.warn("Didn't find compile.xml please try to turn on the flag {}", SBT_TARGET_FOLDER);
        }
        for (File xmlFile : xmlFiles) {
            projects.add(parseXmlReport(xmlFile));
        }
        // NOTE(review): this local 'excludes' shadows the same-named member used
        // by the findXmlReport calls above - confirm the shadowing is intentional.
        Set<String> excludes = new HashSet<>();
        // Map every project to the top-level folder; as a side effect, collect
        // scala source-file exclusion patterns when ignoreSourceFiles is set.
        Map<AgentProjectInfo, Path> projectInfoPathMap = projects.stream().collect(Collectors.toMap(projectInfo -> projectInfo, projectInfo -> {
            if (ignoreSourceFiles) {
                excludes.addAll(normalizeLocalPath(projectFolder, topLevelFolder, extensionPattern(SCALA_SCRIPT_EXTENSION), null));
            }
            return Paths.get(topLevelFolder);
        }));
        ResolutionResult resolutionResult;
        if (!sbtAggregateModules) {
            resolutionResult = new ResolutionResult(projectInfoPathMap, excludes, getDependencyType(), topLevelFolder);
        } else {
            // Aggregate: flatten every module's dependencies into one result.
            resolutionResult = new ResolutionResult(projectInfoPathMap.keySet().stream().flatMap(project -> project.getDependencies().stream()).collect(Collectors.toList()), excludes, getDependencyType(), topLevelFolder);
        }
        return resolutionResult;
    }
}
public class Tabs { /** * Sets the call - back for the AJAX beforeLoad event . * @ param beforeLoadEvent * The ITabsAjaxEvent . */ public Tabs setAjaxBeforeLoadEvent ( ITabsAjaxEvent beforeLoadEvent ) { } }
this . ajaxEvents . put ( TabEvent . beforeLoad , beforeLoadEvent ) ; setBeforeLoadEvent ( new TabsAjaxJsScopeUiEvent ( this , TabEvent . beforeLoad ) ) ; return this ;
public class RunIfUtils { /** * Find the { @ link RunIf } annotation on the class or on a superclass in the hierarchy . * @ param klass The class to analyze . * @ return The { @ link RunIf } annotation if found , { @ code null } otherwise . */ private static RunIf findRunIfAnnotation ( Class < ? > klass ) { } }
while ( klass != null && klass != Object . class ) { RunIf annotation = klass . getAnnotation ( RunIf . class ) ; if ( annotation != null ) { return annotation ; } klass = klass . getSuperclass ( ) ; } return null ;
public class FeatureTiles { /** * Draw a tile image from the x , y , and zoom level by querying all features . * This could be very slow if there are a lot of features * @ param x * x coordinate * @ param y * y coordinate * @ param zoom * zoom level * @ return drawn image , or null */ public BufferedImage drawTileQueryAll ( int x , int y , int zoom ) { } }
BoundingBox boundingBox = TileBoundingBoxUtils . getWebMercatorBoundingBox ( x , y , zoom ) ; BufferedImage image = null ; // Query for all features FeatureResultSet resultSet = featureDao . queryForAll ( ) ; try { int totalCount = resultSet . getCount ( ) ; // Draw if at least one geometry exists if ( totalCount > 0 ) { if ( maxFeaturesPerTile == null || totalCount <= maxFeaturesPerTile ) { // Draw the tile image image = drawTile ( zoom , boundingBox , resultSet ) ; } else if ( maxFeaturesTileDraw != null ) { // Draw the unindexed max features tile image = maxFeaturesTileDraw . drawUnindexedTile ( tileWidth , tileHeight , totalCount , resultSet ) ; } } } finally { resultSet . close ( ) ; } return image ;
public class MessageItem { /** * Javadoc inherited */ @ Override public long getExpiryStartTime ( ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( this , tc , "getExpiryStartTime" ) ; // Set the expiry to start from the last time we updated the wait time . long startTime = getCurrentMEArrivalTimestamp ( ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( this , tc , "getExpiryStartTime" , Long . valueOf ( startTime ) ) ; return startTime ;
public class BasicMonthOfYearDateTimeField {
    /**
     * Adds a (possibly very large) number of months to the given instant.
     * Delegates to the int overload when the amount fits in an int; otherwise
     * performs the calendar arithmetic in long precision, clamping the
     * day-of-month to the length of the target month.
     *
     * @param instant the millisecond instant to add to
     * @param months the number of months to add (may be negative)
     * @return the resulting millisecond instant
     * @throws IllegalArgumentException if the resulting year is outside the
     *         chronology's supported range
     */
    public long add(long instant, long months) {
        int i_months = (int) months;
        if (i_months == months) {
            // Amount fits in an int: reuse the int implementation.
            return add(instant, i_months);
        }
        // Copied from add(long, int) and modified slightly:
        long timePart = iChronology.getMillisOfDay(instant);
        int thisYear = iChronology.getYear(instant);
        int thisMonth = iChronology.getMonthOfYear(instant, thisYear);
        long yearToUse;
        // Zero-based month offset from the start of thisYear.
        long monthToUse = thisMonth - 1 + months;
        if (monthToUse >= 0) {
            yearToUse = thisYear + (monthToUse / iMax);
            monthToUse = (monthToUse % iMax) + 1;
        } else {
            // Negative offset: normalize modulo iMax (months per year), taking
            // care that Java's % yields a negative remainder here.
            yearToUse = thisYear + (monthToUse / iMax) - 1;
            monthToUse = Math.abs(monthToUse);
            int remMonthToUse = (int) (monthToUse % iMax);
            if (remMonthToUse == 0) {
                remMonthToUse = iMax;
            }
            monthToUse = iMax - remMonthToUse + 1;
            if (monthToUse == 1) {
                // Wrapped exactly onto the first month of the next year.
                yearToUse += 1;
            }
        }
        if (yearToUse < iChronology.getMinYear() || yearToUse > iChronology.getMaxYear()) {
            throw new IllegalArgumentException("Magnitude of add amount is too large: " + months);
        }
        int i_yearToUse = (int) yearToUse;
        int i_monthToUse = (int) monthToUse;
        int dayToUse = iChronology.getDayOfMonth(instant, thisYear, thisMonth);
        // Clamp the day to the target month's length (e.g. Jan 31 + 1 month -> Feb 28/29).
        int maxDay = iChronology.getDaysInYearMonth(i_yearToUse, i_monthToUse);
        if (dayToUse > maxDay) {
            dayToUse = maxDay;
        }
        long datePart = iChronology.getYearMonthDayMillis(i_yearToUse, i_monthToUse, dayToUse);
        return datePart + timePart;
    }
}
public class NeoUtils {
    /**
     * Loads the database configuration map for neo4j databases.
     *
     * @param config Graphity configuration containing the configuration values
     * @return neo4j database configuration map
     */
    private static Map<String, String> loadDatabaseConfig(final Configuration config) {
        final Map<String, String> databaseConfig = new HashMap<String, String>();
        databaseConfig.put("cache_type", config.getCacheType());
        databaseConfig.put("use_memory_mapped_buffers", config.getUseMemoryMappedBuffers());
        return databaseConfig;
    }
}
public class HttpClient { /** * Set the unauthenticated proxy server to use . By default no proxy is configured . * @ param proxy The proxy server , e . g . { @ code new URI ( " http : / / my . proxy . com : 8000 " ) } */ public void setProxy ( URI proxy ) { } }
this . getParams ( ) . setParameter ( ConnRoutePNames . DEFAULT_PROXY , new HttpHost ( proxy . getHost ( ) , proxy . getPort ( ) , proxy . getScheme ( ) ) ) ;
public class AppsInner { /** * Check if an IoT Central application subdomain is available . * @ param operationInputs Set the name parameter in the OperationInputs structure to the subdomain of the IoT Central application to check . * @ param serviceCallback the async ServiceCallback to handle successful and failed responses . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the { @ link ServiceFuture } object */ public ServiceFuture < AppAvailabilityInfoInner > checkSubdomainAvailabilityAsync ( OperationInputs operationInputs , final ServiceCallback < AppAvailabilityInfoInner > serviceCallback ) { } }
return ServiceFuture . fromResponse ( checkSubdomainAvailabilityWithServiceResponseAsync ( operationInputs ) , serviceCallback ) ;
public class AppServiceCertificateOrdersInner {
    /**
     * Retrieve the list of certificate actions.
     *
     * @param resourceGroupName Name of the resource group to which the resource belongs.
     * @param name Name of the certificate order.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the List&lt;CertificateOrderActionInner&gt; object
     */
    public Observable<List<CertificateOrderActionInner>> retrieveCertificateActionsAsync(String resourceGroupName, String name) {
        // Unwrap the ServiceResponse envelope so subscribers receive the bare list.
        return retrieveCertificateActionsWithServiceResponseAsync(resourceGroupName, name)
                .map(new Func1<ServiceResponse<List<CertificateOrderActionInner>>, List<CertificateOrderActionInner>>() {
                    @Override
                    public List<CertificateOrderActionInner> call(ServiceResponse<List<CertificateOrderActionInner>> response) {
                        return response.body();
                    }
                });
    }
}
public class MultiphaseOverallContext {
    /**
     * Transform an operation for a server. This will also delegate to the host-controller
     * result-transformer.
     *
     * @param hostName name of the host whose pending request (if any) supplies the
     *        second-stage result transformation
     * @param remoteProxyController proxy that performs the operation transformation
     * @param transformationInputs inputs for the transformation
     * @param original the original operation
     * @return the transformed operation, with result transformation chained through the
     *         host request when one exists
     * @throws OperationFailedException if the transformation fails
     */
    public OperationTransformer.TransformedOperation transformServerOperation(final String hostName, final TransformingProxyController remoteProxyController, final Transformers.TransformationInputs transformationInputs, final ModelNode original) throws OperationFailedException {
        final OperationTransformer.TransformedOperation transformed = remoteProxyController.transformOperation(transformationInputs, original);
        final HostControllerUpdateTask.ExecutedHostRequest hostRequest = finalResultFutures.get(hostName);
        if (hostRequest == null) {
            // in case it's the local host-controller: no second-stage transform needed
            return transformed;
        }
        // Chain the two result transformers: first the proxy's transform, then the host request's.
        return new OperationTransformer.TransformedOperation(transformed.getTransformedOperation(), new OperationResultTransformer() {
            @Override
            public ModelNode transformResult(ModelNode result) {
                final ModelNode step1 = transformed.transformResult(result);
                return hostRequest.transformResult(step1);
            }
        });
    }
}
public class SanitizedContents {
    /**
     * Wraps an assumed-safe trusted_resource_uri constant.
     *
     * <p>This only accepts compile-time constants, based on the assumption that trusted resource
     * URIs that are controlled by the application (and not user input) are considered safe.
     *
     * @param constant the compile-time-constant URI string
     * @return a SanitizedContent of kind TRUSTED_RESOURCE_URI, direction LTR
     */
    public static SanitizedContent constantTrustedResourceUri(@CompileTimeConstant final String constant) {
        // URIs are treated as left-to-right text regardless of locale.
        return fromConstant(constant, ContentKind.TRUSTED_RESOURCE_URI, Dir.LTR);
    }
}
public class DynamicPipelineServiceImpl {
    /**
     * Computes the build stage of the pipeline.
     *
     * Given a list of builds and commits, this method will associate builds to commits and then
     * add commits to the build stage of the pipeline. Only commits that are tracked by our
     * dashboard are added, meaning that if a build builds some other branch the commit
     * information for that branch will not be put into the pipeline.
     *
     * Note: At present some extraneous builds may be processed due to limitations in the jenkins
     * api when there are multiple branches being built by the same job.
     *
     * @param pipeline pipeline whose BUILD stage receives the commits
     * @param builds a list of builds sorted descending by build number
     * @param commits the commits tracked by the dashboard
     */
    protected void processBuilds(Pipeline pipeline, List<Build> builds, List<Commit> commits) {
        // sort again in case code changes in future to be safe
        List<Build> sortedBuilds = new ArrayList<>(builds);
        Collections.sort(sortedBuilds, BUILD_NUMBER_COMPATATOR);
        Multimap<ObjectId, Commit> buildCommits = buildBuildToCommitsMap(sortedBuilds, commits);
        if (logger.isDebugEnabled()) {
            // Dump the build -> commits association for troubleshooting.
            StringBuilder sb = new StringBuilder();
            sb.append("\n===== Build Commit Mapping =====\n");
            for (Build build : sortedBuilds) {
                sb.append(" - " + build.getBuildUrl() + " -> ");
                Collection<Commit> commitsForBuild = buildCommits.get(build.getId());
                if (commitsForBuild != null && !commitsForBuild.isEmpty()) {
                    boolean hasPrinted = false;
                    for (Commit commit : commitsForBuild) {
                        if (hasPrinted) {
                            sb.append(", ");
                        }
                        sb.append(commit.getId());
                        hasPrinted = true;
                    }
                } else {
                    sb.append("(NONE) - No commits for build exists/found.");
                }
                sb.append("\n");
            }
            logger.debug(sb.toString());
        }
        Set<String> seenRevisionNumbers = new HashSet<>();
        Build latestSuccessfulBuild = null;
        Build lastSuccessfulBuild = null;
        for (Build build : sortedBuilds) {
            boolean isSuccessful = BuildStatus.Success.equals(build.getBuildStatus());
            if (isSuccessful) {
                lastSuccessfulBuild = build;
                // sortedBuilds is descending, so the first success seen is the latest one.
                if (latestSuccessfulBuild == null) {
                    latestSuccessfulBuild = build;
                }
            }
            if (isSuccessful || (lastSuccessfulBuild != null)) {
                Collection<Commit> commitsForBuild = buildCommits.get(build.getId());
                /*
                 * If the build belongs to a branch that has commits we are not tracking or if
                 * the commit is greater than 90 days old this will be null as we will not have
                 * a corresponding commit from our commits collection. This is desired as we don't
                 * want to track commits outside of our window or commits that belong to different
                 * branches.
                 */
                if (commitsForBuild != null) {
                    for (Commit commit : commitsForBuild) {
                        boolean commitNotSeen = seenRevisionNumbers.add(commit.getScmRevisionNumber());
                        /*
                         * Multiple builds may reference the same commit. For example, a failed build
                         * followed by a successful build may reference the same commit. We will use
                         * the first time we come across the commit as the build it belongs to.
                         */
                        if (commitNotSeen) {
                            // A failed build's commits are stamped with the time of the
                            // next (more recent) successful build.
                            long timestamp = isSuccessful ? build.getStartTime() : lastSuccessfulBuild.getStartTime();
                            pipeline.addCommit(PipelineStage.BUILD.getName(), new PipelineCommit(commit, timestamp));
                        }
                    }
                }
            }
        }
        // Gather commits that didn't participate in a build for one reason or another but have
        // been processed. For now use what is in BuildEventListener... this may need to be
        // improved upon in the future.
        if (latestSuccessfulBuild != null) {
            for (Commit commit : commits) {
                if (seenRevisionNumbers.contains(commit.getScmRevisionNumber())) {
                    continue;
                }
                if (commit.getScmCommitTimestamp() < latestSuccessfulBuild.getStartTime()) {
                    if (logger.isDebugEnabled()) {
                        logger.debug("processBuilds adding orphaned build commit " + commit.getScmRevisionNumber());
                    }
                    pipeline.addCommit(PipelineStage.BUILD.getName(),
                            new PipelineCommit(commit, commit.getScmCommitTimestamp()));
                }
            }
        }
    }
}
public class BigFileSearcher {
    /**
     * Search bytes from big file faster with realtime result callback.
     *
     * <p>This callbacks the result in real time, but since the concurrency is inferior to
     * {@code searchBigFile}, the execution speed is slower than {@code searchBigFile}.
     *
     * @param f target file
     * @param searchBytes sequence of bytes you want to search
     * @param listener callback for progress and realtime result
     * @return positions at which the byte sequence was found
     */
    public List<Long> searchBigFileRealtime(File f, byte[] searchBytes, OnRealtimeResultListener listener) {
        // Delegate to the offset-taking overload, starting the search at position 0.
        return searchBigFileRealtime(f, searchBytes, 0, listener);
    }
}
public class OperaBoolean {
    /**
     * Whether string holds a boolean-like value. It should equal "0", "1", "true" or "false".
     * This method says nothing about whether the object is true or false.
     *
     * @param string string to check
     * @return true if value is "boolesque", false otherwise
     * @throws IllegalArgumentException if parameter is not a boolesque value ("1", "true", "0", "false")
     * @throws NullPointerException if parameter is null
     */
    public static boolean isBoolesque(String string) {
        checkNotNull(string);
        // assertBoolesque throws IllegalArgumentException for non-boolesque input, so if
        // control reaches the return the value is already falsy or truthy.
        // NOTE(review): as written this method can never return false — it either returns
        // true or throws; confirm whether the documented "false otherwise" is intended.
        assertBoolesque(string);
        return isFalsy(string) || isTruthy(string);
    }
}
public class StringObservable {
    /**
     * Gather up all of the strings in to one string to be able to use it as one message.
     * Don't use this on infinite streams.
     *
     * @param src the source observable of strings
     * @return the Observable returning all strings concatenated as a single string
     */
    public static Observable<String> stringConcat(Observable<String> src) {
        // Reduce the stream into a single StringBuilder by appending each emitted string,
        // then convert the one-element StringBuilder stream into a String stream.
        return toString(src.reduce(new StringBuilder(), new Func2<StringBuilder, String, StringBuilder>() {
            @Override
            public StringBuilder call(StringBuilder a, String b) {
                return a.append(b);
            }
        }));
    }
}
public class JBaseScreen {
    /**
     * Get this field (or return null if this field doesn't belong on the screen).
     * This is the method to use to filter the items to display on the screen.
     *
     * @param iIndex the index of this field in the record.
     * @return the field's converter, SKIP_THIS_FIELD for hidden fields, or null when
     *         there is no field list or no field at this index.
     */
    public Converter getFieldForScreen(int iIndex) {
        if (this.getFieldList() != null) {
            FieldInfo fieldInfo = this.getFieldList().getField(iIndex);
            if (fieldInfo != null) {
                // Hidden fields are signalled with the SKIP_THIS_FIELD sentinel rather
                // than null, so callers can distinguish "skip" from "absent".
                if (fieldInfo.isHidden())
                    return SKIP_THIS_FIELD;
                return fieldInfo.getFieldConverter();
            }
        }
        return null;
    }
}
public class SeaGlassInternalShadowEffect { /** * Create the gradient for the left of a rectangular shadow . * @ param s the shape of the gradient . This is only used for its bounds . * @ return the gradient . */ public Paint getLeftShadowGradient ( Shape s ) { } }
Rectangle2D bounds = s . getBounds2D ( ) ; float minX = ( float ) bounds . getMinX ( ) ; float maxX = ( float ) bounds . getMaxX ( ) ; float midY = ( float ) bounds . getCenterY ( ) ; return new LinearGradientPaint ( minX , midY , maxX , midY , ( new float [ ] { 0f , 1f } ) , new Color [ ] { innerShadow . bottom , transparentColor } ) ;
public class BatchReplaceMain { /** * replaceFile . * @ param fileName a { @ link java . lang . String } object . * @ param profiles a { @ link java . util . Map } object . * @ param charset a { @ link java . lang . String } object . * @ throws java . lang . Exception if any . * @ throws java . io . FileNotFoundException if any . */ public static void replaceFile ( String fileName , final Map < String , List < Replacer > > profiles , Charset charset ) throws Exception , FileNotFoundException { } }
File file = new File ( fileName ) ; if ( file . isFile ( ) && ! file . isHidden ( ) ) { List < Replacer > replacers = profiles . get ( Strings . substringAfterLast ( fileName , "." ) ) ; if ( null == replacers ) { return ; } logger . info ( "processing {}" , fileName ) ; String filecontent = Files . readFileToString ( file , charset ) ; filecontent = Replacer . process ( filecontent , replacers ) ; writeToFile ( filecontent , fileName , charset ) ; } else { String [ ] subFiles = file . list ( new FilenameFilter ( ) { public boolean accept ( File dir , String name ) { if ( dir . isDirectory ( ) ) return true ; boolean matched = false ; for ( String key : profiles . keySet ( ) ) { matched = name . endsWith ( key ) ; if ( matched ) return true ; } return false ; } } ) ; if ( null != subFiles ) { for ( int i = 0 ; i < subFiles . length ; i ++ ) { replaceFile ( fileName + '/' + subFiles [ i ] , profiles , charset ) ; } } }
public class FileContentManager { /** * Saves the { @ link String } content to the specified location using the specified { @ link java . nio . charset . Charset } . * @ param content * @ param charset * @ param location * @ return */ @ Override public boolean save ( String content , Charset charset , String location ) { } }
File file = new File ( location ) ; try { Files . writeToFile ( file , content , charset ) ; } catch ( IOException ex ) { return false ; } return true ;
public class BluetoothService {
    /**
     * Start a connection attempt to the given remote device, cancelling any connection
     * attempt or active connection first.
     *
     * <p>NOTE(review): the original comment mentioned a {@code secure} flag
     * ("Socket Security type - Secure (true), Insecure (false)") but no such parameter
     * exists here — presumably a leftover from a two-argument overload; confirm.
     *
     * @param device the remote device to connect to
     */
    public synchronized void connect(BluetoothDevice device) {
        // Cancel any thread attempting to make a connection
        if (mState == BluetoothService.STATE_CONNECTING) {
            if (mConnectThread != null) {
                mConnectThread.cancel();
                mConnectThread = null;
            }
        }
        // Cancel any thread currently running a connection
        if (mConnectedThread != null) {
            mConnectedThread.cancel();
            mConnectedThread = null;
        }
        // Start the thread to tryConnect with the given device
        mConnectThread = new ConnectThread(device);
        mConnectThread.start();
        setState(BluetoothService.STATE_CONNECTING);
    }
}
public class ValueAnimator { /** * This function is called immediately before processing the first animation * frame of an animation . If there is a nonzero < code > startDelay < / code > , the * function is called after that delay ends . * It takes care of the final initialization steps for the * animation . * < p > Overrides of this method should call the superclass method to ensure * that internal mechanisms for the animation are set up correctly . < / p > */ void initAnimation ( ) { } }
if ( ! mInitialized ) { int numValues = mValues . length ; for ( int i = 0 ; i < numValues ; ++ i ) { mValues [ i ] . init ( ) ; } mInitialized = true ; }
public class SarlBatchCompiler {
    /**
     * Reports the given error message.
     *
     * @param message the error message (the original javadoc said "warning message" —
     *        presumably copy-pasted from a sibling method; the severity used here is ERROR).
     * @param parameters the values of the parameters that must be dynamically replaced
     *        within the message text.
     * @since 0.8
     */
    protected void reportInternalError(String message, Object... parameters) {
        getLogger().error(message, parameters);
        if (getReportInternalProblemsAsIssues()) {
            // Internal errors have no associated resource, so the URI is deliberately null.
            final org.eclipse.emf.common.util.URI uri = null;
            final Issue.IssueImpl issue = new Issue.IssueImpl();
            issue.setCode(INTERNAL_ERROR_CODE);
            issue.setMessage(message);
            issue.setUriToProblem(uri);
            issue.setSeverity(Severity.ERROR);
            notifiesIssueMessageListeners(issue, uri, message);
        }
    }
}
public class HighriskSpecialPrefixReader {
    /**
     * Make the request to the Twilio API to perform the read.
     *
     * @param client TwilioRestClient with which to make the request
     * @return HighriskSpecialPrefix ResourceSet
     */
    @Override
    public ResourceSet<HighriskSpecialPrefix> read(final TwilioRestClient client) {
        // Fetch the first page eagerly; the ResourceSet lazily pages through the rest.
        return new ResourceSet<>(this, client, firstPage(client));
    }
}
public class OssIndexAnalyzer { /** * Helper to complain if unable to parse Package - URL . */ @ Nullable private PackageUrl parsePackageUrl ( final String value ) { } }
try { return PackageUrl . parse ( value ) ; } catch ( PackageUrl . InvalidException e ) { log . warn ( "Invalid Package-URL: {}" , value , e ) ; return null ; }
public class TransactionException { /** * Thrown when creating a label which starts with a reserved character Schema . ImplicitType # RESERVED */ public static TransactionException invalidLabelStart ( Label label ) { } }
return create ( String . format ( "Cannot create a label {%s} starting with character {%s} as it is a reserved starting character" , label , Schema . ImplicitType . RESERVED . getValue ( ) ) ) ;
public class StringSupport {
    /**
     * Splits the input according to the given punctuation and quoting rules.
     *
     * TODO get rid of this code altogether
     *
     * @param input the text to split
     * @param punctuationChars characters treated as separators
     * @param quoteSymbols characters that delimit quoted segments
     * @param sort whether to sort the resulting tokens
     * @param convertToLowerCase whether to lower-case the tokens
     * @param distinct whether to drop duplicate tokens
     * @return the resulting tokens
     */
    public static List<String> split(String input, String punctuationChars, String quoteSymbols, boolean sort, boolean convertToLowerCase, boolean distinct) {
        // Thin facade over StringSplitter; the trailing false is a flag not exposed here.
        return new StringSplitter(input, punctuationChars, quoteSymbols, sort, convertToLowerCase, distinct, false).split();
    }
}
public class DestinationTools { /** * Make sure the given destination is a light - weight serializable destination reference */ public static Topic asRef ( Topic topic ) throws JMSException { } }
if ( topic == null ) return null ; if ( topic instanceof TopicRef ) return topic ; return new TopicRef ( topic . getTopicName ( ) ) ;
public class NodeModelUtils {
    /**
     * Recursively searches the children of {@code node} for the semantic object it produces.
     *
     * <p>Only descends through composite children whose grammar element is an Action or a
     * call to a non-datatype ParserRule, mirroring where semantic objects can be attached.
     *
     * @param node the node to search (non-null)
     * @param grammarElement the grammar element of {@code node}, may be null
     * @return the semantic object, or null if none was found
     */
    /* @Nullable */
    private static EObject findActualSemanticObjectInChildren(/* @NonNull */ INode node, /* @Nullable */ EObject grammarElement) {
        // A directly attached semantic element short-circuits the search.
        if (node.hasDirectSemanticElement())
            return node.getSemanticElement();
        // Normalize: a RuleCall is unwrapped to its rule; a rule is used as-is.
        AbstractRule rule = null;
        if (grammarElement instanceof RuleCall) {
            rule = ((RuleCall) grammarElement).getRule();
        } else if (grammarElement instanceof AbstractRule) {
            rule = (AbstractRule) grammarElement;
        }
        // Only non-datatype parser rules can produce semantic objects in their subtree.
        if (rule instanceof ParserRule && !GrammarUtil.isDatatypeRule(rule)) {
            if (node instanceof ICompositeNode) {
                for (INode child : ((ICompositeNode) node).getChildren()) {
                    if (child instanceof ICompositeNode) {
                        EObject childGrammarElement = child.getGrammarElement();
                        if (childGrammarElement instanceof Action) {
                            EObject result = findActualSemanticObjectInChildren(child, childGrammarElement);
                            if (result != null)
                                return result;
                        } else if (childGrammarElement instanceof RuleCall) {
                            RuleCall childRuleCall = (RuleCall) childGrammarElement;
                            if (childRuleCall.getRule() instanceof ParserRule
                                    && !GrammarUtil.isDatatypeRule(childRuleCall.getRule())) {
                                EObject result = findActualSemanticObjectInChildren(child, childRuleCall);
                                if (result != null)
                                    return result;
                            }
                        }
                    }
                }
            }
        }
        return null;
    }
}
public class ClasspathElementZip {
    /**
     * Opens this jarfile classpath element: resolves and whitelist-checks the (possibly
     * nested) zipfile, rejects JRE jars when system jars are disabled, and schedules child
     * classpath elements found in nested "lib/" dirs and in the manifest's Class-Path and
     * Bundle-ClassPath entries. On any failure, {@code skipClasspathElement} is set and the
     * method returns without throwing (except for InterruptedException).
     *
     * (non-Javadoc)
     * @see io.github.classgraph.ClasspathElement#open(
     *      nonapi.io.github.classgraph.concurrency.WorkQueue,
     *      nonapi.io.github.classgraph.utils.LogNode)
     */
    @Override
    void open(final WorkQueue<ClasspathEntryWorkUnit> workQueue, final LogNode log) throws InterruptedException {
        if (!scanSpec.scanJars) {
            if (log != null) {
                log.log("Skipping classpath element, since jar scanning is disabled: " + rawPath);
            }
            skipClasspathElement = true;
            return;
        }
        final LogNode subLog = log == null ? null : log.log("Opening jar: " + rawPath);
        // Strip any "!"-separated nested-jar suffix to get the outermost zipfile path.
        final int plingIdx = rawPath.indexOf('!');
        final String outermostZipFilePathResolved = FastPathResolver.resolve(FileUtils.CURR_DIR_PATH,
                plingIdx < 0 ? rawPath : rawPath.substring(0, plingIdx));
        if (!scanSpec.jarWhiteBlackList.isWhitelistedAndNotBlacklisted(outermostZipFilePathResolved)) {
            if (subLog != null) {
                subLog.log("Skipping jarfile that is blacklisted or not whitelisted: " + rawPath);
            }
            skipClasspathElement = true;
            return;
        }
        try {
            // Get LogicalZipFile for innermost nested jarfile
            Entry<LogicalZipFile, String> logicalZipFileAndPackageRoot;
            try {
                logicalZipFileAndPackageRoot = nestedJarHandler.nestedPathToLogicalZipFileAndPackageRootMap
                        .get(rawPath, subLog);
            } catch (final NullSingletonException e) {
                // Generally thrown on the second and subsequent attempt to call .get(), after the
                // first attempt failed
                throw new IOException("Could not get logical zipfile " + rawPath + " : " + e);
            }
            logicalZipFile = logicalZipFileAndPackageRoot.getKey();
            if (logicalZipFile == null) {
                // Should not happen, but this keeps lgtm static analysis happy
                throw new IOException("Logical zipfile was null");
            }
            // Get the normalized path of the logical zipfile
            zipFilePath = FastPathResolver.resolve(FileUtils.CURR_DIR_PATH, logicalZipFile.getPath());
            // Get package root of jarfile
            final String packageRoot = logicalZipFileAndPackageRoot.getValue();
            if (!packageRoot.isEmpty()) {
                packageRootPrefix = packageRoot + "/";
            }
        } catch (final IOException | IllegalArgumentException e) {
            if (subLog != null) {
                subLog.log("Could not open jarfile " + rawPath + " : " + e);
            }
            skipClasspathElement = true;
            return;
        }
        if (!scanSpec.enableSystemJarsAndModules && logicalZipFile.isJREJar) {
            // Found a blacklisted JRE jar that was not caught by filtering for rt.jar in
            // ClasspathFinder (the isJREJar value was set by detecting JRE headers in the
            // jar's manifest file)
            if (subLog != null) {
                subLog.log("Ignoring JRE jar: " + rawPath);
            }
            skipClasspathElement = true;
            return;
        }
        if (!logicalZipFile.isWhitelistedAndNotBlacklisted(scanSpec.jarWhiteBlackList)) {
            if (subLog != null) {
                subLog.log("Skipping jarfile that is blacklisted or not whitelisted: " + rawPath);
            }
            skipClasspathElement = true;
            return;
        }
        // Automatically add any nested "lib/" dirs to classpath, since not all classloaders
        // return them as classpath elements
        int childClasspathEntryIdx = 0;
        if (scanSpec.scanNestedJars) {
            for (final FastZipEntry zipEntry : logicalZipFile.entries) {
                for (final String libDirPrefix : ClassLoaderHandlerRegistry.AUTOMATIC_LIB_DIR_PREFIXES) {
                    if (zipEntry.entryNameUnversioned.startsWith(libDirPrefix)
                            && zipEntry.entryNameUnversioned.endsWith(".jar")) {
                        final String entryPath = zipEntry.getPath();
                        if (subLog != null) {
                            subLog.log("Found nested lib jar: " + entryPath);
                        }
                        workQueue.addWorkUnit(new ClasspathEntryWorkUnit(
                                /* rawClasspathEntry = */ new SimpleEntry<>(entryPath, classLoader),
                                /* parentClasspathElement = */ this,
                                /* orderWithinParentClasspathElement = */ childClasspathEntryIdx++));
                        break;
                    }
                }
            }
        }
        // Don't add child classpath elements that are identical to this classpath element,
        // or that are duplicates
        final Set<String> scheduledChildClasspathElements = new HashSet<>();
        scheduledChildClasspathElements.add(rawPath);
        // Create child classpath elements from values obtained from Class-Path entry in
        // manifest, resolving the paths relative to the dir or parent jarfile that the
        // jarfile is contained in
        if (logicalZipFile.classPathManifestEntryValue != null) {
            // Get parent dir of logical zipfile within grandparent slice,
            // e.g. for a zipfile slice path of "/path/to/jar1.jar!/lib/jar2.jar", this is "lib",
            // or for "/path/to/jar1.jar", this is "/path/to", or "" if the jar is in the
            // toplevel dir.
            final String jarParentDir = FileUtils
                    .getParentDirPath(logicalZipFile.getPathWithinParentZipFileSlice());
            // Add paths in manifest file's "Class-Path" entry to the classpath, resolving
            // paths relative to the parent directory or jar
            for (final String childClassPathEltPathRelative : logicalZipFile.classPathManifestEntryValue
                    .split(" ")) {
                if (!childClassPathEltPathRelative.isEmpty()) {
                    // Resolve Class-Path entry relative to containing dir
                    String childClassPathEltPath = FastPathResolver.resolve(jarParentDir,
                            childClassPathEltPathRelative);
                    // If this is a nested jar, prepend outer jar prefix
                    final ZipFileSlice parentZipFileSlice = logicalZipFile.getParentZipFileSlice();
                    if (parentZipFileSlice != null) {
                        childClassPathEltPath = parentZipFileSlice.getPath()
                                + (childClassPathEltPath.startsWith("/") ? "!" : "!/") + childClassPathEltPath;
                    }
                    // Only add child classpath elements once
                    if (scheduledChildClasspathElements.add(childClassPathEltPath)) {
                        // Schedule child classpath element for scanning
                        workQueue.addWorkUnit(new ClasspathEntryWorkUnit(
                                /* rawClasspathEntry = */ new SimpleEntry<>(childClassPathEltPath, classLoader),
                                /* parentClasspathElement = */ this,
                                /* orderWithinParentClasspathElement = */ childClasspathEntryIdx++));
                    }
                }
            }
        }
        // Add paths in an OSGi bundle jar manifest's "Bundle-ClassPath" entry to the
        // classpath, resolving the paths relative to the root of the jarfile
        if (logicalZipFile.bundleClassPathManifestEntryValue != null) {
            final String zipFilePathPrefix = zipFilePath + "!/";
            // Class-Path is split on " ", but Bundle-ClassPath is split on ","
            for (String childBundlePath : logicalZipFile.bundleClassPathManifestEntryValue.split(",")) {
                // Assume that Bundle-ClassPath paths have to be given relative to jarfile root
                while (childBundlePath.startsWith("/")) {
                    childBundlePath = childBundlePath.substring(1);
                }
                // Currently the position of "." relative to child classpath entries is ignored
                // (the Bundle-ClassPath path is treated as if "." is in the first position,
                // since child classpath entries are always added to the classpath after the
                // parent classpath entry that they were obtained from).
                if (!childBundlePath.isEmpty() && !childBundlePath.equals(".")) {
                    // Resolve Bundle-ClassPath entry within jar
                    final String childClassPathEltPath = zipFilePathPrefix
                            + FileUtils.sanitizeEntryPath(childBundlePath, /* removeInitialSlash = */ true);
                    // Only add child classpath elements once
                    if (scheduledChildClasspathElements.add(childClassPathEltPath)) {
                        // Schedule child classpath element for scanning
                        workQueue.addWorkUnit(new ClasspathEntryWorkUnit(
                                /* rawClasspathEntry = */ new SimpleEntry<>(childClassPathEltPath, classLoader),
                                /* parentClasspathElement = */ this,
                                /* orderWithinParentClasspathElement = */ childClasspathEntryIdx++));
                    }
                }
            }
        }
    }
}
public class S3AInputStream {
    /**
     * Opens a new stream at mPos if the wrapped stream mIn is null.
     *
     * <p>Retries while the retry policy allows, but only when S3 reports 404 (key not
     * found) — presumably to ride out eventual consistency; any other S3 error is
     * rethrown immediately as IOException.
     *
     * @throws IOException if the object cannot be fetched
     */
    private void openStream() throws IOException {
        if (mIn != null) {
            // stream is already open
            return;
        }
        GetObjectRequest getReq = new GetObjectRequest(mBucketName, mKey);
        // If the position is 0, setting range is redundant and causes an error if the file
        // is 0 length
        if (mPos > 0) {
            getReq.setRange(mPos);
        }
        AmazonS3Exception lastException = null;
        while (mRetryPolicy.attempt()) {
            try {
                mIn = mClient.getObject(getReq).getObjectContent();
                return;
            } catch (AmazonS3Exception e) {
                LOG.warn("Attempt {} to open key {} in bucket {} failed with exception : {}",
                        mRetryPolicy.getAttemptCount(), mKey, mBucketName, e.toString());
                // Non-404 errors are fatal and not retried.
                if (e.getStatusCode() != HttpStatus.SC_NOT_FOUND) {
                    throw new IOException(e);
                }
                // Key does not exist
                lastException = e;
            }
        }
        // Failed after retrying: key does not exist
        throw new IOException(lastException);
    }
}
public class AdHocCommandManager {
    /**
     * Discover the commands of an specific JID. The <code>jid</code> is a full JID.
     *
     * @param jid the full JID to retrieve the commands for.
     * @return the discovered items.
     * @throws XMPPException if the operation failed for some reason.
     * @throws SmackException if there was no response from the server.
     * @throws InterruptedException if the calling thread was interrupted.
     */
    public DiscoverItems discoverCommands(Jid jid) throws XMPPException, SmackException, InterruptedException {
        // Service discovery scoped to the ad-hoc commands node (NAMESPACE).
        return serviceDiscoveryManager.discoverItems(jid, NAMESPACE);
    }
}
public class servicegroup_binding { /** * Use this API to fetch servicegroup _ binding resource of given name . */ public static servicegroup_binding get ( nitro_service service , String servicegroupname ) throws Exception { } }
servicegroup_binding obj = new servicegroup_binding ( ) ; obj . set_servicegroupname ( servicegroupname ) ; servicegroup_binding response = ( servicegroup_binding ) obj . get_resource ( service ) ; return response ;
public class MavenConverter {
    /**
     * Converts MavenDependency to Dependency representation used in Aether.
     *
     * @param dependency the Maven dependency
     * @param registry A registry of known artifact types.
     * @return the corresponding Aether dependency
     */
    public static Dependency asDependency(MavenDependencySPI dependency, ArtifactTypeRegistry registry) {
        /*
         * Allow for undeclared scopes: an undeclared scope is mapped to the empty string
         * rather than the SPI's default scope string.
         */
        String scope = dependency.getScope().toString();
        if (dependency.isUndeclaredScope()) {
            scope = EMPTY;
        }
        return new Dependency(asArtifact(dependency, registry), scope, dependency.isOptional(),
                asExclusions(dependency.getExclusions()));
    }
}
public class RestApiConfig { /** * Enables provided REST endpoint groups . It doesn ' t replace already enabled groups . */ public RestApiConfig enableGroups ( RestEndpointGroup ... endpointGroups ) { } }
if ( endpointGroups != null ) { enabledGroups . addAll ( Arrays . asList ( endpointGroups ) ) ; } return this ;
public class SeaGlassRootPaneUI {
    /**
     * Invokes supers implementation of <code>installUI</code> to install the necessary state
     * onto the passed in <code>JRootPane</code> to render the metal look and feel
     * implementation of <code>RootPaneUI</code>. If the <code>windowDecorationStyle</code>
     * property of the <code>JRootPane</code> is other than <code>JRootPane.NONE</code>, this
     * will add a custom <code>Component</code> to render the widgets to <code>JRootPane</code>,
     * as well as installing a custom <code>Border</code> and <code>LayoutManager</code> on
     * the <code>JRootPane</code>.
     *
     * @param c the JRootPane to install state onto.
     */
    public void installUI(JComponent c) {
        super.installUI(c);
        root = (JRootPane) c;
        updateTextured();
        int style = root.getWindowDecorationStyle();
        Container parent = root.getParent();
        // Only decorate root panes hosted in top-level frames/dialogs that requested
        // window decorations.
        if (parent != null && (parent instanceof JFrame || parent instanceof JDialog) && style != JRootPane.NONE) {
            installClientDecorations(root);
        }
    }
}
public class CommerceDiscountPersistenceImpl {
    /**
     * Returns the first commerce discount in the ordered set where groupId = &#63;.
     *
     * @param groupId the group ID
     * @param orderByComparator the comparator to order the set by (optionally <code>null</code>)
     * @return the first matching commerce discount
     * @throws NoSuchDiscountException if a matching commerce discount could not be found
     */
    @Override
    public CommerceDiscount findByGroupId_First(long groupId, OrderByComparator<CommerceDiscount> orderByComparator) throws NoSuchDiscountException {
        CommerceDiscount commerceDiscount = fetchByGroupId_First(groupId, orderByComparator);
        if (commerceDiscount != null) {
            return commerceDiscount;
        }
        // Not found: assemble the exception message; _NO_SUCH_ENTITY_WITH_KEY presumably
        // supplies the opening "{" matched by the closing "}" appended below.
        StringBundler msg = new StringBundler(4);
        msg.append(_NO_SUCH_ENTITY_WITH_KEY);
        msg.append("groupId=");
        msg.append(groupId);
        msg.append("}");
        throw new NoSuchDiscountException(msg.toString());
    }
}
public class TextMateGenerator2 {
    /**
     * Generate the rules for the punctuation symbols.
     *
     * @param punctuation the punctuation symbols.
     * @return the rules.
     */
    protected List<Map<String, ?>> generatePunctuation(Set<String> punctuation) {
        final List<Map<String, ?>> list = new ArrayList<>();
        if (!punctuation.isEmpty()) {
            // All symbols are collapsed into a single alternation-regex rule.
            list.add(pattern(it -> {
                it.matches(orRegex(punctuation));
                it.style(PUNCTUATION_STYLE);
                it.comment("Operators and Punctuations"); //$NON-NLS-1$
            }));
        }
        return list;
    }
}
public class LdapConfigManager { /** * public List < String > getRDNProperties ( String qualifiedEntityType , Map < String , Object > configProps , Set < String > pids , ConfigurationAdmin configAdmin ) { * / / TODO : : Extract Ldap Entity RDN * return null ; */ private void setGroupSearchScope ( Map < String , Object > configProps ) { } }
if ( configProps . get ( "recursiveSearch" ) != null && configProps . get ( "recursiveSearch" ) instanceof Boolean ) iRecursiveSearch = ( Boolean ) configProps . get ( "recursiveSearch" ) ;
public class StyleHelper {
    /**
     * Called from {@link BaseReportGenerator}.
     *
     * @param stylerFactory the factory whose settings are used to initialize FONT
     */
    @Override
    public void setStylerFactory(StylerFactory stylerFactory) {
        this.stylerFactory = stylerFactory;
        try {
            FONT.initialize(stylerFactory.getSettings());
        } catch (NoSuchMethodException | InstantiationException | IllegalAccessException | InvocationTargetException ex) {
            // Reflection failures during font initialization are unrecoverable here;
            // wrap in the library's runtime exception.
            throw new VectorPrintRuntimeException(ex);
        }
    }
}
public class MemorySegmentFactory {
    /**
     * Allocates some unpooled off-heap memory and creates a new memory segment that
     * represents that memory.
     *
     * @param size The size of the off-heap memory segment to allocate.
     * @param owner The owner to associate with the off-heap memory segment.
     * @return A new memory segment, backed by unpooled off-heap memory.
     */
    public static MemorySegment allocateUnpooledOffHeapMemory(int size, Object owner) {
        ByteBuffer memory = ByteBuffer.allocateDirect(size);
        // NOTE(review): delegates to wrapPooledOffHeapMemory even though this memory is
        // unpooled — presumably the wrapper is pooling-agnostic; confirm the naming.
        return wrapPooledOffHeapMemory(memory, owner);
    }
}
public class NetworkUtils { /** * 获取当前网络类型 * @ return 返回网络类型 */ public static NetworkType getNetworkType ( Context context ) { } }
ConnectivityManager cm = ( ConnectivityManager ) context . getSystemService ( Context . CONNECTIVITY_SERVICE ) ; final NetworkInfo info = cm . getActiveNetworkInfo ( ) ; if ( info == null || ! info . isConnectedOrConnecting ( ) ) { return NetworkType . NONE ; } int type = info . getType ( ) ; if ( ConnectivityManager . TYPE_WIFI == type ) { return NetworkType . WIFI ; } else if ( ConnectivityManager . TYPE_MOBILE == type ) { return NetworkType . MOBILE ; } else { return NetworkType . OTHER ; }
public class ThreadCacheContext { public static Map < String , Object > getReadOnlyCacheMap ( ) { } }
// for framework if ( ! exists ( ) ) { return DfCollectionUtil . emptyMap ( ) ; } return Collections . unmodifiableMap ( threadLocal . get ( ) ) ;
public class LRUCache { /** * Put . * @ param key the key * @ param value the value */ public void put ( final K key , final V value ) { } }
if ( map . containsKey ( key ) ) { // remove the key from the FIFO queue queue . remove ( key ) ; } while ( queue . size ( ) >= maxSize ) { K oldestKey = queue . poll ( ) ; if ( null != oldestKey ) { if ( listener != null ) { listener . onRemove ( map . remove ( oldestKey ) ) ; } else { map . remove ( oldestKey ) ; } } } queue . add ( key ) ; map . put ( key , value ) ;
public class LambdaToMethod { /** * Signature Generation */ private String typeSig ( Type type ) { } }
L2MSignatureGenerator sg = new L2MSignatureGenerator ( ) ; sg . assembleSig ( type ) ; return sg . toString ( ) ;
public class Facebook { /** * Restore the token , expiration time , and last update time from cached values . * These should be values obtained from getAccessToken ( ) , getAccessExpires , and * getLastAccessUpdate ( ) respectively . * This method is deprecated . See { @ link Facebook } and { @ link Session } for more info . * @ param accessToken - access token * @ param accessExpires - access token expiration time * @ param lastAccessUpdate - timestamp of the last token update */ @ Deprecated public void setTokenFromCache ( String accessToken , long accessExpires , long lastAccessUpdate ) { } }
checkUserSession ( "setTokenFromCache" ) ; synchronized ( this . lock ) { this . accessToken = accessToken ; accessExpiresMillisecondsAfterEpoch = accessExpires ; lastAccessUpdateMillisecondsAfterEpoch = lastAccessUpdate ; }
public class Event { /** * getter for causes _ protein - gets * @ generated * @ return value of the feature */ public FSArray getCauses_protein ( ) { } }
if ( Event_Type . featOkTst && ( ( Event_Type ) jcasType ) . casFeat_causes_protein == null ) jcasType . jcas . throwFeatMissing ( "causes_protein" , "ch.epfl.bbp.uima.genia.Event" ) ; return ( FSArray ) ( jcasType . ll_cas . ll_getFSForRef ( jcasType . ll_cas . ll_getRefValue ( addr , ( ( Event_Type ) jcasType ) . casFeatCode_causes_protein ) ) ) ;
public class ns_config_diff { /** * < pre > * Use this operation to get config diff between source and target configuration files in the tabular format in bulk . * < / pre > */ public static ns_config_diff [ ] diff_table ( nitro_service client , ns_config_diff [ ] resources ) throws Exception { } }
if ( resources == null ) throw new Exception ( "Null resource array" ) ; if ( resources . length == 1 ) return ( ( ns_config_diff [ ] ) resources [ 0 ] . perform_operation ( client , "diff_table" ) ) ; return ( ( ns_config_diff [ ] ) perform_operation_bulk_request ( client , resources , "diff_table" ) ) ;