signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class ExpectedConditions {

    /**
     * An expectation for checking that the text of the element found by the given
     * locator contains a match for the given regular-expression pattern
     * (substring semantics, via {@link java.util.regex.Matcher#find()}).
     *
     * @param locator used to find the element
     * @param pattern used as expected text matcher pattern
     * @return Boolean true when the element's text contains a match for the pattern
     */
    public static ExpectedCondition<Boolean> textMatches(final By locator, final Pattern pattern) {
        return new ExpectedCondition<Boolean>() {
            // Last text value read from the element; surfaced by toString() for diagnostics.
            private String currentValue = null;

            @Override
            public Boolean apply(WebDriver driver) {
                try {
                    currentValue = driver.findElement(locator).getText();
                    // find() => the pattern only needs to match a substring of the text.
                    return pattern.matcher(currentValue).find();
                } catch (Exception e) {
                    // Element not found / stale etc. => the condition is simply not met yet.
                    return false;
                }
            }

            @Override
            public String toString() {
                return String.format("text found by %s to match pattern \"%s\". Current text: \"%s\"",
                        locator, pattern.pattern(), currentValue);
            }
        };
    }
}
public class ManagedBackupShortTermRetentionPoliciesInner {

    /**
     * Updates a managed database's short term retention policy (blocking call).
     *
     * @param resourceGroupName The name of the resource group that contains the resource. You can
     *     obtain this value from the Azure Resource Manager API or the portal.
     * @param managedInstanceName The name of the managed instance.
     * @param databaseName The name of the database.
     * @param retentionDays The backup retention period in days. This is how many days
     *     Point-in-Time Restore will be supported.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the ManagedBackupShortTermRetentionPolicyInner object if successful.
     */
    public ManagedBackupShortTermRetentionPolicyInner beginCreateOrUpdate(String resourceGroupName,
            String managedInstanceName, String databaseName, Integer retentionDays) {
        // Delegate to the async variant and block for the single expected response body.
        return beginCreateOrUpdateWithServiceResponseAsync(resourceGroupName, managedInstanceName,
                databaseName, retentionDays).toBlocking().single().body();
    }
}
public class SecondsBasedEntryTaskScheduler { /** * Removes the entry from being scheduled to be evicted . * Cleans up parent container ( second - > entries map ) if it doesn ' t hold anymore items this second . * Cancels associated scheduler ( second - > scheduler map ) if there are no more items to remove for this second . * Returns associated scheduled entry . * @ param second second at which this entry was scheduled to be evicted * @ param entries entries which were already scheduled to be evicted for this second * @ param key entry key * @ return associated scheduled entry */ private ScheduledEntry < K , V > cancelAndCleanUpIfEmpty ( Integer second , Map < Object , ScheduledEntry < K , V > > entries , Object key ) { } }
ScheduledEntry < K , V > result = entries . remove ( key ) ; cleanUpScheduledFuturesIfEmpty ( second , entries ) ; return result ;
public class OntopNativeMappingParser {

    /**
     * Reads one mapping-declaration section from the reader, accumulating valid
     * triples maps and recording indicators for invalid ones.
     *
     * <p>Lines are parsed as "label value" pairs; blank lines terminate the current
     * mapping entry, comment lines are skipped, and the section ends at the
     * end-collection symbol.</p>
     *
     * TODO: follow the advice of IntelliJ: split this method to make its workflow tractable.
     *
     * @param reader line reader positioned inside the declaration section
     * @param parsers parsers to try for target queries
     * @param invalidMappingIndicators read-write list of error indicators
     * @return the updated mapping set of the current source
     * @throws IOException on an unknown parameter label or a missing end-collection symbol
     */
    private static List<SQLPPTriplesMap> readMappingDeclaration(LineNumberReader reader,
            List<TargetQueryParser> parsers, List<Indicator> invalidMappingIndicators)
            throws IOException {
        List<SQLPPTriplesMap> currentSourceMappings = new ArrayList<>();
        String mappingId = "";
        String currentLabel = ""; // the reader is working on which label
        StringBuffer sourceQuery = null;
        ImmutableList<TargetAtom> targetQuery = null;
        int wsCount = 0; // length of whitespace used as the separator
        boolean isMappingValid = true; // a flag to load the mapping to the model if valid
        String line;
        for (line = reader.readLine();
             line != null && !line.trim().equals(END_COLLECTION_SYMBOL);
             line = reader.readLine()) {
            int lineNumber = reader.getLineNumber();
            if (line.isEmpty()) {
                if (!mappingId.isEmpty()) {
                    // Save the mapping to the model (if valid) at this point
                    if (isMappingValid) {
                        currentSourceMappings = addNewMapping(mappingId, sourceQuery.toString(),
                                targetQuery, currentSourceMappings);
                        mappingId = "";
                        sourceQuery = null;
                        targetQuery = null;
                    }
                } else {
                    // Blank line outside an entry: re-arm validity for the next mapping.
                    isMappingValid = true;
                }
                continue;
            }
            if (isCommentLine(line)) {
                continue; // skip the comment line
            }
            if (!isMappingValid) {
                continue; // skip if the mapping is invalid
            }
            // Split into "label value" on the first run of tabs/spaces.
            String[] tokens = line.split("[\t| ]+", 2);
            String label;
            String value;
            if (tokens.length > 1) {
                label = tokens[0].trim();
                value = tokens[1].trim();
            } else {
                // Continuation line: no label, the whole line is the value.
                value = tokens[0];
                label = "";
            }
            if (!label.isEmpty()) {
                currentLabel = tokens[0];
            }
            if (currentLabel.equals(Label.mappingId.name())) {
                mappingId = value;
                if (mappingId.isEmpty()) { // empty or not
                    invalidMappingIndicators.add(
                            new Indicator(lineNumber, Label.mappingId, MAPPING_ID_IS_BLANK));
                    isMappingValid = false;
                }
            } else if (currentLabel.equals(Label.target.name())) {
                String targetString = value;
                if (targetString.isEmpty()) { // empty or not
                    invalidMappingIndicators.add(
                            new Indicator(lineNumber, mappingId, TARGET_QUERY_IS_BLANK));
                    isMappingValid = false;
                } else {
                    // Load the target query
                    try {
                        targetQuery = loadTargetQuery(targetString, parsers);
                    } catch (UnparsableTargetQueryException e) {
                        invalidMappingIndicators.add(new Indicator(lineNumber,
                                new String[] { mappingId, targetString, e.getMessage() },
                                ERROR_PARSING_TARGET_QUERY));
                        isMappingValid = false;
                    }
                }
            } else if (currentLabel.equals(Label.source.name())) {
                String sourceString = value;
                if (sourceString.isEmpty()) { // empty or not
                    invalidMappingIndicators.add(
                            new Indicator(lineNumber, mappingId, SOURCE_QUERY_IS_BLANK));
                    isMappingValid = false;
                } else {
                    // Build the source query string (it may span multiple lines).
                    if (sourceQuery == null) {
                        sourceQuery = new StringBuffer();
                        sourceQuery.append(sourceString);
                    } else {
                        sourceQuery.append("\n");
                        sourceQuery.append(sourceString);
                    }
                }
            } else {
                String msg = String.format("Unknown parameter name \"%s\" at line: %d.",
                        tokens[0], lineNumber);
                throw new IOException(msg);
            }
        }
        if (line == null) {
            throw new IOException(
                    String.format("End collection symbol %s is missing.", END_COLLECTION_SYMBOL));
        }
        // Save the last mapping entry to the model
        if (!mappingId.isEmpty() && isMappingValid) {
            currentSourceMappings = addNewMapping(mappingId, sourceQuery.toString(),
                    targetQuery, currentSourceMappings);
        }
        return currentSourceMappings;
    }
}
public class Sets { /** * Creates a < i > mutable < / i > { @ code HashSet } instance containing the given elements in unspecified order . * @ param elements the elements that the set should contain * @ return a new { @ code HashSet } containing those elements ( minus duplicates ) */ public static < E > HashSet < E > newHashSet ( Iterator < ? extends E > elements ) { } }
HashSet < E > set = newHashSet ( ) ; while ( elements . hasNext ( ) ) { set . add ( elements . next ( ) ) ; } return set ;
public class AnalysisContext {

    /**
     * Returns a new analysis context builder that extracts the information about the available
     * extensions from the provided Revapi instance.
     *
     * <p>The extensions have to be known so that both old and new style of configuration can be
     * usefully worked with.
     *
     * @param revapi the revapi instance to read the available extensions from
     * @return a new analysis context builder
     */
    @Nonnull
    public static Builder builder(Revapi revapi) {
        List<String> knownExtensionIds = new ArrayList<>();
        // Collect extension ids from every extension category of the pipeline configuration.
        addExtensionIds(revapi.getPipelineConfiguration().getApiAnalyzerTypes(), knownExtensionIds);
        addExtensionIds(revapi.getPipelineConfiguration().getTransformTypes(), knownExtensionIds);
        addExtensionIds(revapi.getPipelineConfiguration().getFilterTypes(), knownExtensionIds);
        addExtensionIds(revapi.getPipelineConfiguration().getReporterTypes(), knownExtensionIds);
        return new Builder(knownExtensionIds);
    }
}
public class Async {

    /**
     * Return an Observable that calls the given Callable and emits its result or Exception when
     * an Observer subscribes. The Callable is called on the default thread pool for computation.
     *
     * @param <R> the return type
     * @param callable the callable to call on each subscription
     * @return an Observable that calls the given Callable and emits its result or Exception when
     *         an Observer subscribes
     * @see #start(rx.functions.Func0)
     */
    public static <R> Observable<R> fromCallable(Callable<? extends R> callable) {
        // Delegate to the scheduler-aware overload, pinning the computation scheduler.
        return fromCallable(callable, Schedulers.computation());
    }
}
public class AutoMlClient {

    /**
     * Lists model evaluations.
     *
     * @param parent Resource name of the model to list the model evaluations for. If modelId is
     *     set as "-", this will list model evaluations from across all models of the parent
     *     location.
     * @throws com.google.api.gax.rpc.ApiException if the remote call fails
     */
    public final ListModelEvaluationsPagedResponse listModelEvaluations(String parent) {
        // Build the request from the parent resource name and delegate to the request overload.
        ListModelEvaluationsRequest request =
                ListModelEvaluationsRequest.newBuilder().setParent(parent).build();
        return listModelEvaluations(request);
    }
}
public class IAM {

    /**
     * Adds a single user to a specific group (AWS does not support bulk adding of users
     * to a group, so callers invoke this once per user).
     *
     * @param providerUserId the user to be added
     * @param providerGroupId the group to which the user will be added
     * @throws CloudException an error occurred in the cloud provider adding this user to the
     *     specified group, or the user/group does not exist
     * @throws InternalException an error occurred within Dasein Cloud adding the user to the group
     */
    private void addUserToGroup(@Nonnull String providerUserId, @Nonnull String providerGroupId)
            throws CloudException, InternalException {
        if (logger.isTraceEnabled()) {
            logger.trace("ENTER: " + IAM.class.getName() + ".addUserToGroup(" + providerUserId + "," + providerGroupId + ")");
        }
        try {
            // Resolve both sides up front so we can fail with a clear message.
            CloudUser user = getUser(providerUserId);
            if (user == null) {
                throw new CloudException("No such user: " + providerUserId);
            }
            CloudGroup group = getGroup(providerGroupId);
            if (group == null) {
                throw new CloudException("No such group: " + providerGroupId);
            }
            Map<String, String> parameters = new HashMap<>();
            parameters.put("GroupName", group.getName());
            parameters.put("UserName", user.getUserName());
            try {
                if (logger.isInfoEnabled()) {
                    logger.info("Adding " + providerUserId + " to " + providerGroupId + "...");
                }
                invoke(IAMMethod.ADD_USER_TO_GROUP, parameters);
                if (logger.isInfoEnabled()) {
                    logger.info("Added.");
                }
            } catch (EC2Exception e) {
                // Log the provider's error summary, then wrap for the caller.
                logger.error(e.getSummary());
                throw new CloudException(e);
            }
        } finally {
            if (logger.isTraceEnabled()) {
                logger.trace("EXIT: " + IAM.class.getName() + ".addUserToGroup()");
            }
        }
    }
}
public class MyImageUtils { /** * Get the last non - white Y point * @ param img Image in memory * @ return The trimmed height */ private static int getTrimmedHeight ( BufferedImage img ) { } }
int width = img . getWidth ( ) ; int height = img . getHeight ( ) ; int trimmedHeight = 0 ; for ( int i = 0 ; i < width ; i ++ ) { for ( int j = height - 1 ; j >= 0 ; j -- ) { if ( img . getRGB ( i , j ) != Color . WHITE . getRGB ( ) && j > trimmedHeight ) { trimmedHeight = j ; break ; } } } return trimmedHeight ;
public class CmsWorkplaceManager {

    /**
     * Returns the condition definition for the given resource type that is triggered before
     * opening the editor.
     *
     * @param resourceType the resource type
     * @return the best-matching condition definition for the given resource type class,
     *     or null if none is found
     */
    public I_CmsPreEditorActionDefinition getPreEditorConditionDefinition(I_CmsResourceType resourceType) {
        Iterator<I_CmsPreEditorActionDefinition> i = m_preEditorConditionDefinitions.iterator();
        I_CmsPreEditorActionDefinition result = null;
        // Smallest class-hierarchy distance seen so far; -1 means "no match yet".
        int matchResult = -1;
        while (i.hasNext()) {
            I_CmsPreEditorActionDefinition currentDefinition = i.next();
            if (resourceType.getClass().isInstance(currentDefinition.getResourceType())) {
                // now determine the match count:
                // walk up the superclass chain until the definition's resource type class is hit.
                int matchDistance = 0;
                Class<?> superClass = resourceType.getClass();
                while (true) {
                    // check if a super class is present
                    if (superClass == null) {
                        break;
                    }
                    if (superClass.getName().equals(currentDefinition.getResourceType().getClass().getName())) {
                        break;
                    }
                    matchDistance += 1;
                    superClass = superClass.getSuperclass();
                }
                // Keep the definition with the smallest hierarchy distance.
                if (matchResult != -1) {
                    if (matchDistance < matchResult) {
                        matchResult = matchDistance;
                        result = currentDefinition;
                    }
                } else {
                    matchResult = matchDistance;
                    result = currentDefinition;
                }
            }
        }
        return result;
    }
}
public class AsynchronousRequest {

    /**
     * Get basic character information for the given character name that is linked to the given
     * API key. For more info on the Character Core API see
     * <a href="https://wiki.guildwars2.com/wiki/API:2/characters#Core">the GW2 wiki</a>.
     *
     * @param API API key
     * @param name name of character
     * @param callback callback that is going to be used for {@link Call#enqueue(Callback)}
     * @throws GuildWars2Exception invalid API key | empty character name
     * @throws NullPointerException if the given {@link Callback} is null
     * @see CharacterCore basic character info
     */
    public void getCharacterInformation(String API, String name, Callback<CharacterCore> callback)
            throws GuildWars2Exception, NullPointerException {
        // Validate both the API key and the character name before issuing the request.
        isParamValid(new ParamChecker(ParamType.API, API), new ParamChecker(ParamType.CHAR, name));
        gw2API.getCharacterCore(name, API).enqueue(callback);
    }
}
public class SHPDriverFunction {

    /**
     * Return the shape type supported by the shapefile format for the given geometry metadata.
     *
     * @param meta geometry metadata (geometry type code and Z flag)
     * @return the matching {@link ShapeType}, or null when the geometry type is not supported
     * @throws SQLException declared for callers; this mapping itself does not throw it
     */
    private static ShapeType getShapeTypeFromGeometryMetaData(GeometryMetaData meta) throws SQLException {
        ShapeType shapeType;
        switch (meta.geometryType) {
            // All line variants map to ARC / ARCZ based solely on the Z flag.
            // NOTE(review): the *M (measured) codes are folded into the non-measured shape
            // types here — confirm that dropping the M dimension is intended.
            case GeometryTypeCodes.MULTILINESTRING:
            case GeometryTypeCodes.LINESTRING:
            case GeometryTypeCodes.MULTILINESTRINGM:
            case GeometryTypeCodes.LINESTRINGM:
            case GeometryTypeCodes.MULTILINESTRINGZ:
            case GeometryTypeCodes.LINESTRINGZ:
                shapeType = meta.hasZ ? ShapeType.ARCZ : ShapeType.ARC;
                break;
            case GeometryTypeCodes.POINT:
                shapeType = meta.hasZ ? ShapeType.POINTZ : ShapeType.POINT;
                break;
            case GeometryTypeCodes.MULTIPOINT:
                shapeType = meta.hasZ ? ShapeType.MULTIPOINTZ : ShapeType.MULTIPOINT;
                break;
            case GeometryTypeCodes.POLYGON:
            case GeometryTypeCodes.MULTIPOLYGON:
                shapeType = meta.hasZ ? ShapeType.POLYGONZ : ShapeType.POLYGON;
                break;
            default:
                // Unsupported geometry type for the SHP format.
                return null;
        }
        return shapeType;
    }
}
public class Connection {

    /**
     * Sends a serializable object over the connection's output stream.
     *
     * @param o the object to serialize
     * @throws IOException if serialization or the underlying write fails
     */
    public void writeObject(Object o) throws IOException {
        ObjectOutputStream oos = AnonymousClassWarnings.checkingObjectOutputStream(out);
        oos.writeObject(o);
        // don't close oos, which will close the underlying stream
        // no need to flush either, given the way oos is implemented
    }
}
public class CoverageDataPng { /** * Get the pixel values of the buffered image as 16 bit unsigned integer * values * @ param image * tile image * @ return unsigned integer pixel values */ public int [ ] getUnsignedPixelValues ( BufferedImage image ) { } }
short [ ] pixelValues = getPixelValues ( image ) ; int [ ] unsignedPixelValues = getUnsignedPixelValues ( pixelValues ) ; return unsignedPixelValues ;
public class AnnotationStringConverterFactory {

    /**
     * Finds a converter by searching for annotated methods and constructors.
     *
     * @param <T> the type of the converter
     * @param cls the class to find a method for, not null
     * @return the converter, or null when the class declares no @ToString method
     * @throws IllegalStateException if @ToString exists without any @FromString, or if both a
     *     constructor and a method carry @FromString
     */
    private <T> StringConverter<T> findAnnotatedConverter(final Class<T> cls) {
        Method toString = findToStringMethod(cls); // checks superclasses
        if (toString == null) {
            return null;
        }
        MethodConstructorStringConverter<T> con = findFromStringConstructor(cls, toString);
        // Only search superclasses for a @FromString method when no constructor was found.
        MethodsStringConverter<T> mth = findFromStringMethod(cls, toString, con == null); // optionally checks superclasses
        if (con == null && mth == null) {
            throw new IllegalStateException(
                    "Class annotated with @ToString but not with @FromString: " + cls.getName());
        }
        if (con != null && mth != null) {
            throw new IllegalStateException(
                    "Both method and constructor are annotated with @FromString: " + cls.getName());
        }
        return (con != null ? con : mth);
    }
}
public class Neighbours {

    /**
     * Gets the Neighbour based on the bus id supplied.
     *
     * @param busId the id of the bus whose Neighbour is wanted
     * @return the Neighbour object, or null when no active or recovered neighbour matches
     */
    Neighbour getBusNeighbour(String busId) {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(tc, "getBusNeighbour", busId);
        Neighbour neighbour = null;
        // First look through the active neighbours.
        synchronized (_neighbours) {
            Iterator neighbours = _neighbours.keySet().iterator();
            while (neighbours.hasNext()) {
                neighbour = getNeighbour((SIBUuid8) neighbours.next());
                if (neighbour.getBusId().equals(busId)) {
                    if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
                        SibTr.exit(tc, "getBusNeighbour", neighbour);
                    return neighbour;
                }
            }
        }
        // Fall back to neighbours recovered from a previous run.
        synchronized (_recoveredNeighbours) {
            Iterator recoveredNeighbours = _recoveredNeighbours.keySet().iterator();
            while (recoveredNeighbours.hasNext()) {
                neighbour = (Neighbour) _recoveredNeighbours.get(recoveredNeighbours.next());
                if (neighbour.getBusId().equals(busId)) {
                    if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
                        SibTr.exit(tc, "getBusNeighbour", neighbour);
                    return neighbour;
                }
            }
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "getBusNeighbour", null);
        return null;
    }
}
public class LogHelper { /** * Gets an input on a configuration file placed in the application assets . * @ param context * the Android context to use * @ param fileName * the file name * @ return the input stream to read the file or < code > null < / code > if the * file does not exist . */ protected static InputStream getConfigurationFileFromAssets ( Context context , String fileName ) { } }
if ( context == null ) { return null ; } AssetManager assets = context . getAssets ( ) ; if ( assets == null ) { return null ; } try { return assets . open ( fileName ) ; } catch ( IOException e ) { return null ; }
public class AutoConfigurationImportSelector {

    /**
     * Return the {@link AutoConfigurationEntry} based on the {@link AnnotationMetadata} of the
     * importing {@link Configuration @Configuration} class.
     *
     * @param autoConfigurationMetadata the auto-configuration metadata
     * @param annotationMetadata the annotation metadata of the configuration class
     * @return the auto-configurations that should be imported
     */
    protected AutoConfigurationEntry getAutoConfigurationEntry(
            AutoConfigurationMetadata autoConfigurationMetadata, AnnotationMetadata annotationMetadata) {
        if (!isEnabled(annotationMetadata)) {
            return EMPTY_ENTRY;
        }
        AnnotationAttributes attributes = getAttributes(annotationMetadata);
        List<String> configurations = getCandidateConfigurations(annotationMetadata, attributes);
        configurations = removeDuplicates(configurations);
        Set<String> exclusions = getExclusions(annotationMetadata, attributes);
        // Fail fast when an exclusion names a class that is not a known auto-configuration.
        checkExcludedClasses(configurations, exclusions);
        configurations.removeAll(exclusions);
        // Filter candidates against the pre-computed auto-configuration metadata.
        configurations = filter(configurations, autoConfigurationMetadata);
        fireAutoConfigurationImportEvents(configurations, exclusions);
        return new AutoConfigurationEntry(configurations, exclusions);
    }
}
public class BeanBoxUtils { /** * If aop is a instance of Aop alliance Interceptor , wrap it to a BeanBox and * set as purevalue , otherwise direct return it ( class or BeanBox ) */ protected static Object checkAOP ( Object aop ) { } }
if ( aop != null && aop instanceof MethodInterceptor ) return new BeanBox ( ) . setTarget ( aop ) . setPureValue ( true ) ; else return aop ;
public class ProfilerParseRunner {

    /**
     * Lists the work done by the most expensive failed rules.
     * First all failed rules are sorted according to how long they took, then, for each such rule,
     * a string is produced listing it and all its child rules. These are returned.
     *
     * @param topEntries produce reports for the top {@code topEntries} most expensive failed
     *     rules; a negative number means: all of them
     * @return one extended-report string per selected failed rule
     */
    public List<String> getExtendedReport(int topEntries) {
        // TreeSet keeps the failed entries in their natural (most-expensive-first) order.
        TreeSet<ReportEntry<V>> topLevelFailed = new TreeSet<ReportEntry<V>>();
        fillReport(topLevelFailed, rootReport);
        int count = topEntries;
        List<String> result = Lists.newArrayList();
        StringBuilder out = new StringBuilder();
        for (ReportEntry<V> entry : topLevelFailed) {
            // Post-decrement: a negative topEntries never reaches zero, so all entries report.
            if (count-- == 0) return result;
            out.setLength(0);
            fillExtendedReport(out, 0, entry);
            result.add(out.toString());
        }
        return result;
    }
}
public class TreeBuilder {

    /**
     * Convert data content for all resource paths matched by the path selector.
     *
     * @param converter content converter
     * @param selector path selection
     * @return a new builder wrapping the built tree in a converting view
     */
    public TreeBuilder<T> convert(ContentConverter<T> converter, PathSelector selector) {
        // Build the current tree, wrap it in a ConverterTree, and continue building on that.
        return TreeBuilder.<T>builder(new ConverterTree<T>(build(), converter, selector));
    }
}
public class CronUtils {

    /**
     * Converts a valid Quartz cron expression to a valid SauronSoftware one.
     * The conversions are the following:
     * <ul>
     * <li>"seconds" part eliminated;</li>
     * <li>numbers in "day of week" started from 0, not from 1 as in Quartz;</li>
     * <li>"value/interval" items converted to "value-maxValue/interval" items;</li>
     * <li>"/interval" items converted to "*&#47;interval" items;</li>
     * <li>W in dates changed to MON-FRI in days of week;</li>
     * <li>L in day of week changed to 24-L in dates (not an equivalent conversion,
     *     but at least the back conversion is made correctly);</li>
     * <li>#n part in day of weeks is eliminated;</li>
     * <li>all question marks are changed to asterisks.</li>
     * </ul>
     *
     * @param quartzExpr valid Quartz cron expression (null tolerated)
     * @return a similar SauronSoftware cron expression, or the input unchanged when it
     *     does not have more than five fields, or null for null input
     */
    public static String packSchedule(String quartzExpr) {
        if (quartzExpr == null) return null;
        String[] exprElems = quartzExpr.trim().split("\\s+");
        if (exprElems.length > 5) {
            // Quartz cron expression contains secs, mins, hours, dates, months, days[, years]
            // 1. Change days of week numbering (Quartz counts from 1, SauronSoftware from 0)
            exprElems[5] = decreaseDoW(exprElems[5]);
            // 2. Add interval in repeated items ("value/interval" -> "value-maxValue/interval")
            exprElems[1] = extendRepeating(exprElems[1], "59");
            exprElems[2] = extendRepeating(exprElems[2], "23");
            exprElems[3] = extendRepeating(exprElems[3], "L");
            exprElems[4] = extendRepeating(exprElems[4], "12");
            exprElems[5] = extendRepeating(exprElems[5], "SAT");
            // 3. Replace "working days" (W in dates) with "MON-FRI" in days of week
            if (exprElems[3].indexOf('W') >= 0) {
                exprElems[3] = exprElems[3].replaceAll("W", "");
                exprElems[5] = "MON-FRI";
            }
            // 4. Replace "last day of week" with the dates interval 24-L
            if (exprElems[5].indexOf('L') >= 0) {
                exprElems[5] = exprElems[5].replaceAll("L", "");
                exprElems[3] = "24-L";
            }
            // 5. Ignore #n in days of week
            int indSharp = exprElems[5].indexOf('#');
            if (indSharp >= 0) exprElems[5] = exprElems[5].substring(0, indSharp);
            // 6. Change all question marks to asterisks
            if ("?".equals(exprElems[3])) exprElems[3] = "*";
            if ("?".equals(exprElems[5])) exprElems[5] = "*";
            // 7. Ignore seconds and years
            return concat(' ', exprElems[1], exprElems[2], exprElems[3], exprElems[4], exprElems[5]);
        } else {
            // Not a Quartz-style expression (five fields or fewer): return unchanged.
            return quartzExpr;
        }
    }
}
public class SplitButton { /** * Sets the horizontal alignment of the text . { @ code SplitButton } ' s default is * { @ code SwingConstants . CENTER } . * @ param alignment * the alignment value , one of the following values : * < ul > * < li > { @ code SwingConstants . RIGHT } * < li > { @ code SwingConstants . LEFT } * < li > { @ code SwingConstants . CENTER } ( default ) * < / ul > * @ throws IllegalArgumentException * if the alignment is not one of the valid values */ @ Override public final void setHorizontalAlignment ( int alignment ) { } }
if ( ( alignment == LEFT ) || ( alignment == CENTER ) || ( alignment == RIGHT ) ) { this . alignment = alignment ; reajustTextGap ( ) ; } else { throw new IllegalArgumentException ( ) ; }
public class ListAvailableSolutionStacksResult { /** * A list of available solution stacks . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setSolutionStacks ( java . util . Collection ) } or { @ link # withSolutionStacks ( java . util . Collection ) } if you want * to override the existing values . * @ param solutionStacks * A list of available solution stacks . * @ return Returns a reference to this object so that method calls can be chained together . */ public ListAvailableSolutionStacksResult withSolutionStacks ( String ... solutionStacks ) { } }
if ( this . solutionStacks == null ) { setSolutionStacks ( new com . amazonaws . internal . SdkInternalList < String > ( solutionStacks . length ) ) ; } for ( String ele : solutionStacks ) { this . solutionStacks . add ( ele ) ; } return this ;
public class Tuple3i {

    /**
     * {@inheritDoc}
     */
    // NOTE(review): components of this tuple that are <= max are never written into t, so t
    // retains its previous values for those components — confirm this matches the intended
    // clampMax contract (some vecmath variants copy every component, clamped or not).
    @Override
    public void clampMax(int max, T t) {
        if (this.x > max) t.setX(max);
        if (this.y > max) t.setY(max);
        if (this.z > max) t.setZ(max);
    }
}
public class IoUtil { /** * Writes the specified { @ code message } to the specified { @ code sessions } . * If the specified { @ code message } is an { @ link IoBuffer } , the buffer is * automatically duplicated using { @ link IoBuffer # duplicate ( ) } . */ public static List < WriteFuture > broadcast ( Object message , IoSession ... sessions ) { } }
if ( sessions == null ) { sessions = EMPTY_SESSIONS ; } List < WriteFuture > answer = new ArrayList < > ( sessions . length ) ; if ( message instanceof IoBuffer ) { for ( IoSession s : sessions ) { answer . add ( s . write ( ( ( IoBuffer ) message ) . duplicate ( ) ) ) ; } } else { for ( IoSession s : sessions ) { answer . add ( s . write ( message ) ) ; } } return answer ;
public class RESTMBeanServerConnection {

    /**
     * {@inheritDoc}
     *
     * <p>Implemented as a REST GET against the server's instanceOf resource; an HTTP 200 body is
     * read as a JSON boolean. Error responses are mapped back onto the exceptions a local
     * MBeanServerConnection would throw.</p>
     */
    @Override
    public boolean isInstanceOf(ObjectName name, String className)
            throws InstanceNotFoundException, IOException {
        final String sourceMethod = "isInstanceOf";
        checkConnection();
        // An ObjectName pattern cannot identify a single instance, so reject it up front.
        if (name.isPattern())
            throw new InstanceNotFoundException(
                    RESTClientMessagesUtil.getMessage(RESTClientMessagesUtil.OBJECT_NAME_PATTERN, name));
        URL instanceOfURL = null;
        HttpsURLConnection connection = null;
        try {
            // Get URL for instanceOf
            instanceOfURL = getInstanceOfURL(name, className);
            // Get connection to server
            connection = getConnection(instanceOfURL, HttpMethod.GET);
        } catch (IOException io) {
            throw getRequestErrorException(sourceMethod, io, instanceOfURL);
        }
        // Check response code from server
        int responseCode = 0;
        try {
            responseCode = connection.getResponseCode();
        } catch (ConnectException ce) {
            recoverConnection(ce);
            // Server is down; not a client bug
            throw ce;
        }
        switch (responseCode) {
            case HttpURLConnection.HTTP_OK:
                JSONConverter converter = JSONConverter.getConverter();
                try {
                    // Process and return server response, which should be a boolean
                    return converter.readBoolean(connection.getInputStream());
                } catch (Exception e) {
                    throw getResponseErrorException(sourceMethod, e, instanceOfURL);
                } finally {
                    // Converters are pooled; always return this one.
                    JSONConverter.returnConverter(converter);
                }
            case HttpURLConnection.HTTP_BAD_REQUEST:
            case HttpURLConnection.HTTP_INTERNAL_ERROR:
                try {
                    // Server response should be a serialized Throwable
                    throw getServerThrowable(sourceMethod, connection);
                } catch (ClassNotFoundException cnf) {
                    throw new IOException(cnf);
                } catch (InstanceNotFoundException inf) {
                    // Re-throw the declared checked exception as-is.
                    throw inf;
                } catch (Throwable t) {
                    throw new IOException(
                            RESTClientMessagesUtil.getMessage(RESTClientMessagesUtil.UNEXPECTED_SERVER_THROWABLE), t);
                }
            case HttpURLConnection.HTTP_UNAUTHORIZED:
            case HttpURLConnection.HTTP_FORBIDDEN:
                throw getBadCredentialsException(responseCode, connection);
            case HttpURLConnection.HTTP_GONE:
            case HttpURLConnection.HTTP_NOT_FOUND:
                // The resource vanished: attempt connection recovery, then surface the error.
                IOException ioe = getResponseCodeErrorException(sourceMethod, responseCode, connection);
                recoverConnection(ioe);
                throw ioe;
            default:
                throw getResponseCodeErrorException(sourceMethod, responseCode, connection);
        }
    }
}
public class Utils {

    /**
     * Determines whether the current processing case applies.
     * <p>When no cases are specified, the case is considered applicable.</p>
     *
     * @param currentCase the current processing case
     * @param cases the cases to test against
     * @return true when the current case is covered by {@code cases}
     * @throws IllegalArgumentException if {@code currentCase} is null or is neither
     *     {@code Load} nor {@code Save}
     */
    public static boolean isProcessCase(final ProcessCase currentCase, ProcessCase[] cases) {
        ArgUtils.notNull(currentCase, "currentCase");
        if (currentCase == ProcessCase.Load) {
            return isLoadCase(cases);
        } else if (currentCase == ProcessCase.Save) {
            return isSaveCase(cases);
        } else {
            // Only the Load and Save processing cases are supported.
            throw new IllegalArgumentException("currentCase is not support:" + currentCase);
        }
    }
}
public class Node { /** * Gets the current channel , if the node is connected and online , or null . * This is just a convenience method for { @ link Computer # getChannel ( ) } with null check . */ @ CheckForNull public final VirtualChannel getChannel ( ) { } }
Computer c = toComputer ( ) ; return c == null ? null : c . getChannel ( ) ;
public class Histogram { /** * Returns the breakpoints between histogram cells for a dataset based on a * suggested bin width h . * @ param x the data set . * @ param h the bin width . * @ return the breakpoints between histogram cells */ public static double [ ] breaks ( double [ ] x , double h ) { } }
return breaks ( Math . min ( x ) , Math . max ( x ) , h ) ;
public class StringUtils { /** * Convert an array of bytes to a string of hex values * @ param bytes bytes to convert * @ return a string of hex values . */ public static String encodeHexString ( byte [ ] bytes ) { } }
char [ ] hexChars = new char [ bytes . length * 2 ] ; for ( int j = 0 ; j < bytes . length ; j ++ ) { int v = bytes [ j ] & 0xFF ; hexChars [ j * 2 ] = hexArray [ v >>> 4 ] ; hexChars [ j * 2 + 1 ] = hexArray [ v & 0x0F ] ; } return new String ( hexChars ) ;
public class AbstractLoaderFactory { /** * combine business date ( from this . buildLoadOperation ) , source attribute ( from this . addSourceAttributeOperation ) * and processing time ( from this . buildRefreshOperations ) */ public void createLoadTasksPerBusinessDate ( CacheLoaderContext context , Object sourceAttribute , BooleanFilter postLoadFilter ) { } }
// No refresh interval configured means this is the initial load; otherwise
// schedule refresh-style load tasks.
if (context.getRefreshInterval() == null) {
    this.createLoadTasksForInitialLoad(context, sourceAttribute, postLoadFilter);
} else {
    this.createLoadTasksForRefresh(context, sourceAttribute, postLoadFilter);
}
public class ByteArrayHolder { /** * Checks if there is enough space in the buffer to write N additional bytes . Will grow the buffer * if necessary . It takes current position in the buffer into account . * @ param numBytesToAdd the number of bytes you want to add to the buffer */ public void ensureHasSpace ( int numBytesToAdd ) { } }
if ( numBytesToAdd < 0 ) { throw new IllegalArgumentException ( "Number of bytes can't be negative" ) ; } int capacityLeft = getCapacityLeft ( ) ; if ( capacityLeft < numBytesToAdd ) { grow ( numBytesToAdd - capacityLeft , true ) ; }
public class PrettyTime { /** * Format the given { @ link Duration } object , using the { @ link TimeFormat } specified by the { @ link TimeUnit } contained * within . Rounding rules are ignored . If the given { @ link Duration } is < code > null < / code > , the current value of * { @ link System # currentTimeMillis ( ) } will be used instead . * @ param duration the { @ link Duration } to be formatted * @ return A formatted string representing { @ code duration } */ public String formatUnrounded ( Duration duration ) { } }
// Null falls back to "now", as documented.
if (duration == null) {
    return formatUnrounded(now());
}
final TimeFormat timeFormat = getFormat(duration.getUnit());
return timeFormat.decorateUnrounded(duration, timeFormat.formatUnrounded(duration));
public class RythmConfiguration { /** * Set template source home path * < p > < b > Note < / b > , this is not supposed to be used by user application or third party plugin < / p > */ public void setTemplateHome ( File home ) { } }
// Record the template home in both the typed configuration map and the raw
// key/value map (the two writes are independent).
data.put(HOME_TEMPLATE, home);
raw.put(HOME_TEMPLATE.getKey(), home);
public class ResourceChange { /** * For the < code > Modify < / code > action , indicates which resource attribute is triggering this update , such as a * change in the resource attribute ' s < code > Metadata < / code > , < code > Properties < / code > , or < code > Tags < / code > . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setScope ( java . util . Collection ) } or { @ link # withScope ( java . util . Collection ) } if you want to override the * existing values . * @ param scope * For the < code > Modify < / code > action , indicates which resource attribute is triggering this update , such as * a change in the resource attribute ' s < code > Metadata < / code > , < code > Properties < / code > , or < code > Tags < / code > . * @ return Returns a reference to this object so that method calls can be chained together . * @ see ResourceAttribute */ public ResourceChange withScope ( String ... scope ) { } }
// Lazily create the backing list, then append all values in order.
if (this.scope == null) {
    setScope(new com.amazonaws.internal.SdkInternalList<String>(scope.length));
}
java.util.Collections.addAll(this.scope, scope);
return this;
public class Ifc4PackageImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ Override public EClass getIfcLightFixture ( ) { } }
// Generated EMF accessor (@generated — do not hand-edit): lazily resolves the
// IfcLightFixture EClass from the registered Ifc4 package by its fixed
// classifier index (341) and caches it in a field for subsequent calls.
if ( ifcLightFixtureEClass == null ) { ifcLightFixtureEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( Ifc4Package . eNS_URI ) . getEClassifiers ( ) . get ( 341 ) ; } return ifcLightFixtureEClass ;
public class SupportsTypeImpl { /** * Creates for all String objects representing < code > portlet - mode < / code > elements , * a new < code > portlet - mode < / code > element * @ param values list of < code > portlet - mode < / code > objects * @ return the current instance of < code > SupportsType < T > < / code > */ public SupportsType < T > portletMode ( String ... values ) { } }
// Null varargs means nothing to add; otherwise create one child per value.
if (values == null) {
    return this;
}
for (final String portletModeName : values) {
    childNode.createChild("portlet-mode").text(portletModeName);
}
return this;
public class FieldsAndGetters { /** * Dumps all fields and getters of { @ code obj } to { @ code System . out } . * @ see # dumpIf */ public static void dumpAll ( String name , Object obj ) { } }
// Convenience overload: delegates to the StringPrinter-based dumpAll variant,
// targeting standard output.
dumpAll ( name , obj , StringPrinter . systemOut ( ) ) ;
public class EndpointInfoBuilder { /** * Sets the available { @ link SerializationFormat } s . */ public EndpointInfoBuilder availableFormats ( SerializationFormat ... availableFormats ) { } }
requireNonNull ( availableFormats , "availableFormats" ) ; return availableFormats ( ImmutableSet . copyOf ( availableFormats ) ) ;
public class DbPersistenceManager { /** * Creates a parameter array for an SQL statement that needs * ( i ) a node identifier , and ( 2 ) another parameter . * @ param id the node id * @ param p the other parameter * @ param before whether the other parameter should be before the uuid parameter * @ return an Object array that represents the parameters */ protected Object [ ] createParams ( NodeId id , Object p , boolean before ) { } }
// Create the key List < Object > key = new ArrayList < Object > ( ) ; if ( getStorageModel ( ) == SM_BINARY_KEYS ) { key . add ( id . getRawBytes ( ) ) ; } else { key . add ( id . getMostSignificantBits ( ) ) ; key . add ( id . getLeastSignificantBits ( ) ) ; } // Create the parameters List < Object > params = new ArrayList < Object > ( ) ; if ( before ) { params . add ( p ) ; params . addAll ( key ) ; } else { params . addAll ( key ) ; params . add ( p ) ; } return params . toArray ( ) ;
public class MethodHandle { /** * Returns a method handle for a getter of the given field . * @ param fieldDescription The field to represent . * @ return A method handle for a getter of the given field . */ public static MethodHandle ofGetter ( FieldDescription . InDefinedShape fieldDescription ) { } }
// Builds a GETTER handle (the previous Javadoc said "setter" — copy/paste
// error; the handle type below is HandleType.ofGetter). The handle's owner is
// the field's declaring type, its name the field's internal name, its return
// type the field's type, and it takes no parameters.
return new MethodHandle ( HandleType . ofGetter ( fieldDescription ) , fieldDescription . getDeclaringType ( ) . asErasure ( ) , fieldDescription . getInternalName ( ) , fieldDescription . getType ( ) . asErasure ( ) , Collections . < TypeDescription > emptyList ( ) ) ;
public class LogHelper { /** * Check if logging is enabled for the passed class based on the error level * provided * @ param aLoggingClass * The class to determine the logger from . May not be < code > null < / code > * @ param aErrorLevel * The error level . May not be < code > null < / code > . * @ return < code > true < / code > if the respective log level is allowed , * < code > false < / code > if not */ public static boolean isEnabled ( @ Nonnull final Class < ? > aLoggingClass , @ Nonnull final IErrorLevel aErrorLevel ) { } }
// Delegates to the Logger-based overload after resolving the class's SLF4J logger.
return isEnabled ( LoggerFactory . getLogger ( aLoggingClass ) , aErrorLevel ) ;
public class ResourceUtil { /** * This method writes all data from a string to a temp file . The file will be automatically deleted on JVM shutdown . * @ param prefix file prefix i . e . " abc " in abc . txt * @ param suffix file suffix i . e . " . txt " in abc . txt * @ param data string containing the data to write to the file * @ return < code > File < / code > object * @ throws IOException IO exception */ public static File writeStringToTempFile ( String prefix , String suffix , String data ) throws IOException { } }
File testFile = File . createTempFile ( prefix , suffix ) ; BufferedWriter writer = new BufferedWriter ( new OutputStreamWriter ( new FileOutputStream ( testFile . getAbsoluteFile ( ) , false ) , DataUtilDefaults . charSet ) ) ; writer . write ( data ) ; writer . flush ( ) ; writer . close ( ) ; return testFile ;
public class SessionManager { /** * Method used by the recurring session purge event to scan for and * discard expired sessions . This prevents memory build up when sessions * are no longer queried by clients . */ protected void startPurge ( ) { } }
// For each session grouping: collect expired sessions and remove them from
// the grouping map while holding its monitor, then invalidate any still-valid
// expired session OUTSIDE that lock — invalidate() calls user listeners, and
// alien code must not run under the map lock. Any Throwable is routed to
// FFDC and traced; the purge itself never propagates an exception.
final boolean bTrace = TraceComponent . isAnyTracingEnabled ( ) ; if ( bTrace && tc . isEventEnabled ( ) ) { Tr . event ( tc , "Running purge of expired sessions" ) ; } try { List < SessionImpl > toPurge = new ArrayList < SessionImpl > ( ) ; for ( Map < String , SessionImpl > sessions : this . groupings . values ( ) ) { synchronized ( sessions ) { // scan for all expired sessions for ( SessionImpl session : sessions . values ( ) ) { if ( session . checkExpiration ( false ) ) { toPurge . add ( session ) ; } } // now remove those sessions from the map ( outside the // iteration loop ) for ( SessionImpl session : toPurge ) { sessions . remove ( session . getId ( ) ) ; } } // end - sync // now iterate that list outside the lock ( involves calling // session listeners ) for ( SessionImpl session : toPurge ) { // if the session is still " valid " then we need to call // invalidate now if ( bTrace && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "Purging session; " + session ) ; } if ( ! session . isInvalid ( ) ) { session . invalidate ( ) ; } } toPurge . clear ( ) ; } // end - grouping - loop } catch ( Throwable t ) { FFDCFilter . processException ( t , getClass ( ) . getName ( ) , "purge" ) ; if ( bTrace && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "Error while running purge scan; " + t ) ; } }
public class PipedReader { /** * Reads the next character of data from this piped stream . * If no character is available because the end of the stream * has been reached , the value < code > - 1 < / code > is returned . * This method blocks until input data is available , the end of * the stream is detected , or an exception is thrown . * @ return the next character of data , or < code > - 1 < / code > if the end of the * stream is reached . * @ exception IOException if the pipe is * < a href = PipedInputStream . html # BROKEN > < code > broken < / code > < / a > , * { @ link # connect ( java . io . PipedWriter ) unconnected } , closed , * or an I / O error occurs . */ public synchronized int read ( ) throws IOException { } }
// State checks first: unconnected, reader-closed, or a dead writer with an
// empty buffer (in < 0 means empty) are all I/O errors. While the buffer is
// empty, wait in 1-second slices: EOF if the writer closed cleanly; "Pipe
// broken" if the writer thread died and two wait rounds elapsed without data;
// notifyAll() wakes any writer blocked on a full buffer. On success, take one
// char from the circular buffer, wrap 'out', and mark empty (in = -1) when
// the read pointer catches the write pointer.
if ( ! connected ) { throw new IOException ( "Pipe not connected" ) ; } else if ( closedByReader ) { throw new IOException ( "Pipe closed" ) ; } else if ( writeSide != null && ! writeSide . isAlive ( ) && ! closedByWriter && ( in < 0 ) ) { throw new IOException ( "Write end dead" ) ; } readSide = Thread . currentThread ( ) ; int trials = 2 ; while ( in < 0 ) { if ( closedByWriter ) { /* closed by writer , return EOF */ return - 1 ; } if ( ( writeSide != null ) && ( ! writeSide . isAlive ( ) ) && ( -- trials < 0 ) ) { throw new IOException ( "Pipe broken" ) ; } /* might be a writer waiting */ notifyAll ( ) ; try { wait ( 1000 ) ; } catch ( InterruptedException ex ) { throw new java . io . InterruptedIOException ( ) ; } } int ret = buffer [ out ++ ] ; if ( out >= buffer . length ) { out = 0 ; } if ( in == out ) { /* now empty */ in = - 1 ; } return ret ;
public class ListKeyPoliciesResult { /** * A list of policy names . Currently , there is only one policy and it is named " Default " . * @ param policyNames * A list of policy names . Currently , there is only one policy and it is named " Default " . */ public void setPolicyNames ( java . util . Collection < String > policyNames ) { } }
if ( policyNames == null ) { this . policyNames = null ; return ; } this . policyNames = new com . ibm . cloud . objectstorage . internal . SdkInternalList < String > ( policyNames ) ;
public class ListChangeSetsResult { /** * A list of < code > ChangeSetSummary < / code > structures that provides the ID and status of each change set for the * specified stack . * @ return A list of < code > ChangeSetSummary < / code > structures that provides the ID and status of each change set for * the specified stack . */ public java . util . List < ChangeSetSummary > getSummaries ( ) { } }
// Lazily initialize so callers never observe a null list.
if (summaries != null) {
    return summaries;
}
summaries = new com.amazonaws.internal.SdkInternalList<ChangeSetSummary>();
return summaries;
public class BaseMetadataHandler { /** * Converts SQL column type ( returned from Database Metadata description class ) into QueryParameters Direction * Enumeration * @ param columnType SQL column type ( returned from DatabaseMetadata class ) * @ return QueryParameters . Direction value */ private QueryParameters . Direction convertToDirection ( int columnType ) { } }
// DatabaseMetaData column-type codes are compile-time int constants, so a
// switch maps them directly; RETURN covers both return-value and result
// columns, exactly as the original if/else chain did.
switch (columnType) {
    case DatabaseMetaData.procedureColumnIn:
        return QueryParameters.Direction.IN;
    case DatabaseMetaData.procedureColumnInOut:
        return QueryParameters.Direction.INOUT;
    case DatabaseMetaData.procedureColumnOut:
        return QueryParameters.Direction.OUT;
    case DatabaseMetaData.procedureColumnReturn:
    case DatabaseMetaData.procedureColumnResult:
        return QueryParameters.Direction.RETURN;
    default:
        throw new IllegalArgumentException("Incorrect column type: " + columnType);
}
public class RTMPHandshake { /** * Returns the public key for a given key pair . * @ param keyPair key pair * @ return public key */ protected byte [ ] getPublicKey ( KeyPair keyPair ) { } }
// Convert the DH public value Y to unsigned bytes once (the conversion is a
// pure function, so hoisting it preserves behavior) and truncate/copy to the
// protocol key length.
final DHPublicKey publicKey = (DHPublicKey) keyPair.getPublic();
final byte[] rawY = BigIntegers.asUnsignedByteArray(publicKey.getY());
if (log.isDebugEnabled()) {
    log.debug("Public key: {}", Hex.encodeHexString(rawY));
}
return Arrays.copyOfRange(rawY, 0, KEY_LENGTH);
public class ContextDebugService { /** * Called by specific HK2 lifecycle listener to check if bean is properly instantiated by HK2. * @ param type instantiated bean type */ public void hkManage ( final Class < ? > type ) { } }
if ( ! JerseyBinding . isHK2Managed ( type , options . get ( JerseyExtensionsManagedByGuice ) ) ) { throw new WrongContextException ( "HK2 creates service %s which must be managed by guice." , type . getName ( ) ) ; } hkManaged . add ( type ) ;
public class ModelsImpl { /** * Get information about the Pattern . Any entity models . * @ param appId The application ID . * @ param versionId The version ID . * @ param getPatternAnyEntityInfosOptionalParameter the object representing the optional parameters to be set before calling this API * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the List & lt ; PatternAnyEntityExtractor & gt ; object */ public Observable < List < PatternAnyEntityExtractor > > getPatternAnyEntityInfosAsync ( UUID appId , String versionId , GetPatternAnyEntityInfosOptionalParameter getPatternAnyEntityInfosOptionalParameter ) { } }
// SDK plumbing: delegates to the ServiceResponse-returning variant and maps
// the wrapped response down to its body for callers that only need the list.
return getPatternAnyEntityInfosWithServiceResponseAsync ( appId , versionId , getPatternAnyEntityInfosOptionalParameter ) . map ( new Func1 < ServiceResponse < List < PatternAnyEntityExtractor > > , List < PatternAnyEntityExtractor > > ( ) { @ Override public List < PatternAnyEntityExtractor > call ( ServiceResponse < List < PatternAnyEntityExtractor > > response ) { return response . body ( ) ; } } ) ;
public class SentenceDetectorME { /** * Detect sentences in a String . * @ param s The string to be processed . * @ return A string array containing individual sentences as elements . */ public String [ ] sentDetect ( String s ) { } }
// sentPosDetect yields end offsets of each detected sentence; slice the input
// between consecutive offsets, appending any trailing text as a final element.
final int[] ends = sentPosDetect(s);
if (ends.length == 0) {
    // No boundaries detected: the whole input is one sentence.
    return new String[] { s };
}
final boolean leftover = ends[ends.length - 1] != s.length();
final String[] sentences = new String[leftover ? ends.length + 1 : ends.length];
int begin = 0;
for (int si = 0; si < ends.length; si++) {
    sentences[si] = s.substring(begin, ends[si]);
    begin = ends[si];
}
if (leftover) {
    sentences[sentences.length - 1] = s.substring(begin);
}
return sentences;
public class KriptonXmlContext { /** * Creates an XML serializer that writes to the given file . * @ param file destination file * @ param encoding unused here ; kept for interface parity with the JSON contexts * @ return a wrapper around the underlying XML stream writer * @ throws KriptonRuntimeException if the file cannot be opened or the writer cannot be created */ public XmlWrapperSerializer createSerializer ( File file , JsonEncoding encoding ) { } }
try {
    return new XmlWrapperSerializer(new XMLSerializer(new FileWriter(file)));
} catch (Exception e) {
    // Do not printStackTrace() here: the cause is preserved in the wrapping
    // runtime exception, so printing it as well duplicated the report
    // (log-and-rethrow anti-pattern).
    throw new KriptonRuntimeException(e);
}
public class DeviceDataDAODefaultImpl { /** * Extracts an unsigned - short array from the device data , widening each * 16 - bit value into a non - negative int . */ public int [ ] extractUShortArray ( final DeviceData deviceData ) { } }
// Mask each signed short with 0xFFFF to recover its unsigned 16-bit value.
final short[] raw = DevVarUShortArrayHelper.extract(deviceData.getAny());
final int[] unsigned = new int[raw.length];
for (int i = 0; i < raw.length; i++) {
    unsigned[i] = raw[i] & 0xFFFF;
}
return unsigned;
public class RGBE { /** * Simple read routine . Will not correctly handle run length encoding . */ public static void readPixels ( DataInput in , float [ ] data , int numpixels ) throws IOException { } }
// Read one 4-byte RGBE record per pixel, decode it to three floats, and
// append them to the output buffer. Uses a local countdown instead of
// mutating the parameter; behavior is unchanged (int is passed by value).
final byte[] rgbe = new byte[4];
final float[] rgb = new float[3];
int offset = 0;
for (int remaining = numpixels; remaining > 0; remaining--) {
    in.readFully(rgbe);
    rgbe2float(rgb, rgbe, 0);
    data[offset++] = rgb[0];
    data[offset++] = rgb[1];
    data[offset++] = rgb[2];
}
// Looks up a workflow instance by its reference number; 404 when absent, else
// 200 with the instance model. The method signature is split across the
// @RequestMapping annotation's produces attribute below.
public class RestController { /** * Finds a workflow instance . * < pre > * Request : GET / workflowInstance / 1 * Response : OK { refNum : 1 , workflowName : " credit . step1 " , workflowVersion : null , label1 : " one " , label2 : null , status : NEW } * Response : NOT _ FOUND , if no such workflow instance exists * < / pre > */ @ RequestMapping ( method = RequestMethod . GET , value = "/workflowInstance/{woinRefNum}" , produces = { } }
MediaType . APPLICATION_JSON_VALUE , MediaType . TEXT_XML_VALUE } ) public ResponseEntity < WorkflowInstanceRestModel > find ( @ PathVariable long woinRefNum ) { WorkflowInstanceState woin = facade . findWorkflowInstance ( woinRefNum , null ) ; if ( woin == null ) { return new ResponseEntity < > ( HttpStatus . NOT_FOUND ) ; } else { return new ResponseEntity < > ( createInstanceModel ( woin ) , HttpStatus . OK ) ; }
public class JSON { /** * Get a { @ code boolean } from a { @ link JSONValue } . Return { @ code false } if the * { @ link JSONValue } is { @ code null } . * @ param value the { @ link JSONValue } * @ return the value as a { @ code boolean } * @ throws JSONException if the value is not a boolean */ public static boolean getBoolean ( JSONValue value ) { } }
// Null maps to false by contract; any non-boolean JSON value is an error.
if (value == null) {
    return false;
}
if (value instanceof JSONBoolean) {
    return ((JSONBoolean) value).booleanValue();
}
throw new JSONException(NOT_A_BOOLEAN);
public class Iterators { /** * Calls { @ code next ( ) } on { @ code iterator } , either { @ code offset } times * or until { @ code hasNext ( ) } returns { @ code false } , whichever comes first . * This is a lazy evaluation operation . The { @ code skip } action is only triggered when { @ code Iterator . hasNext ( ) } or { @ code Iterator . next ( ) } is called . * @ param iter * @ param offset number of leading elements to discard ; must be non - negative * @ param count maximum number of elements to yield after the skip ; must be non - negative * @ return a lazy iterator over at most { @ code count } elements starting at { @ code offset } */ public static < T > ObjIterator < T > limit ( final Iterator < T > iter , final long offset , final long count ) { } }
// FIX: the original validated `count` twice (once under the "offset" message)
// and never validated `offset` at all.
N.checkArgNotNegative(offset, "offset");
N.checkArgNotNegative(count, "count");
if (iter == null) {
    return ObjIterator.empty();
}
return new ObjIterator<T>() {
    private long cnt = count;
    private boolean skipped = false;

    @Override
    public boolean hasNext() {
        // Lazily perform the skip on first access.
        if (skipped == false) {
            skip();
        }
        return cnt > 0 && iter.hasNext();
    }

    @Override
    public T next() {
        if (hasNext() == false) {
            throw new NoSuchElementException();
        }
        cnt--;
        return iter.next();
    }

    // Consumes and discards up to `offset` leading elements, at most once.
    private void skip() {
        long idx = 0;
        while (idx++ < offset && iter.hasNext()) {
            iter.next();
        }
        skipped = true;
    }
};
public class WhiteboxImpl { /** * Gets the argument types as string . * @ param arguments the arguments * @ return the argument types as string ; { @ code "<none>" } when no arguments are supplied */ static String getArgumentTypesAsString ( Object ... arguments ) { } }
StringBuilder argumentsAsString = new StringBuilder();
final String noParameters = "<none>";
if (arguments != null && arguments.length != 0) {
    for (int i = 0; i < arguments.length; i++) {
        String argumentName = null;
        Object argument = arguments[i];
        if (argument instanceof Class<?>) {
            argumentName = ((Class<?>) argument).getName();
        } else if (argument instanceof Class<?>[] && arguments.length == 1) {
            // A single Class[] argument is unrolled into its element types.
            Class<?>[] argumentArray = (Class<?>[]) argument;
            if (argumentArray.length > 0) {
                for (int j = 0; j < argumentArray.length; j++) {
                    appendArgument(argumentsAsString, j,
                            argumentArray[j] == null ? "null" : getUnproxyType(argumentArray[j]).getName(),
                            argumentArray);
                }
                return argumentsAsString.toString();
            } else {
                argumentName = noParameters;
            }
        } else if (argument == null) {
            argumentName = "null";
        } else {
            argumentName = getUnproxyType(argument).getName();
        }
        appendArgument(argumentsAsString, i, argumentName, arguments);
    }
} else {
    // FIX: use the shared constant instead of duplicating the "<none>" literal.
    argumentsAsString.append(noParameters);
}
return argumentsAsString.toString();
public class AWSCloudCollector { /** * Creates a static prototype of the Cloud Collector , which includes any * specific settings or configuration required for the use of this * collector , including settings for connecting to any source systems . * @ return A configured Cloud Collector prototype */ public static AWSCloudCollector prototype ( ) { } }
// Configure a fresh collector instance with its default identity and state.
final AWSCloudCollector collector = new AWSCloudCollector();
collector.setName("AWSCloud");
collector.setOnline(true);
collector.setEnabled(true);
collector.setCollectorType(CollectorType.Cloud);
collector.setLastExecuted(System.currentTimeMillis());
return collector;
public class AbstractCommunicationHandler { /** * Closes the TCP connection , ensuring the socket is closed and nulled out * even when the protocol - level close fails ; any IOException raised along * the way is rethrown wrapped as a CommunicationException . */ @ Override public final void closeTcpConnection ( ) throws CommunicationException { } }
// NOTE(review): if both closeTcpConnectionImpl() and mSocket.close() throw,
// the second IOException overwrites the first and the original failure is
// lost — consider recording it via addSuppressed. Verify before changing.
IOException tmpException = null ; try { closeTcpConnectionImpl ( ) ; } catch ( IOException e ) { tmpException = e ; } finally { try { if ( mSocket != null ) { mSocket . close ( ) ; } } catch ( IOException e ) { tmpException = e ; } finally { mSocket = null ; } } if ( tmpException != null ) { throw new CommunicationException ( tmpException ) ; }
public class Streams { /** * Perform a flatMap operation where the result will be a flattened stream of Strings * from the text loaded from the supplied files . * < pre > * { @ code * List < String > result = Streams . liftAndBindFile ( Stream . of ( " input . file " ) * . map ( getClass ( ) . getClassLoader ( ) : : getResource ) * . peek ( System . out : : println ) * . map ( URL : : getFile ) * , File : : new ) * . toList ( ) ; * assertThat ( result , equalTo ( Arrays . asList ( " hello " , " world " ) ) ) ; * < / pre > * @ param fn maps each element to the File whose lines are flattened into the result * @ return a stream of all lines of all mapped files , in encounter order */ public final static < T > Stream < String > flatMapFile ( final Stream < T > stream , final Function < ? super T , File > fn ) { } }
// Each element is mapped to a File, then lazily opened with Files.lines.
// Stream.flatMap closes each inner stream once its contents are consumed
// (per the Stream.flatMap contract), and ExceptionSoftener rethrows the
// checked IOException from Files.lines as an unchecked exception.
return stream . flatMap ( fn . andThen ( f -> ExceptionSoftener . softenSupplier ( ( ) -> Files . lines ( Paths . get ( f . getAbsolutePath ( ) ) ) ) . get ( ) ) ) ;
public class Counters { /** * For counters with large # of entries , this scales down each entry in the * sum , to prevent an extremely large sum from building up and overwhelming * the max double . This may also help reduce error by preventing loss of SD ' s * with extremely large values . * @ param < E > entry type * @ param < C > counter type * @ return the L2 norm of the counter ; 0 for an empty or all - zero counter */ public static < E , C extends Counter < E > > double saferL2Norm ( C c ) { } }
// First pass: find the largest absolute count to use as the scaling factor.
double maxVal = 0.0;
for (E key : c.keySet()) {
    double value = Math.abs(c.getCount(key));
    if (value > maxVal) {
        maxVal = value;
    }
}
// FIX: an empty or all-zero counter has norm 0; without this guard the
// division below evaluated 0/0, poisoning the result with NaN.
if (maxVal == 0.0) {
    return 0.0;
}
// Second pass: sum squares of the scaled counts, then undo the scaling.
double sqrSum = 0.0;
for (E key : c.keySet()) {
    double ratio = c.getCount(key) / maxVal;
    sqrSum += ratio * ratio;
}
return maxVal * Math.sqrt(sqrSum);
public class ExecutorLoadBalancingConfig { /** * Use this if you have an enum used for your group type that defines the priority of a task and * you ALWAYS want to execute high priority items over low priority items . You should really * check out the < code > useLoadBalancedEnumOrdinalPrioritizer < / code > which works similarly but * prevents starvation of low priority items by allowing them to run some of the time * @ param groupClass * @ return */ public ExecutorLoadBalancingConfig < GROUP > useEnumOrdinalPrioritizer ( Class < GROUP > groupClass ) { } }
// Only enum group types have a meaningful ordinal-based priority.
if (groupClass.isEnum()) {
    groupPrioritizer = new EnumOrdinalPrioritizer<GROUP>();
    return this;
}
throw new IllegalArgumentException("The group class " + groupClass + " is not an enum");
public class PropertyChangeListeners { /** * Add the given property change listener to the given object and all * its sub - objects , and make sure that the property change listener * will be attached to all sub - objects that are set , and removed from * all sub - objects that are removed . < br > * < br > * The returned { @ link ObservedObject } instance may be used to * detach all property change listeners that have been attached * with this call : * < pre > < code > * ObservedObject observedObject = * PropertyChangeListeners . addDeepPropertyChangeListener ( someObject , p ) ; * / / Set some properties , informing the PropertyChangeListeners * someObject . setFoo ( " foo " ) ; * someObject . getInnerObject ( ) . setBar ( " bar " ) ; * / / Detach the deep property change listener : * observedObject . detach ( ) ; * < / code > < / pre > * @ param object The object * @ param propertyChangeListener The property change listener * @ return The { @ link ObservedObject } */ public static ObservedObject addDeepPropertyChangeListener ( Object object , PropertyChangeListener propertyChangeListener ) { } }
// The forwarding listener keeps the deep attachment consistent: whenever a
// property changes, it detaches both the user listener and itself from the
// replaced (old) sub-object graph and attaches both to the new one. Both
// listeners are recorded in the returned ObservedObject so detach() can
// remove them later.
Objects . requireNonNull ( object , "The object may not be null" ) ; Objects . requireNonNull ( propertyChangeListener , "The propertyChangeListener may not be null" ) ; PropertyChangeListener forwardingPropertyChangeListener = new PropertyChangeListener ( ) { @ Override public void propertyChange ( PropertyChangeEvent event ) { Object oldValue = event . getOldValue ( ) ; Object newValue = event . getNewValue ( ) ; removeRecursive ( oldValue , propertyChangeListener ) ; removeRecursive ( oldValue , this ) ; addRecursive ( newValue , propertyChangeListener ) ; addRecursive ( newValue , this ) ; } } ; addRecursive ( object , propertyChangeListener ) ; addRecursive ( object , forwardingPropertyChangeListener ) ; ObservedObject observedObject = new ObservedObject ( object , propertyChangeListener , forwardingPropertyChangeListener ) ; return observedObject ;
public class TableDefinition { /** * Determine the shard number of the given object for this table . If this table is * not sharded or the object has no value for its sharding field * ( see { @ link # getShardingField ( ) } ) , the shard number is 0 . Otherwise , the * sharding - field value is used to determine the shard number based on the table ' s * sharding start and granularity . * Note : The caller must ensure that if a value exists for the sharding - field , it is * loaded into the given DBObject . Otherwise , this method will incorrectly assume the * sharding value has not been set and therefore imply shard 0. * @ param dbObj { @ link DBObject } to determine shard number . * @ return 0 if the object ' s owning table is not sharded , the object has no * sharding - field value , or the object ' s sharding field value places * it before sharding was started . Otherwise , the value is & gt ; 0. */ public int getShardNumber ( DBObject dbObj ) { } }
if ( ! isSharded ( ) ) { return 0 ; } String value = dbObj . getFieldValue ( getShardingField ( ) . getName ( ) ) ; if ( value == null ) { return 0 ; } Date shardingFieldValue = Utils . dateFromString ( value ) ; return computeShardNumber ( shardingFieldValue ) ;
public class ParameterTable { /** * { @ inheritDoc } */ @ Override protected void _from ( ObjectInput in ) throws IOException , ClassNotFoundException { } }
// Deserialization mirror of the corresponding writer: three sections are read
// in a fixed order — (1) the parameter set with its index and count maps,
// (2) the global index map, (3) the UUID map (stored as msb/lsb long pairs).
// NOTE(review): the read order and element layout must stay in lockstep with
// the serializing counterpart — verify against _to()/writeExternal before
// changing anything here.
// 1 : read parameter size final int size = in . readInt ( ) ; // size collections accordingly parameters = new LinkedHashSet < TableParameter > ( size ) ; indexParameter = sizedHashMap ( size ) ; parameterIndex = sizedHashMap ( size ) ; count = sizedHashMap ( size ) ; for ( int i = 0 ; i < size ; i ++ ) { TableParameter tp = new TableParameter ( ) ; // 1 : read each table parameter tp . readExternal ( in ) ; parameters . add ( tp ) ; // 2 : read index Integer idx = readInteger ( in ) ; indexParameter . put ( idx , tp ) ; parameterIndex . put ( tp , idx ) ; // 3 : read table parameter count Integer pct = readInteger ( in ) ; count . put ( tp , pct ) ; } // 2 : read number of global indices final int size2 = in . readInt ( ) ; // size map accordingly globalIndex = sizedHashMap ( size2 ) ; for ( int i = 0 ; i < size2 ; i ++ ) { // read each key / value Integer key = readInteger ( in ) ; Integer value = readInteger ( in ) ; globalIndex . put ( key , value ) ; } // 3 : read number of uuids final int size3 = in . readInt ( ) ; globalUUIDs = sizedHashMap ( size3 ) ; for ( int i = 0 ; i < size3 ; i ++ ) { Integer key = readInteger ( in ) ; Long msb = readLong ( in ) ; Long lsb = readLong ( in ) ; globalUUIDs . put ( key , new SkinnyUUID ( msb , lsb ) ) ; }
public class HttpQuery { /** * Sends a 404 error page to the client . * Handles responses from deprecated API calls */ @ Override public void notFound ( ) { } }
logWarn("Not Found: " + request().getUri());
if (this.api_version > 0) {
    // Only the V1 error format exists today; any newer api_version also gets
    // the V1 body so a response is always produced (same as the original
    // switch whose case 1 and default both fell through to V1).
    sendReply(HttpResponseStatus.NOT_FOUND, serializer.formatNotFoundV1());
} else if (hasQueryStringParam("json")) {
    sendReply(HttpResponseStatus.NOT_FOUND, new StringBuilder("{\"err\":\"Page Not Found\"}"));
} else {
    sendReply(HttpResponseStatus.NOT_FOUND, PAGE_NOT_FOUND);
}
public class ClassUtils { /** * Get property type by a setter method . * @ param setter setter of Java bean class * @ return type of the specified setter method * @ throws org . javasimon . utils . bean . BeanUtilsException if specified method does not has setter signature */ static Class < ? > getSetterType ( Method setter ) { } }
// A bean setter takes exactly one parameter: its type is the property type.
final Class<?>[] parameterTypes = setter.getParameterTypes();
if (parameterTypes.length == 1) {
    return parameterTypes[0];
}
throw new BeanUtilsException(String.format("Method %s has %d parameters and cannot be a setter",
        setter.getName(), parameterTypes.length));
public class MutableBigInteger { /** * Calculate the multiplicative inverse of this mod 2 ^ k . */ MutableBigInteger modInverseMP2 ( int k ) { } }
// Even values have no inverse mod a power of two (gcd != 1). For k <= 32 a
// table/word-based inverse of the low word suffices; for 33..64 one extra
// Newton iteration lifts the 32-bit inverse to 64 bits, masked down to k
// bits; k > 64 falls back to the extended-Euclid path. The two result words
// are stored high-first, then normalized.
if ( isEven ( ) ) throw new ArithmeticException ( "Non-invertible. (GCD != 1)" ) ; if ( k > 64 ) return euclidModInverse ( k ) ; int t = inverseMod32 ( value [ offset + intLen - 1 ] ) ; if ( k < 33 ) { t = ( k == 32 ? t : t & ( ( 1 << k ) - 1 ) ) ; return new MutableBigInteger ( t ) ; } long pLong = ( value [ offset + intLen - 1 ] & LONG_MASK ) ; if ( intLen > 1 ) pLong |= ( ( long ) value [ offset + intLen - 2 ] << 32 ) ; long tLong = t & LONG_MASK ; tLong = tLong * ( 2 - pLong * tLong ) ; // 1 more Newton iter step tLong = ( k == 64 ? tLong : tLong & ( ( 1L << k ) - 1 ) ) ; MutableBigInteger result = new MutableBigInteger ( new int [ 2 ] ) ; result . value [ 0 ] = ( int ) ( tLong >>> 32 ) ; result . value [ 1 ] = ( int ) tLong ; result . intLen = 2 ; result . normalize ( ) ; return result ;
public class ECAImpl {
    /**
     * Resets the given EMF feature of this ECA object to its default state.
     * Unknown feature IDs are delegated to the superclass.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void eUnset(int featureID) {
        switch (featureID) {
            case AfplibPackage.ECA__CAT_NAME:
                // Restore the CAT name attribute to its generated default.
                setCATName(CAT_NAME_EDEFAULT);
                return;
            case AfplibPackage.ECA__TRIPLETS:
                // Unsetting a containment list means emptying it.
                getTriplets().clear();
                return;
        }
        super.eUnset(featureID);
    }
}
public class AbstractNumberVectorDistanceFunction { /** * Get the common dimensionality of the two objects . Throw an * { @ link IllegalArgumentException } otherwise . * @ param o1 First vector / MBR * @ param o2 Second vector / MBR * @ return Common dimensionality * @ throws IllegalArgumentException when dimensionalities are not the same . */ public static final int dimensionality ( SpatialComparable o1 , SpatialComparable o2 ) { } }
final int dim1 = o1 . getDimensionality ( ) , dim2 = o2 . getDimensionality ( ) ; if ( dim1 != dim2 ) { throw new IllegalArgumentException ( "Objects do not have the same dimensionality." ) ; } return dim1 ;
public class ImmutableAnalysis {
    /**
     * Check a single class' fields for immutability.
     *
     * Returns the first violation found among the class' instance fields, or
     * {@code Violation.absent()} if every instance field is immutable.
     *
     * @param tree the class' syntax tree, if available (used to map field
     *        symbols back to their declarations for diagnostics)
     * @param immutableTyParams the in-scope immutable type parameters
     * @param classType the type to check the fields of
     * @param reporter sink for diagnostic reporting
     */
    Violation areFieldsImmutable(Optional<ClassTree> tree, ImmutableSet<String> immutableTyParams, ClassType classType, ViolationReporter reporter) {
        ClassSymbol classSym = (ClassSymbol) classType.tsym;
        // No member scope (e.g. an erroneous symbol): nothing to check.
        if (classSym.members() == null) {
            return Violation.absent();
        }
        // Only non-static FIELD symbols matter; static fields are not part of
        // instance state.
        Filter<Symbol> instanceFieldFilter = new Filter<Symbol>() {
            @Override
            public boolean accepts(Symbol symbol) {
                return symbol.getKind() == ElementKind.FIELD && !symbol.isStatic();
            }
        };
        // Map each member symbol to its declaration tree so diagnostics can
        // point at source; only possible when the tree is present.
        Map<Symbol, Tree> declarations = new HashMap<>();
        if (tree.isPresent()) {
            for (Tree member : tree.get().getMembers()) {
                Symbol sym = ASTHelpers.getSymbol(member);
                if (sym != null) {
                    declarations.put(sym, member);
                }
            }
        }
        // javac gives us members in reverse declaration order
        // handling them in declaration order leads to marginally better diagnostics
        List<Symbol> members = ImmutableList.copyOf(classSym.members().getSymbols(instanceFieldFilter)).reverse();
        for (Symbol member : members) {
            // Declaration tree may be absent (e.g. class from a class file).
            Optional<Tree> memberTree = Optional.ofNullable(declarations.get(member));
            Violation info = isFieldImmutable(memberTree, immutableTyParams, classSym, classType, (VarSymbol) member, reporter);
            // Short-circuit on the first violating field.
            if (info.isPresent()) {
                return info;
            }
        }
        return Violation.absent();
    }
}
public class Log4jConfigurer { /** * Get the declared level of the given logger . * @ param loggerName logger name * @ return logger level or < code > null < / code > if logger is not defined or if the level of this logger is not defined . */ @ Nullable public String getLoggerLevel ( @ Nullable String loggerName ) { } }
try { loggerName = loggerName == null ? "" : loggerName ; if ( loggerName . isEmpty ( ) ) { return LogManager . getRootLogger ( ) . getLevel ( ) . toString ( ) ; } Logger logger = LogManager . exists ( loggerName ) ; if ( logger == null ) { return null ; } else { Level level = logger . getLevel ( ) ; if ( level == null ) { return null ; } return level . toString ( ) ; } } catch ( RuntimeException e ) { logger . warn ( "Exception getting effective logger level " + loggerName , e ) ; throw e ; }
public class LargeObjectManager { /** * This opens an existing large object , based on its OID . * @ param oid of large object * @ param mode mode of open * @ return LargeObject instance providing access to the object * @ throws SQLException on error * @ deprecated As of 8.3 , replaced by { @ link # open ( long , int ) } */ @ Deprecated public LargeObject open ( int oid , int mode ) throws SQLException { } }
return open ( ( long ) oid , mode , false ) ;
public class AnalysisLog { /** * Log that this record has been freed . * Call this from the end of record . free * @ param record the record that is being added . */ public void logRemoveRecord ( Rec record , int iSystemID ) { } }
try { this . getTable ( ) . setProperty ( DBParams . SUPRESSREMOTEDBMESSAGES , DBConstants . TRUE ) ; this . getTable ( ) . getDatabase ( ) . setProperty ( DBParams . MESSAGES_TO_REMOTE , DBConstants . FALSE ) ; this . addNew ( ) ; this . getField ( AnalysisLog . SYSTEM_ID ) . setValue ( iSystemID ) ; this . getField ( AnalysisLog . OBJECT_ID ) . setValue ( Debug . getObjectID ( record , true ) ) ; this . setKeyArea ( AnalysisLog . OBJECT_ID_KEY ) ; if ( this . seek ( null ) ) { this . edit ( ) ; ( ( DateTimeField ) this . getField ( AnalysisLog . FREE_TIME ) ) . setValue ( DateTimeField . currentTime ( ) ) ; if ( this . getField ( AnalysisLog . RECORD_OWNER ) . isNull ( ) ) this . getField ( AnalysisLog . RECORD_OWNER ) . setString ( Debug . getClassName ( ( ( Record ) record ) . getRecordOwner ( ) ) ) ; this . set ( ) ; } else { // Ignore for now System . exit ( 1 ) ; } } catch ( DBException ex ) { ex . printStackTrace ( ) ; }
public class CreateDynamicThingGroupRequestMarshaller {
    /**
     * Marshall the given parameter object.
     *
     * <p>Writes each field of the request through the protocol marshaller using
     * the pre-built field bindings. Any failure is wrapped in an
     * {@link SdkClientException}.
     *
     * @param createDynamicThingGroupRequest the request to marshall; must not be null
     * @param protocolMarshaller destination marshaller
     * @throws SdkClientException if the request is null or marshalling fails
     */
    public void marshall(CreateDynamicThingGroupRequest createDynamicThingGroupRequest, ProtocolMarshaller protocolMarshaller) {
        if (createDynamicThingGroupRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(createDynamicThingGroupRequest.getThingGroupName(), THINGGROUPNAME_BINDING);
            protocolMarshaller.marshall(createDynamicThingGroupRequest.getThingGroupProperties(), THINGGROUPPROPERTIES_BINDING);
            protocolMarshaller.marshall(createDynamicThingGroupRequest.getIndexName(), INDEXNAME_BINDING);
            protocolMarshaller.marshall(createDynamicThingGroupRequest.getQueryString(), QUERYSTRING_BINDING);
            protocolMarshaller.marshall(createDynamicThingGroupRequest.getQueryVersion(), QUERYVERSION_BINDING);
            protocolMarshaller.marshall(createDynamicThingGroupRequest.getTags(), TAGS_BINDING);
        } catch (Exception e) {
            // Preserve the cause so callers can diagnose which field failed.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class Observance { /** * Returns a cached onset for the specified date . * @ param date * @ return a cached onset date or null if no cached onset is applicable for the specified date */ private DateTime getCachedOnset ( final Date date ) { } }
int index = Arrays . binarySearch ( onsetsMillisec , date . getTime ( ) ) ; if ( index >= 0 ) { return onsetsDates [ index ] ; } else { int insertionIndex = - index - 1 ; return onsetsDates [ insertionIndex - 1 ] ; }
public class CacheHandler {
    /**
     * Builds the cache-key prefix metadata for a query method.
     *
     * <p>Derives the key pattern from the entity name, the method name and the
     * {@code @Param}-bound parameter names, and classifies the method as a
     * group-related (collection/statistic) query or a unique-row query.
     *
     * @param entityInfo entity metadata (mapper class, entity class, id property)
     * @param method the mapper query method to analyze
     * @return populated cache metadata for the method
     * @throws MybatisHanlerInitException if a unique query parameter lacks a
     *         {@code @Param} binding, or the method duplicates the default
     *         primary-key lookup
     */
    private QueryMethodCache generateQueryMethodCacheByMethod(EntityInfo entityInfo, Method method) {
        Class<?> mapperClass = entityInfo.getMapperClass();
        Class<?> entityClass = entityInfo.getEntityClass();
        QueryMethodCache methodCache = new QueryMethodCache();
        String methodName = mapperClass.getName() + SPLIT_PONIT + method.getName();
        methodCache.methodName = methodName;
        methodCache.fieldNames = new String[method.getParameterTypes().length];
        methodCache.cacheGroupKey = entityClass.getSimpleName() + GROUPKEY_SUFFIX;
        // Collection-returning queries are always group-related.
        methodCache.collectionResult = method.getReturnType() == List.class || method.getReturnType() == Set.class;
        if (methodCache.collectionResult) {
            methodCache.groupRalated = true;
        } else {
            // Statistic queries such as count(): any non-entity return type
            // (no @Table annotation) is treated as group-related.
            methodCache.groupRalated = method.getReturnType().isAnnotationPresent(Table.class) == false;
        }
        // Key pattern starts as "<Entity>.<method>" and gains one "%s" slot per parameter.
        StringBuilder sb = new StringBuilder(entityClass.getSimpleName()).append(SPLIT_PONIT).append(method.getName());
        Annotation[][] annotations = method.getParameterAnnotations();
        for (int i = 0; i < annotations.length; i++) {
            Annotation[] aa = annotations[i];
            if (aa.length > 0) {
                // Find the @Param annotation to learn the bound property name.
                String fieldName = null;
                inner: for (Annotation annotation : aa) {
                    if (annotation.toString().contains(Param.class.getName())) {
                        fieldName = ((Param) annotation).value();
                        break inner;
                    }
                }
                // Unique queries record only parameters that map to real entity properties.
                if (!methodCache.groupRalated && MybatisMapperParser.entityHasProperty(entityClass, fieldName)) {
                    methodCache.fieldNames[i] = fieldName;
                }
            } else {
                // Unique queries require every parameter to carry @Param; otherwise
                // the cache key cannot name the bound property.
                if (!methodCache.groupRalated) {
                    throw new MybatisHanlerInitException(String.format("unique查询方法[%s] 使用了自动缓存Annotation @Cache,参数必须使用 @Param 绑定属性名称", methodName));
                }
            }
            // First slot is separated by ':', subsequent slots by '_'.
            sb.append(i == 0 ? ":" : "_").append("%s");
        }
        // A single-parameter unique query on the id property duplicates the
        // built-in primary-key lookup and is rejected.
        if (!methodCache.groupRalated && methodCache.fieldNames.length == 1 && entityInfo.getIdProperty().equals(methodCache.fieldNames[0])) {
            throw new MybatisHanlerInitException(String.format("按主键查询方法[%s] 使用了自动缓存Annotation @Cache,请使用默认方法[%s]代替", methodName, methodDefine.selectName()));
        }
        methodCache.keyPattern = sb.toString();
        return methodCache;
    }
}
public class AwesomeTextView { /** * Starts a Flashing Animation on the AwesomeTextView * @ param forever whether the animation should be infinite or play once * @ param speed how fast the item should flash */ public void startFlashing ( boolean forever , AnimationSpeed speed ) { } }
Animation fadeIn = new AlphaAnimation ( 0 , 1 ) ; // set up extra variables fadeIn . setDuration ( 50 ) ; fadeIn . setRepeatMode ( Animation . REVERSE ) ; // default repeat count is 0 , however if user wants , set it up to be infinite fadeIn . setRepeatCount ( 0 ) ; if ( forever ) { fadeIn . setRepeatCount ( Animation . INFINITE ) ; } fadeIn . setStartOffset ( speed . getFlashDuration ( ) ) ; startAnimation ( fadeIn ) ;
public class FileUtil { /** * 获得一个带缓存的写入对象 * @ param path 输出路径 , 绝对路径 * @ param charset 字符集 * @ param isAppend 是否追加 * @ return BufferedReader对象 * @ throws IORuntimeException IO异常 */ public static BufferedWriter getWriter ( String path , Charset charset , boolean isAppend ) throws IORuntimeException { } }
return getWriter ( touch ( path ) , charset , isAppend ) ;
public class XPathScanner { /** * Returns the type of the given character . * @ param paramInput * The character the type should be determined * @ return type of the given character . */ private TokenType retrieveType ( final char paramInput ) { } }
TokenType type ; switch ( paramInput ) { case ',' : type = TokenType . COMMA ; break ; case '(' : type = TokenType . OPEN_BR ; break ; case ')' : type = TokenType . CLOSE_BR ; break ; case '[' : type = TokenType . OPEN_SQP ; break ; case ']' : type = TokenType . CLOSE_SQP ; break ; case '@' : type = TokenType . AT ; break ; case '=' : type = TokenType . EQ ; break ; case '<' : case '>' : type = TokenType . COMP ; break ; case '!' : type = TokenType . N_EQ ; break ; case '/' : type = TokenType . SLASH ; break ; case ':' : type = TokenType . COLON ; break ; case '.' : type = TokenType . POINT ; break ; case '+' : type = TokenType . PLUS ; break ; case '-' : type = TokenType . MINUS ; break ; case '\'' : type = TokenType . SINGLE_QUOTE ; break ; case '"' : type = TokenType . DBL_QUOTE ; break ; case '$' : type = TokenType . DOLLAR ; break ; case '?' : type = TokenType . INTERROGATION ; break ; case '*' : type = TokenType . STAR ; break ; case '|' : type = TokenType . OR ; break ; default : type = TokenType . INVALID ; } return type ;
public class RtfList {
    /**
     * Writes only the list number and list level number.
     *
     * @param result The <code>OutputStream</code> to write to
     * @throws IOException On i/o errors.
     * @since 2.1.3
     */
    protected void writeListNumbers(final OutputStream result) throws IOException {
        // Emit the RTF control word followed by this list's numeric id.
        result.write(RtfList.LIST_NUMBER);
        result.write(intToByteArray(listNumber));
    }
}
public class SessionException { /** * Converts a Throwable to a SessionException . If the Throwable is a * SessionException , it will be passed through unmodified ; otherwise , it will be wrapped * in a new SessionException . * @ param cause the Throwable to convert * @ return a SessionException */ public static SessionException fromThrowable ( Throwable cause ) { } }
return ( cause instanceof SessionException ) ? ( SessionException ) cause : new SessionException ( cause ) ;
public class PointerHierarchyRepresentationResult {
    /**
     * Get/compute the positions.
     *
     * <p>Assigns each object a leaf position in the dendrogram: objects are
     * visited in reverse topological order and each subtree is laid out in a
     * contiguous interval inside its parent's interval.
     *
     * @return Dendrogram positions (cached after the first computation)
     */
    public IntegerDataStore getPositions() {
        if (positions != null) {
            return positions; // Return cached.
        }
        final ArrayDBIDs order = topologicalSort();
        // siz[x] = number of objects in the subtree rooted at x.
        WritableIntegerDataStore siz = computeSubtreeSizes(order);
        // pos: final (rightmost) position of each object; -1 = unassigned.
        WritableIntegerDataStore pos = DataStoreUtil.makeIntegerStorage(ids, DataStoreFactory.HINT_HOT | DataStoreFactory.HINT_DB, -1);
        // ins: next free insertion offset inside each subtree's interval (temporary).
        WritableIntegerDataStore ins = DataStoreUtil.makeIntegerStorage(ids, DataStoreFactory.HINT_HOT | DataStoreFactory.HINT_TEMP, -1);
        int defins = 0; // Next root insertion position.
        DBIDVar v1 = DBIDUtil.newVar();
        // Place elements based on their successor; iterate the topological
        // order backwards so parents are placed before their children.
        for (DBIDArrayIter it = order.iter().seek(order.size() - 1); it.valid(); it.retract()) {
            final int size = siz.intValue(it);
            parent.assignVar(it, v1); // v1 = parent
            final int ipos = ins.intValue(v1); // Position of parent
            if (ipos < 0 || DBIDUtil.equal(it, v1)) {
                // Root (or parent not yet placed): use interval [defins; defins + size).
                ins.putInt(it, defins);
                pos.putInt(it, defins + size - 1);
                defins += size;
                continue;
            }
            // Insert at the parent's current insertion offset, then advance
            // that offset past this subtree.
            pos.putInt(it, ipos + size - 1);
            ins.putInt(it, ipos);
            ins.increment(v1, size);
        }
        // Free the temporary stores; keep and cache the positions.
        siz.destroy();
        ins.destroy();
        return positions = pos;
    }
}
public class NameUtils {
    /**
     * Returns the class name without the package prefix.
     *
     * @param className The class name from which to get a short name
     * @return The short name of the class (everything after the last {@code '.'},
     *         or the input unchanged if it contains no dot)
     */
    public static String getShortName(String className) {
        // lastIndexOf(char) avoids a String search; single-arg substring is the
        // idiomatic "suffix from index" form.
        final int lastDot = className.lastIndexOf('.');
        return lastDot > -1 ? className.substring(lastDot + 1) : className;
    }
}
public class Filters { /** * Filter that accepts when either filter accepts ( OR ) . */ public static < E > Filter < E > orFilter ( Filter < E > f1 , Filter < E > f2 ) { } }
return ( new CombinedFilter < E > ( f1 , f2 , false ) ) ;
public class AbstractBuilder { /** * Replies if the first parameter is a subtype of the second parameter . * @ param context the context . * @ param subType the subtype to test . * @ param superType the expected super type . * @ return the type reference . */ @ Pure protected boolean isSubTypeOf ( EObject context , JvmTypeReference subType , JvmTypeReference superType ) { } }
if ( isTypeReference ( superType ) && isTypeReference ( subType ) ) { StandardTypeReferenceOwner owner = new StandardTypeReferenceOwner ( services , context ) ; LightweightTypeReferenceFactory factory = new LightweightTypeReferenceFactory ( owner , false ) ; LightweightTypeReference reference = factory . toLightweightReference ( subType ) ; return reference . isSubtypeOf ( superType . getType ( ) ) ; } return false ;
public class MetricValues { /** * Merge { @ code prior } with { @ code latest } . * If { @ code kind } is { @ code MetricKind . DELTA } then the result contains a combination of values in * prior and latest . For all other kinds , it ' s sufficient to return the metric with the latest end * time . * @ param kind the { @ code MetricKind } * @ param prior a { @ code MetricValue } instance * @ param latest a { @ code MetricValue } , expected to be a later version of { @ code prior } * @ return a new { @ code MetricValue } that combines prior and latest depending on { @ code kind } * @ throws IllegalArgumentException if the { @ code prior } and { @ code latest } are have different * types of value , or if the type is not mergeable */ public static MetricValue merge ( MetricKind kind , MetricValue prior , MetricValue latest ) { } }
if ( prior . getValueCase ( ) != latest . getValueCase ( ) ) { log . atWarning ( ) . log ( "Could not merge different types of metric: %s, %s" , prior , latest ) ; throw new IllegalArgumentException ( MSG_CANNOT_MERGE_DIFFERENT_TYPES ) ; } if ( kind == MetricKind . DELTA ) { Builder builder = latest . toBuilder ( ) ; mergeTimestamps ( builder , prior , latest ) ; mergeValues ( builder , prior , latest ) ; return builder . build ( ) ; } else if ( Timestamps . COMPARATOR . compare ( prior . getEndTime ( ) , latest . getEndTime ( ) ) < 0 ) { return latest ; } else { return prior ; }
public class BoundedBuffer { /** * @ awisniew - ADDED * Same as poll , only throws exception if queue is empty * @ see java . util . Queue # remove ( ) */ @ Override public T remove ( ) { } }
T retrievedElement = poll ( ) ; if ( retrievedElement == null ) { throw new NoSuchElementException ( ) ; } return retrievedElement ;
public class R2RMLParser {
    /**
     * Get the object atom; it can be a constant, a column or a template.
     *
     * <p>Inspects the first object map of the predicate-object map and builds the
     * corresponding term: constant (IRI or literal), column reference, or
     * template. Language tags and datatypes declared on the object map are
     * applied at the end.
     *
     * @param pom the predicate-object map whose object is converted
     * @param joinCond prefix to apply to column names when the object comes from
     *        a join condition (empty string when not joining)
     * @return the constructed term, or null if the map has no object maps
     */
    public ImmutableTerm getObjectAtom(PredicateObjectMap pom, String joinCond) {
        ImmutableTerm objectAtom = null;
        if (pom.getObjectMaps().isEmpty()) {
            return null;
        }
        // Only the first object map is considered here.
        ObjectMap om = pom.getObjectMap(0);
        String lan = om.getLanguageTag();
        IRI datatype = om.getDatatype();
        // Case 1: the object map is a constant (can be an IRI or a literal).
        // TODO(xiao): toString() is suspicious
        RDFTerm constantObj = om.getConstant();
        if (constantObj != null) {
            // boolean isURI = false;
            // try {
            // java.net.URI.create(obj);
            // isURI = true;
            // } catch (IllegalArgumentException e) {
            // If the literal has a language or datatype declared on the map,
            // the typed function object is created later (bottom of method).
            if (lan != null || datatype != null) {
                ValueConstant constantLiteral = termFactory.getConstantLiteral(((Literal) constantObj).getLexicalForm());
                objectAtom = constantLiteral;
            } else {
                if (constantObj instanceof Literal) {
                    ValueConstant constantLiteral = termFactory.getConstantLiteral(((Literal) constantObj).getLexicalForm());
                    Literal constantLit1 = (Literal) constantObj;
                    String lanConstant = om.getLanguageTag();
                    IRI datatypeConstant = constantLit1.getDatatype();
                    // Language-tagged literal.
                    if (lanConstant != null) {
                        objectAtom = termFactory.getImmutableTypedTerm(constantLiteral, lanConstant);
                    }
                    // Typed literal.
                    else if (datatypeConstant != null) {
                        RDFDatatype type = typeFactory.getDatatype(datatypeConstant);
                        objectAtom = termFactory.getImmutableTypedTerm(constantLiteral, type);
                    } else {
                        objectAtom = constantLiteral; // .RDFS_LITERAL;
                    }
                } else if (constantObj instanceof IRI) {
                    objectAtom = termFactory.getImmutableUriTemplate(termFactory.getConstantLiteral(((IRI) constantObj).getIRIString()));
                }
            }
        }
        // Case 2: the object map is a column reference. Datatype/language/IRI
        // wrapping is applied later; note this intentionally overwrites any
        // constant result above.
        String col = om.getColumn();
        if (col != null) {
            col = trim(col);
            if (!joinCond.isEmpty()) {
                // Qualify the column with the join prefix.
                col = joinCond + col;
            }
            objectAtom = termFactory.getVariable(col);
        }
        // Case 3: the object map is a template (can be an IRI, a literal or a
        // blank node).
        Template t = om.getTemplate();
        IRI typ = om.getTermType();
        boolean concat = false;
        if (t != null) {
            // A literal template containing concatenation is built as a typed
            // CONCAT function.
            concat = isConcat(t.toString());
            if (typ.equals(R2RMLVocabulary.literal) && (concat)) {
                objectAtom = getTypedFunction(t.toString(), 4, joinCond);
            } else {
                // A template can be rr:IRI, rr:Literal or rr:BlankNode.
                // If the literal has a language or datatype, the function
                // object is created later from the bare column variable.
                if (lan != null || datatype != null) {
                    String value = t.getColumnName(0);
                    if (!joinCond.isEmpty()) {
                        value = joinCond + value;
                    }
                    objectAtom = termFactory.getVariable(value);
                } else {
                    IRI type = om.getTermType();
                    // Build the IRI / simple-literal / blank-node term from
                    // the template and its declared term type.
                    objectAtom = getTermTypeAtom(t.toString(), type, joinCond);
                }
            }
        } else {
            // No template: a column-valued map with term type rr:IRI gets
            // wrapped into an IRI template around the column variable.
            TermMap.TermMapType termMapType = om.getTermMapType();
            if (termMapType.equals(TermMap.TermMapType.CONSTANT_VALUED)) {
                // Constant-valued maps were fully handled in case 1.
            } else if (termMapType.equals(TermMap.TermMapType.COLUMN_VALUED)) {
                if (typ.equals(R2RMLVocabulary.iri)) {
                    objectAtom = termFactory.getImmutableUriTemplate(objectAtom);
                }
            }
        }
        // Finally apply a declared language tag ...
        if (lan != null) {
            objectAtom = termFactory.getImmutableTypedTerm(objectAtom, lan);
        }
        // ... or a declared datatype to the term built above.
        if (datatype != null) {
            RDFDatatype type = typeFactory.getDatatype(datatype);
            objectAtom = termFactory.getImmutableTypedTerm(objectAtom, type);
        }
        return objectAtom;
    }
}
public class ZooActionBuilder { /** * Adds variable extractor for extracting variable from command response . * @ param jsonPath the json path to reference the value to be extracted * @ param variableName the name of the variable to store the extracted value in * @ return */ public ZooActionBuilder extract ( String jsonPath , String variableName ) { } }
JsonPathVariableExtractor jsonPathVariableExtractor = new JsonPathVariableExtractor ( ) ; Map < String , String > pathVariableMap = new HashMap < > ( ) ; pathVariableMap . put ( jsonPath , variableName ) ; jsonPathVariableExtractor . setJsonPathExpressions ( pathVariableMap ) ; action . addVariableExtractors ( jsonPathVariableExtractor ) ; return this ;
public class WebSocketHandler {
    /**
     * {@inheritDoc}
     *
     * Logs session traffic counters and the outgoing message at TRACE level;
     * no other processing is performed on sent messages.
     */
    @Override
    public void messageSent(IoSession session, Object message) throws Exception {
        // Guard avoids building the log arguments unless TRACE is enabled.
        if (log.isTraceEnabled()) {
            // String.valueOf handles a null message safely.
            log.trace("Message sent (session: {}) read: {} write: {}\n{}", session.getId(), session.getReadBytes(), session.getWrittenBytes(), String.valueOf(message));
        }
    }
}
public class LdapUtils {
    /**
     * Reads a Long value from the LdapEntry.
     *
     * @param ctx the ldap entry
     * @param attribute the attribute name
     * @return the long value
     */
    public static Long getLong(final LdapEntry ctx, final String attribute) {
        // Delegate with Long.MIN_VALUE as the "nothing found" fallback value.
        return getLong(ctx, attribute, Long.MIN_VALUE);
    }
}
public class CmsContainerPageCopier { /** * Return the cms object with the site root set to " / " . * @ return the cms object with the site root set to " / " . * @ throws CmsException thrown if initializing the root cms object fails . */ private CmsObject getRootCms ( ) throws CmsException { } }
if ( null == m_rootCms ) { m_rootCms = OpenCms . initCmsObject ( m_cms ) ; m_rootCms . getRequestContext ( ) . setSiteRoot ( "" ) ; } return m_rootCms ;
public class HostName {
    /**
     * If this represents an ip address, returns that address.
     * If this represents a host, returns the resolved ip address of that host.
     * Otherwise, returns null, but only for strings that are considered valid
     * address strings but cannot be converted to address objects.
     *
     * This method will throw exceptions for invalid formats and failures to
     * resolve the address. The equivalent method {@link #getAddress()} will
     * simply return null rather than throw those exceptions.
     *
     * If you wish to get the represented address and avoid DNS resolution, use
     * {@link #asAddress()} or {@link #asAddressString()}
     *
     * @return the resolved address, or null when resolution yields no address
     * @throws UnknownHostException if DNS resolution of the host fails
     * @throws HostNameException if the string is invalid or a mask length
     *         mismatches the resolved address
     */
    @Override
    public IPAddress toAddress() throws UnknownHostException, HostNameException {
        // Double-checked caching: resolvedAddress/resolvedIsNull together record
        // a previous successful resolution (including a "resolved to null" result).
        IPAddress addr = resolvedAddress;
        if (addr == null && !resolvedIsNull) {
            // note that validation handles empty address resolution
            validate();
            synchronized (this) {
                // Re-check under the lock in case another thread resolved first.
                addr = resolvedAddress;
                if (addr == null && !resolvedIsNull) {
                    if (parsedHost.isAddressString()) {
                        // Already an address literal: no DNS needed.
                        addr = parsedHost.asAddress();
                        resolvedIsNull = (addr == null);
                        // note there is no need to apply prefix or mask here, it would have been applied to the address already
                    } else {
                        String strHost = parsedHost.getHost();
                        if (strHost.length() == 0 && !validationOptions.emptyIsLoopback) {
                            // Empty host with loopback disabled resolves to nothing.
                            addr = null;
                            resolvedIsNull = true;
                        } else {
                            // Note we do not set resolvedIsNull, so we will attempt to resolve again if the previous attempt threw an exception
                            InetAddress inetAddress = InetAddress.getByName(strHost);
                            byte bytes[] = inetAddress.getAddress();
                            Integer networkPrefixLength = parsedHost.getNetworkPrefixLength();
                            if (networkPrefixLength == null) {
                                // No explicit prefix: apply any mask and derive a
                                // prefix length from it if it is a block mask.
                                IPAddress mask = parsedHost.getMask();
                                if (mask != null) {
                                    byte maskBytes[] = mask.getBytes();
                                    if (maskBytes.length != bytes.length) {
                                        throw new HostNameException(host, "ipaddress.error.ipMismatch");
                                    }
                                    for (int i = 0; i < bytes.length; i++) {
                                        bytes[i] &= maskBytes[i];
                                    }
                                    networkPrefixLength = mask.getBlockMaskPrefixLength(true);
                                }
                            }
                            IPAddressStringParameters addressParams = validationOptions.addressOptions;
                            // Choose IPv6 vs IPv4 creator by resolved byte length.
                            if (bytes.length == IPv6Address.BYTE_COUNT) {
                                IPv6AddressCreator creator = addressParams.getIPv6Parameters().getNetwork().getAddressCreator();
                                addr = creator.createAddressInternal(bytes, networkPrefixLength, null, this); /* address creation */
                            } else {
                                IPv4AddressCreator creator = addressParams.getIPv4Parameters().getNetwork().getAddressCreator();
                                addr = creator.createAddressInternal(bytes, networkPrefixLength, this); /* address creation */
                            }
                        }
                    }
                    // Publish the result for subsequent callers.
                    resolvedAddress = addr;
                }
            }
        }
        return addr;
    }
}
public class TruthMaintenanceSystem {
    /**
     * Adds a justification for the FactHandle to the justifiedMap.
     *
     * <p>Delegates to {@code addLogicalDependency} with the final flag set to
     * {@code true}, marking this as a dependency restored during session
     * deserialization (read path).
     *
     * @param handle the fact handle being justified
     * @param activation the activation providing the justification
     * @param context propagation context of the assertion
     * @param rule the rule that logically asserted the fact
     * @param typeConf object type configuration for the fact
     */
    public void readLogicalDependency(final InternalFactHandle handle, final Object object, final Object value, final Activation activation, final PropagationContext context, final RuleImpl rule, final ObjectTypeConf typeConf) {
        addLogicalDependency(handle, object, value, activation, context, rule, typeConf, true);
    }
}
public class SSLHandlerFactory {
    /**
     * This is used to create the OpenSSL context when OCSP stapling is enabled
     * for the server.
     *
     * <p>Builds the server context from either the configured keystore or raw
     * certificates, applies OCSP stapling, optional cipher suites, the SSL
     * protocol, and the session timeout.
     *
     * @param enableOcsp true/false for enabling ocsp stapling.
     * @return ReferenceCountedOpenSslContext.
     * @throws SSLException if any error occurs while creating the ReferenceCountedOpenSslContext.
     */
    public ReferenceCountedOpenSslContext getServerReferenceCountedOpenSslContext(boolean enableOcsp) throws SSLException {
        // Prefer the keystore-based builder when a keystore is configured.
        if (sslConfig.getKeyStore() != null) {
            sslContextBuilder = serverContextBuilderWithKs(SslProvider.OPENSSL);
        } else {
            sslContextBuilder = serverContextBuilderWithCerts(SslProvider.OPENSSL);
        }
        setOcspStapling(sslContextBuilder, enableOcsp);
        // Restrict cipher suites only when explicitly configured.
        if (sslConfig.getCipherSuites() != null) {
            List<String> ciphers = Arrays.asList(sslConfig.getCipherSuites());
            setCiphers(sslContextBuilder, ciphers);
        }
        setSslProtocol(sslContextBuilder);
        ReferenceCountedOpenSslContext referenceCountedOpenSslCtx = (ReferenceCountedOpenSslContext) sslContextBuilder.build();
        // A non-positive timeout keeps the provider default.
        int sessionTimeout = sslConfig.getSessionTimeOut();
        if (sessionTimeout > 0) {
            referenceCountedOpenSslCtx.sessionContext().setSessionTimeout(sessionTimeout);
        }
        return referenceCountedOpenSslCtx;
    }
}