signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class Validator { /** * Validates that an enum of the given type with the given value exists , and that this enum is * contained in the given list of permitted choices ; finally returns that enum object . */ public < T extends Enum < T > > T validateEnum ( Config config , String value , Class < T > type , T ... choices ) { } }
if ( choices . length == 0 ) { choices = type . getEnumConstants ( ) ; } Preconditions . checkArgument ( choices . length > 0 ) ; try { T result = Enum . valueOf ( type , value ) ; if ( ! Arrays . asList ( choices ) . contains ( result ) ) { throw new IllegalArgumentException ( ) ; } return result ; } catch ( IllegalArgumentException e ) { throw new MorphlineCompilationException ( String . format ( "Invalid choice: '%s' (choose from {%s})" , value , Joiner . on ( "," ) . join ( choices ) ) , config ) ; }
public class SelendroidCapabilities { /** * Returns the application under test in the format of " appName : appVersion " , or " appName " if the supported application * does not have any version associated with it , or returns null if the requested app is not in the apps store . If the * launch activity is also specified with requested application then just return the requested application as app under * test so it can be later installed to the device by SelendroidStandaloneDriver . * @ param supportedApps The list of supported apps in the apps store . * @ return The application under test in " appName " or " appName : appVersion " format , or null if the application is not * in the list of supported apps and the launch activity is not specified . */ public String getDefaultApp ( Set < String > supportedApps ) { } }
String defaultApp = getAut ( ) ; // if the launch activity is specified , just return . if ( getLaunchActivity ( ) != null ) { return defaultApp ; } // App version is not specified . Get the latest version from the apps store . if ( ! defaultApp . contains ( ":" ) ) { return getDefaultVersion ( supportedApps , defaultApp ) ; } return supportedApps . contains ( defaultApp ) ? defaultApp : null ;
public class Extension {

    /**
     * Builds a new map of resources from the given map of resource paths. Each resource is created
     * with the mimetype stored under its path in the given map, and keyed by that path.
     *
     * @param resourceTypes a map of paths to their corresponding mimetypes; may be {@code null}
     * @return a new, unmodifiable map of resources keyed by resource path; empty if no paths were
     *         provided
     */
    private Map<String, Resource> getClassPathResources(Map<String, String> resourceTypes) {
        // No paths provided: nothing to build.
        if (resourceTypes == null) {
            return Collections.<String, Resource>emptyMap();
        }
        final Map<String, Resource> resources = new HashMap<String, Resource>(resourceTypes.size());
        for (Map.Entry<String, String> entry : resourceTypes.entrySet()) {
            final String path = entry.getKey();
            // Each path/mimetype pair becomes a classpath resource under the same path key.
            resources.put(path, new ClassPathResource(classLoader, entry.getValue(), path));
        }
        // Callers should not rely on modifying the result.
        return Collections.unmodifiableMap(resources);
    }
}
public class Configuration { /** * Creates a new Configuration based on the given { @ link com . jayway . jsonpath . spi . mapper . MappingProvider } * @ param newMappingProvider mapping provider to use in new configuration * @ return a new configuration */ public Configuration mappingProvider ( MappingProvider newMappingProvider ) { } }
return Configuration . builder ( ) . jsonProvider ( jsonProvider ) . mappingProvider ( newMappingProvider ) . options ( options ) . evaluationListener ( evaluationListeners ) . build ( ) ;
public class FunctionTypeBuilder {

    /**
     * Infers parameters from the params list and JSDoc info; may also register extra template
     * type names declared on the constructor.
     *
     * @param argsParent the node whose children are the parameter nodes
     * @param info JSDoc attached to the constructor, or {@code null}
     * @return this builder, for chaining
     */
    FunctionTypeBuilder inferConstructorParameters(Node argsParent, @Nullable JSDocInfo info) {
        // Template parameters declared in 'info' are added on top of anything from the class.
        if (info != null) {
            setConstructorTemplateTypeNames(
                buildTemplateTypesFromJSDocInfo(info, true), argsParent.getParent());
        }
        inferParameterTypes(argsParent, info);
        return this;
    }
}
public class PluginMergeToolImpl {

    /**
     * Removes every node contained in the NodeList from the Element.
     * Convenience method because NodeList objects in the DOM are live: the list shrinks as
     * children are removed, so item(0) is removed a fixed number of times rather than indexing
     * through the list.
     *
     * @param xEml the element to remove the nodes from
     * @param nodes the (live) list of nodes to remove
     */
    public static void nodeListRemoveAll(Element xEml, NodeList nodes) {
        int remaining = nodes.getLength();
        while (remaining-- > 0) {
            xEml.removeChild(nodes.item(0));
        }
    }
}
public class AbstractMapFragment {

    /**
     * Call this before super.onCreate() when using a Toolbar and the view contains a map.
     * https://code.google.com/p/android/issues/detail?id=175140
     *
     * <p>FIXME This is just a workaround to the following error: ClassNotFoundException when
     * unmarshalling android.support.v7.widget.Toolbar$SavedState. It seems to be a problem with
     * the SupportMapFragment implementation.
     *
     * @param savedInstanceState the saved state passed to onCreate(); may be {@code null}
     */
    public static void doMapToolbarWorkaround(Bundle savedInstanceState) {
        if (savedInstanceState == null) {
            return;
        }
        SparseArray sparseArray = (SparseArray) savedInstanceState.get("android:view_state");
        if (sparseArray == null) {
            return;
        }
        Integer keyToRemove = null;
        for (int i = 0; i < sparseArray.size(); i++) {
            int key = sparseArray.keyAt(i);
            Object each = sparseArray.get(key);
            // BUGFIX: SparseArray may legally hold null values; the original called
            // each.toString() unconditionally and would NPE on a null entry.
            if (each != null
                    && each.toString().startsWith("android.support.v7.widget.Toolbar$SavedState")) {
                keyToRemove = key;
            }
        }
        if (keyToRemove != null) {
            sparseArray.remove(keyToRemove);
        }
    }
}
public class TextCache { /** * Closes the source file and deletes it if it is not read - only . */ void purge ( ) { } }
uncommittedCache . clear ( ) ; try { if ( cacheReadonly ) { close ( false ) ; } else { if ( dataFile != null ) { dataFile . close ( ) ; dataFile = null ; } FileUtil . getDefaultInstance ( ) . delete ( fileName ) ; } } catch ( Exception e ) { throw Error . error ( ErrorCode . FILE_IO_ERROR , ErrorCode . M_TextCache_purging_file_error , new Object [ ] { fileName , e } ) ; }
public class AmazonCloudWatchEventsClient { /** * Adds the specified targets to the specified rule , or updates the targets if they are already associated with the * rule . * Targets are the resources that are invoked when a rule is triggered . * You can configure the following as targets for CloudWatch Events : * < ul > * < li > * EC2 instances * < / li > * < li > * SSM Run Command * < / li > * < li > * SSM Automation * < / li > * < li > * AWS Lambda functions * < / li > * < li > * Data streams in Amazon Kinesis Data Streams * < / li > * < li > * Data delivery streams in Amazon Kinesis Data Firehose * < / li > * < li > * Amazon ECS tasks * < / li > * < li > * AWS Step Functions state machines * < / li > * < li > * AWS Batch jobs * < / li > * < li > * AWS CodeBuild projects * < / li > * < li > * Pipelines in AWS CodePipeline * < / li > * < li > * Amazon Inspector assessment templates * < / li > * < li > * Amazon SNS topics * < / li > * < li > * Amazon SQS queues , including FIFO queues * < / li > * < li > * The default event bus of another AWS account * < / li > * < / ul > * Creating rules with built - in targets is supported only in the AWS Management Console . The built - in targets are * < code > EC2 CreateSnapshot API call < / code > , < code > EC2 RebootInstances API call < / code > , * < code > EC2 StopInstances API call < / code > , and < code > EC2 TerminateInstances API call < / code > . * For some target types , < code > PutTargets < / code > provides target - specific parameters . If the target is a Kinesis * data stream , you can optionally specify which shard the event goes to by using the < code > KinesisParameters < / code > * argument . To invoke a command on multiple EC2 instances with one rule , you can use the * < code > RunCommandParameters < / code > field . * To be able to make API calls against the resources that you own , Amazon CloudWatch Events needs the appropriate * permissions . 
For AWS Lambda and Amazon SNS resources , CloudWatch Events relies on resource - based policies . For * EC2 instances , Kinesis data streams , and AWS Step Functions state machines , CloudWatch Events relies on IAM roles * that you specify in the < code > RoleARN < / code > argument in < code > PutTargets < / code > . For more information , see < a * href = " https : / / docs . aws . amazon . com / AmazonCloudWatch / latest / events / auth - and - access - control - cwe . html " > Authentication * and Access Control < / a > in the < i > Amazon CloudWatch Events User Guide < / i > . * If another AWS account is in the same region and has granted you permission ( using < code > PutPermission < / code > ) , * you can send events to that account . Set that account ' s event bus as a target of the rules in your account . To * send the matched events to the other account , specify that account ' s event bus as the < code > Arn < / code > value when * you run < code > PutTargets < / code > . If your account sends events to another account , your account is charged for * each sent event . Each event sent to another account is charged as a custom event . The account receiving the event * is not charged . For more information , see < a href = " https : / / aws . amazon . com / cloudwatch / pricing / " > Amazon CloudWatch * Pricing < / a > . * If you are setting the event bus of another account as the target , and that account granted permission to your * account through an organization instead of directly by the account ID , then you must specify a * < code > RoleArn < / code > with proper permissions in the < code > Target < / code > structure . For more information , see < a * href * = " https : / / docs . aws . amazon . com / AmazonCloudWatch / latest / events / CloudWatchEvents - CrossAccountEventDelivery . html " > * Sending and Receiving Events Between AWS Accounts < / a > in the < i > Amazon CloudWatch Events User Guide < / i > . 
* For more information about enabling cross - account events , see < a > PutPermission < / a > . * < b > Input < / b > , < b > InputPath < / b > , and < b > InputTransformer < / b > are mutually exclusive and optional parameters of a * target . When a rule is triggered due to a matched event : * < ul > * < li > * If none of the following arguments are specified for a target , then the entire event is passed to the target in * JSON format ( unless the target is Amazon EC2 Run Command or Amazon ECS task , in which case nothing from the event * is passed to the target ) . * < / li > * < li > * If < b > Input < / b > is specified in the form of valid JSON , then the matched event is overridden with this constant . * < / li > * < li > * If < b > InputPath < / b > is specified in the form of JSONPath ( for example , < code > $ . detail < / code > ) , then only the part * of the event specified in the path is passed to the target ( for example , only the detail part of the event is * passed ) . * < / li > * < li > * If < b > InputTransformer < / b > is specified , then one or more specified JSONPaths are extracted from the event and * used as values in a template that you specify as the input to the target . * < / li > * < / ul > * When you specify < code > InputPath < / code > or < code > InputTransformer < / code > , you must use JSON dot notation , not * bracket notation . * When you add targets to a rule and the associated rule triggers soon after , new or updated targets might not be * immediately invoked . Allow a short period of time for changes to take effect . * This action can partially fail if too many requests are made at the same time . If that happens , * < code > FailedEntryCount < / code > is non - zero in the response and each entry in < code > FailedEntries < / code > provides * the ID of the failed target and the error code . * @ param putTargetsRequest * @ return Result of the PutTargets operation returned by the service . 
* @ throws ResourceNotFoundException * An entity that you specified does not exist . * @ throws ConcurrentModificationException * There is concurrent modification on a rule or target . * @ throws LimitExceededException * You tried to create more rules or add more targets to a rule than is allowed . * @ throws ManagedRuleException * This rule was created by an AWS service on behalf of your account . It is managed by that service . If you * see this error in response to < code > DeleteRule < / code > or < code > RemoveTargets < / code > , you can use the * < code > Force < / code > parameter in those calls to delete the rule or remove targets from the rule . You * cannot modify these managed rules by using < code > DisableRule < / code > , < code > EnableRule < / code > , * < code > PutTargets < / code > , < code > PutRule < / code > , < code > TagResource < / code > , or < code > UntagResource < / code > . * @ throws InternalException * This exception occurs due to unexpected causes . * @ sample AmazonCloudWatchEvents . PutTargets * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / events - 2015-10-07 / PutTargets " target = " _ top " > AWS API * Documentation < / a > */ @ Override public PutTargetsResult putTargets ( PutTargetsRequest request ) { } }
request = beforeClientExecution ( request ) ; return executePutTargets ( request ) ;
public class DialogFragmentUtils { /** * Dismiss { @ link android . app . DialogFragment } for the tag on the loader callbacks with the specified { @ link android . os . Handler } . * @ param handler the handler , in most case , this handler is the main handler . * @ param manager the manager . * @ param tag the tag string that is related to the { @ link android . app . DialogFragment } . */ @ TargetApi ( Build . VERSION_CODES . HONEYCOMB ) public static void dismissOnLoaderCallback ( Handler handler , final android . app . FragmentManager manager , final String tag ) { } }
handler . post ( new Runnable ( ) { @ Override public void run ( ) { android . app . DialogFragment fragment = ( android . app . DialogFragment ) manager . findFragmentByTag ( tag ) ; if ( fragment != null ) { fragment . dismiss ( ) ; } } } ) ;
public class TypeToken { /** * Return true if any of the following conditions is met : * < ul > * < li > ' this ' and { @ code formalType } are equal * < li > { @ code formalType } is { @ code < ? extends Foo > } and ' this ' is a subtype of { @ code Foo } * < li > { @ code formalType } is { @ code < ? super Foo > } and ' this ' is a supertype of { @ code Foo } * < / ul > */ private boolean is ( Type formalType ) { } }
if ( runtimeType . equals ( formalType ) ) { return true ; } if ( formalType instanceof WildcardType ) { // if " formalType " is < ? extends Foo > , " this " can be : // Foo , SubFoo , < ? extends Foo > , < ? extends SubFoo > , < T extends Foo > or // < T extends SubFoo > . // if " formalType " is < ? super Foo > , " this " can be : // Foo , SuperFoo , < ? super Foo > or < ? super SuperFoo > . return every ( ( ( WildcardType ) formalType ) . getUpperBounds ( ) ) . isSupertypeOf ( runtimeType ) && every ( ( ( WildcardType ) formalType ) . getLowerBounds ( ) ) . isSubtypeOf ( runtimeType ) ; } return false ;
public class CheckSignInApi { /** * 帐号登录请求 , 非强制登录 * @ param handler 登录结果回调 */ public void checkSignIn ( final SignInHandler handler ) { } }
HMSAgentLog . i ( "checkSignIn:handler=" + StrUtils . objDesc ( handler ) ) ; if ( this . handler != null ) { HMSAgentLog . e ( "has already a signIn to dispose" ) ; new Handler ( Looper . getMainLooper ( ) ) . post ( new CallbackResultRunnable < SignInHuaweiId > ( handler , HMSAgent . AgentResultCode . REQUEST_REPEATED , null ) ) ; return ; } this . handler = handler ; retryTimes = MAX_RETRY_TIMES ; connect ( ) ;
public class ProxyPathValidator { /** * Sets a restricted proxy policy handler . * @ param id the Oid of the proxy policy to install the handler for . * @ param handler the proxy policy handler . * @ return < code > ProxyPolicyHandler < / code > the previous handler * installed under the specified id . Usually , will be null . */ public ProxyPolicyHandler setProxyPolicyHandler ( String id , ProxyPolicyHandler handler ) { } }
if ( id == null ) { throw new IllegalArgumentException ( i18n . getMessage ( "proxyPolicyId" ) ) ; } if ( handler == null ) { throw new IllegalArgumentException ( i18n . getMessage ( "proxyPolicyHandler" ) ) ; } if ( this . proxyPolicyHandlers == null ) { this . proxyPolicyHandlers = new Hashtable ( ) ; } return ( ProxyPolicyHandler ) this . proxyPolicyHandlers . put ( id , handler ) ;
public class ClustersInner { /** * Checks that the cluster name is valid and is not already in use . * @ param location Azure location . * @ param name Cluster name . * @ param serviceCallback the async ServiceCallback to handle successful and failed responses . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the { @ link ServiceFuture } object */ public ServiceFuture < CheckNameResultInner > checkNameAvailabilityAsync ( String location , String name , final ServiceCallback < CheckNameResultInner > serviceCallback ) { } }
return ServiceFuture . fromResponse ( checkNameAvailabilityWithServiceResponseAsync ( location , name ) , serviceCallback ) ;
public class CPDefinitionOptionValueRelPersistenceImpl { /** * Returns the cp definition option value rels before and after the current cp definition option value rel in the ordered set where companyId = & # 63 ; . * @ param CPDefinitionOptionValueRelId the primary key of the current cp definition option value rel * @ param companyId the company ID * @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > ) * @ return the previous , current , and next cp definition option value rel * @ throws NoSuchCPDefinitionOptionValueRelException if a cp definition option value rel with the primary key could not be found */ @ Override public CPDefinitionOptionValueRel [ ] findByCompanyId_PrevAndNext ( long CPDefinitionOptionValueRelId , long companyId , OrderByComparator < CPDefinitionOptionValueRel > orderByComparator ) throws NoSuchCPDefinitionOptionValueRelException { } }
CPDefinitionOptionValueRel cpDefinitionOptionValueRel = findByPrimaryKey ( CPDefinitionOptionValueRelId ) ; Session session = null ; try { session = openSession ( ) ; CPDefinitionOptionValueRel [ ] array = new CPDefinitionOptionValueRelImpl [ 3 ] ; array [ 0 ] = getByCompanyId_PrevAndNext ( session , cpDefinitionOptionValueRel , companyId , orderByComparator , true ) ; array [ 1 ] = cpDefinitionOptionValueRel ; array [ 2 ] = getByCompanyId_PrevAndNext ( session , cpDefinitionOptionValueRel , companyId , orderByComparator , false ) ; return array ; } catch ( Exception e ) { throw processException ( e ) ; } finally { closeSession ( session ) ; }
public class BackboneGeneration { /** * Computes the backbone for a given collection of formulas w . r . t . a collection of variables and a backbone type . * @ param formulas the given collection of formulas * @ param variables the given collection of relevant variables for the backbone computation * @ param type the type of backbone variables that should be computed * @ return the backbone or { @ code null } if the formula is UNSAT */ public static Backbone compute ( final Collection < Formula > formulas , final Collection < Variable > variables , final BackboneType type ) { } }
solver . reset ( ) ; solver . add ( formulas ) ; return solver . compute ( variables , type ) ;
public class AfplibPackageImpl {

    /**
     * Returns the {@link EClass} for the TileSetColor classifier.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EClass getTileSetColor() {
        // Lazily resolve the classifier from the globally registered package on first access.
        // NOTE(review): index 396 is the generated position of this classifier in the
        // package's classifier list — regenerated, not hand-edited.
        if (tileSetColorEClass == null) {
            tileSetColorEClass = (EClass) EPackage.Registry.INSTANCE
                .getEPackage(AfplibPackage.eNS_URI).getEClassifiers().get(396);
        }
        return tileSetColorEClass;
    }
}
public class Builder { /** * This method is not called " parse " to avoid confusion with file parsing methods */ public Document parseString ( String text ) throws SAXException { } }
try { return parse ( new InputSource ( new StringReader ( text ) ) ) ; } catch ( IOException e ) { throw new RuntimeException ( "unexpected world exception while reading memory stream" , e ) ; }
public class AbstractValidateableDialogBuilder { /** * Adds all validators , which are contained by a specific collection and should be executed when * the positive button of the dialog , which is created by the builder , is clicked . * @ param validators * A collection , which contains all validators , which should be added , as an instance of * the type { @ link Collection } or an empty collection , if no validators should be added * @ return The builder , the method has been called upon , as an instance of the generic type * BuilderType */ public final BuilderType addAllDialogValidators ( @ NonNull final Collection < DialogValidator > validators ) { } }
Condition . INSTANCE . ensureNotNull ( validators , "The collection may not be null" ) ; getProduct ( ) . addAllDialogValidators ( validators ) ; return self ( ) ;
public class HttpConnection { /** * / * Exception reporting policy method . * @ param e the Throwable to report . */ private void exception ( Throwable e ) { } }
try { _persistent = false ; int error_code = HttpResponse . __500_Internal_Server_Error ; if ( e instanceof HttpException ) { error_code = ( ( HttpException ) e ) . getCode ( ) ; if ( _request == null ) log . warn ( e . toString ( ) ) ; else log . warn ( _request . getRequestLine ( ) + " " + e . toString ( ) ) ; log . debug ( LogSupport . EXCEPTION , e ) ; } else if ( e instanceof EOFException ) { LogSupport . ignore ( log , e ) ; return ; } else { _request . setAttribute ( "javax.servlet.error.exception_type" , e . getClass ( ) ) ; _request . setAttribute ( "javax.servlet.error.exception" , e ) ; if ( _request == null ) log . warn ( LogSupport . EXCEPTION , e ) ; else log . warn ( _request . getRequestLine ( ) , e ) ; } if ( _response != null && ! _response . isCommitted ( ) ) { _response . reset ( ) ; _response . removeField ( HttpFields . __TransferEncoding ) ; _response . setField ( HttpFields . __Connection , HttpFields . __Close ) ; _response . sendError ( error_code ) ; } } catch ( Exception ex ) { LogSupport . ignore ( log , ex ) ; }
public class InternalXtextParser {

    /**
     * InternalXtext.g:1429:1: ruleEnumLiterals : ( ( rule__EnumLiterals__Group__0 ) ) ;
     */
    public final void ruleEnumLiterals() throws RecognitionException {
        // Generated ANTLR rule body: delegates to the group rule while keeping the parser's
        // follow-set stack balanced; recognition errors are reported and recovered from.
        int stackSize = keepStackSize();
        try {
            // InternalXtext.g:1433:2: ( ( ( rule__EnumLiterals__Group__0 ) ) )
            // InternalXtext.g:1434:2: ( ( rule__EnumLiterals__Group__0 ) )
            {
                // InternalXtext.g:1434:2: ( ( rule__EnumLiterals__Group__0 ) )
                // InternalXtext.g:1435:3: ( rule__EnumLiterals__Group__0 )
                {
                    before(grammarAccess.getEnumLiteralsAccess().getGroup());
                    // InternalXtext.g:1436:3: ( rule__EnumLiterals__Group__0 )
                    // InternalXtext.g:1436:4: rule__EnumLiterals__Group__0
                    {
                        pushFollow(FollowSets000.FOLLOW_2);
                        rule__EnumLiterals__Group__0();
                        // Pop the follow-set frame pushed above.
                        state._fsp--;
                    }
                    after(grammarAccess.getEnumLiteralsAccess().getGroup());
                }
            }
        } catch (RecognitionException re) {
            reportError(re);
            recover(input, re);
        } finally {
            restoreStackSize(stackSize);
        }
        return;
    }
}
public class ApiRetryStrategyManager { /** * Get the { @ link ApiRetryStrategy } for the specified AdWords API service / utility name . * @ param name the specified AdWords API service / utility name * @ param isUtility whether this is for some AdWords API utility , i . e . , from calling { @ link * com . google . api . ads . adwords . lib . factory . AdWordsServicesInterface # getUtility ( com . google . api . ads . adwords . lib . client . AdWordsSession , * Class ) } . * @ return the corresponding { @ link ApiRetryStrategy } object , or null if it ' s not supported by * this rate limiter extension */ public static @ Nullable ApiRetryStrategy getRetryStrategy ( String name , boolean isUtility ) { } }
ApiRateLimitBucket bucket = getRateLimitBucket ( name , isUtility ) ; return bucket == null ? null : bucketToStrategy . get ( bucket ) ;
public class CmsSearchIndex { /** * Returns the Lucene document with the given root path from the index . < p > * @ param rootPath the root path of the document to get * @ return the Lucene document with the given root path from the index * @ deprecated Use { @ link # getDocument ( String , String ) } instead and provide { @ link org . opencms . search . fields . CmsLuceneField # FIELD _ PATH } as field to search in */ @ Deprecated public Document getDocument ( String rootPath ) { } }
if ( getDocument ( CmsSearchField . FIELD_PATH , rootPath ) != null ) { return ( Document ) getDocument ( CmsSearchField . FIELD_PATH , rootPath ) . getDocument ( ) ; } return null ;
public class EnableUserRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( EnableUserRequest enableUserRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( enableUserRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( enableUserRequest . getUserName ( ) , USERNAME_BINDING ) ; protocolMarshaller . marshall ( enableUserRequest . getAuthenticationType ( ) , AUTHENTICATIONTYPE_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class MsvcProjectWriter {

    /**
     * Writes compiler options.
     * Emits two "# ADD ... CPP" lines into the project file: a BASE line and a configuration
     * line, built from the compiler configuration's include path and pre-arguments, with
     * debug/release-specific options remapped to the requested configuration.
     *
     * @param writer Writer writer
     * @param isDebug true if debug
     * @param baseDir String base directory
     * @param compilerConfig compiler configuration
     * @throws IOException if error on writing project
     */
    private void writeCompileOptions(final Writer writer, final boolean isDebug, final String baseDir,
            final CommandLineCompilerConfiguration compilerConfig) throws IOException {
        final StringBuffer baseOptions = new StringBuffer(50);
        baseOptions.append("# ADD BASE CPP");
        final StringBuffer options = new StringBuffer(50);
        options.append("# ADD CPP");
        // Include directories are emitted as quoted, Windows-style paths relative to baseDir.
        final File[] includePath = compilerConfig.getIncludePath();
        for (final File element : includePath) {
            options.append(" /I \"");
            final String relPath = CUtil.getRelativePath(baseDir, element);
            options.append(CUtil.toWindowsPath(relPath));
            options.append('"');
        }
        // Map of upper-cased option -> replacement for the target configuration; looked up
        // against the upper-cased form of each pre-argument below.
        final Hashtable<String, String> optionMap = new Hashtable<>();
        if (isDebug) {
            // release options that should be mapped to debug counterparts
            optionMap.put("/MT", "/MTd");
            optionMap.put("/ML", "/MLd");
            optionMap.put("/MD", "/MDd");
            optionMap.put("/O2", "/Od");
            optionMap.put("/O3", "/Od");
        } else {
            // debug options that should be mapped to release counterparts
            // (empty replacement means the option is dropped entirely)
            optionMap.put("/MTD", "/MT");
            optionMap.put("/MLD", "/ML");
            optionMap.put("/MDD", "/MD");
            optionMap.put("/GM", "");
            optionMap.put("/ZI", "");
            optionMap.put("/OD", "/O2");
            optionMap.put("/GZ", "");
        }
        final String[] preArgs = compilerConfig.getPreArguments();
        for (final String preArg : preArgs) {
            if (preArg.startsWith("/D")) {
                options.append(" /D ");
                baseOptions.append(" /D ");
                final String body = preArg.substring(2);
                if (preArg.indexOf('=') >= 0) {
                    // NAME=VALUE define: emit verbatim.
                    options.append(body);
                    baseOptions.append(body);
                } else {
                    // Bare define: quote it, and swap NDEBUG/_DEBUG to match the
                    // requested configuration.
                    final StringBuffer buf = new StringBuffer("\"");
                    if ("NDEBUG".equals(body) || "_DEBUG".equals(body)) {
                        if (isDebug) {
                            buf.append("_DEBUG");
                        } else {
                            buf.append("NDEBUG");
                        }
                    } else {
                        buf.append(body);
                    }
                    buf.append("\"");
                    options.append(buf);
                    baseOptions.append(buf);
                }
            } else if (!preArg.startsWith("/I")) {
                // Includes were already handled above; every other option is passed
                // through, remapped via optionMap when a counterpart exists.
                String option = preArg;
                final String key = option.toUpperCase(Locale.US);
                if (optionMap.containsKey(key)) {
                    option = optionMap.get(key);
                }
                options.append(" ");
                options.append(option);
                baseOptions.append(" ");
                baseOptions.append(option);
            }
        }
        baseOptions.append("\r\n");
        options.append("\r\n");
        writer.write(baseOptions.toString());
        writer.write(options.toString());
    }
}
public class JsonSchema { /** * Fetches the nested or primitive array items type from schema . * @ return * @ throws DataConversionException */ public Type getTypeOfArrayItems ( ) throws DataConversionException { } }
JsonSchema arrayValues = getItemsWithinDataType ( ) ; if ( arrayValues == null ) { throw new DataConversionException ( "Array types only allow values as primitive, null or JsonObject" ) ; } return arrayValues . getType ( ) ;
public class CreateQualificationTypeRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( CreateQualificationTypeRequest createQualificationTypeRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( createQualificationTypeRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( createQualificationTypeRequest . getName ( ) , NAME_BINDING ) ; protocolMarshaller . marshall ( createQualificationTypeRequest . getKeywords ( ) , KEYWORDS_BINDING ) ; protocolMarshaller . marshall ( createQualificationTypeRequest . getDescription ( ) , DESCRIPTION_BINDING ) ; protocolMarshaller . marshall ( createQualificationTypeRequest . getQualificationTypeStatus ( ) , QUALIFICATIONTYPESTATUS_BINDING ) ; protocolMarshaller . marshall ( createQualificationTypeRequest . getRetryDelayInSeconds ( ) , RETRYDELAYINSECONDS_BINDING ) ; protocolMarshaller . marshall ( createQualificationTypeRequest . getTest ( ) , TEST_BINDING ) ; protocolMarshaller . marshall ( createQualificationTypeRequest . getAnswerKey ( ) , ANSWERKEY_BINDING ) ; protocolMarshaller . marshall ( createQualificationTypeRequest . getTestDurationInSeconds ( ) , TESTDURATIONINSECONDS_BINDING ) ; protocolMarshaller . marshall ( createQualificationTypeRequest . getAutoGranted ( ) , AUTOGRANTED_BINDING ) ; protocolMarshaller . marshall ( createQualificationTypeRequest . getAutoGrantedValue ( ) , AUTOGRANTEDVALUE_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class KernelDeviceProfile { /** * Elapsed time for all events { @ code from } through { @ code to } for the last thread that executed this KernelDeviceProfile * instance respective kernel and device . * @ param from the first event to consider that defines the elapsed period start * @ param to the last event to consider for elapsed period */ public double getElapsedTimeLastThread ( int from , int to ) { } }
Accumulator acc = lastAccumulator . get ( ) ; return acc == null ? Double . NaN : ( acc . currentTimes [ to ] - acc . currentTimes [ from ] ) / MILLION ;
public class FelixGogoCommandsServiceGenerator {
    /**
     * Generates (via Javassist) a CommandProvider class and instance wrapping the
     * given service: for every command name that matches a method on the service,
     * a public {@code void name(CommandSession, String[])} method is emitted that
     * delegates to {@code runCommand}, annotated with a GoGo {@code @Descriptor}
     * carrying the command's help text.
     *
     * @param service  commands service
     * @param commands commands map (name = help)
     * @param suffix   unique class suffix
     * @return generated CommandProvider instance
     * @throws Exception if something went wrong
     */
    public static Object generate(Object service, Map<String, String> commands, String suffix) throws Exception {
        // Generate a class with a unique name derived from the base class + suffix.
        CtClass ctClass = POOL.makeClass(AbstractFelixCommandsService.class.getName() + suffix);
        try {
            if (!ctClass.isFrozen()) {
                ClassFile ccFile = ctClass.getClassFile();
                ccFile.setVersionToJava5();
                ConstPool constPool = ccFile.getConstPool();
                // Set superclass so the generated class inherits runCommand(...).
                CtClass abstractCtClass = POOL.getCtClass(AbstractFelixCommandsService.class.getName());
                ctClass.setSuperclass(abstractCtClass);
                // Add a single constructor taking the wrapped service object.
                CtClass serviceCtClass = POOL.getCtClass(Object.class.getName());
                CtConstructor ctConstructor = new CtConstructor(new CtClass[]{serviceCtClass}, ctClass);
                ctConstructor.setModifiers(Modifier.PUBLIC);
                ctConstructor.setBody("super($1);");
                ctClass.addConstructor(ctConstructor);
                // Add one delegating method per command.
                CtClass sessionCtClass = POOL.getCtClass(CommandSession.class.getName());
                CtClass stringArrayCtClass = POOL.getCtClass(String[].class.getName());
                Set<String> names = commands.keySet();
                for (String name : names) {
                    // Only emit commands that the service actually implements.
                    if (isMethodAvailable(service, name)) {
                        CtMethod ctMethod = new CtMethod(CtClass.voidType, name, new CtClass[]{sessionCtClass, stringArrayCtClass}, ctClass);
                        ctMethod.setModifiers(Modifier.PUBLIC);
                        ctMethod.setBody("runCommand(\"" + name + "\", $1, $2);");
                        ctClass.addMethod(ctMethod);
                        // Attach the GoGo @Descriptor annotation carrying the help text.
                        AnnotationsAttribute annotationsAttribute = new AnnotationsAttribute(constPool, AnnotationsAttribute.visibleTag);
                        Annotation annotation = new Annotation(Descriptor.class.getName(), constPool);
                        annotation.addMemberValue("value", new StringMemberValue(commands.get(name), constPool));
                        annotationsAttribute.addAnnotation(annotation);
                        ctMethod.getMethodInfo().addAttribute(annotationsAttribute);
                    }
                }
            }
            // Load the generated class and instantiate it with the service.
            Class<?> aClass = ctClass.toClass(FelixGogoCommandsServiceGenerator.class.getClassLoader());
            Constructor<?> constructor = aClass.getConstructor(Object.class);
            return constructor.newInstance(service);
        } catch (Exception e) {
            // NOTE(review): the CtClass is detached only on failure; on success it
            // remains registered in the pool -- confirm this is intentional.
            ctClass.detach();
            throw e;
        }
    }
}
public class ResetCacheParameterGroupRequest { /** * An array of parameter names to reset to their default values . If < code > ResetAllParameters < / code > is * < code > true < / code > , do not use < code > ParameterNameValues < / code > . If < code > ResetAllParameters < / code > is * < code > false < / code > , you must specify the name of at least one parameter to reset . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setParameterNameValues ( java . util . Collection ) } or { @ link # withParameterNameValues ( java . util . Collection ) } * if you want to override the existing values . * @ param parameterNameValues * An array of parameter names to reset to their default values . If < code > ResetAllParameters < / code > is * < code > true < / code > , do not use < code > ParameterNameValues < / code > . If < code > ResetAllParameters < / code > is * < code > false < / code > , you must specify the name of at least one parameter to reset . * @ return Returns a reference to this object so that method calls can be chained together . */ public ResetCacheParameterGroupRequest withParameterNameValues ( ParameterNameValue ... parameterNameValues ) { } }
if ( this . parameterNameValues == null ) { setParameterNameValues ( new com . amazonaws . internal . SdkInternalList < ParameterNameValue > ( parameterNameValues . length ) ) ; } for ( ParameterNameValue ele : parameterNameValues ) { this . parameterNameValues . add ( ele ) ; } return this ;
public class CouchbaseSchemaManager { /** * Adds the bucket . * @ param name * the name */ private void addBucket ( String name ) { } }
String qouta = csmd . getBucketProperty ( "bucket.quota" ) ; int bucketQuota = qouta != null ? Integer . parseInt ( qouta ) : DEFAULT_RAM_SIZE_IN_MB ; BucketSettings bucketSettings = new DefaultBucketSettings . Builder ( ) . type ( BucketType . COUCHBASE ) . name ( name ) . quota ( bucketQuota ) . build ( ) ; try { clusterManager . insertBucket ( bucketSettings ) ; LOGGER . info ( "Bucket [" + name + "] is added!" ) ; } catch ( CouchbaseException ex ) { LOGGER . error ( "Not able to add bucket [" + name + "]." , ex ) ; throw new KunderaException ( "Not able to add bucket [" + name + "]." , ex ) ; }
public class TopLevelDomainsInner {
    /**
     * Gets all legal agreements that a user needs to accept before purchasing a domain.
     * Fetches a single page via the next-page link.
     *
     * @param nextPageLink The NextLink from the previous successful call to List operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the PagedList&lt;TldLegalAgreementInner&gt; object wrapped in {@link ServiceResponse} if successful.
     */
    public Observable<ServiceResponse<Page<TldLegalAgreementInner>>> listAgreementsNextSinglePageAsync(final String nextPageLink) {
        if (nextPageLink == null) {
            throw new IllegalArgumentException("Parameter nextPageLink is required and cannot be null.");
        }
        // The next-page link is already a fully formed URL.
        String nextUrl = String.format("%s", nextPageLink);
        return service.listAgreementsNext(nextUrl, this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Page<TldLegalAgreementInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<TldLegalAgreementInner>>> call(Response<ResponseBody> response) {
                    try {
                        // Deserialize the raw HTTP response into one page of agreements.
                        ServiceResponse<PageImpl<TldLegalAgreementInner>> result = listAgreementsNextDelegate(response);
                        return Observable.just(new ServiceResponse<Page<TldLegalAgreementInner>>(result.body(), result.response()));
                    } catch (Throwable t) {
                        // Surface deserialization/service errors through the Observable.
                        return Observable.error(t);
                    }
                }
            });
    }
}
public class AbstractKTypeCollection {
    /**
     * Default implementation: removes every element contained in {@code c} by
     * delegating to the predicate-based {@code removeAll}.
     */
    @Override
    public int removeAll(final KTypeLookupContainer<? super KType> c) {
        // c holds super-types of KType and is never modified here, so the
        // membership test below is safe to perform unchecked.
        final KTypePredicate<KType> containedInC = new KTypePredicate<KType>() {
            public boolean apply(KType k) {
                return c.contains(k);
            }
        };
        return this.removeAll(containedInC);
    }
}
public class ProviderContext { /** * Creates a new ProviderContext based on the provider id and rate type ( s ) . * @ param provider the provider id , not null . * @ return a new { @ link ProviderContext } instance . */ public static ProviderContext of ( String provider ) { } }
return ProviderContextBuilder . of ( provider , RateType . ANY ) . build ( ) ;
public class MountTargetDescriptionMarshaller {
    /**
     * Marshalls the given {@code MountTargetDescription} by writing each of its
     * fields to the wire via the protocol marshaller.
     *
     * @param mountTargetDescription the object to marshall; must not be null
     * @param protocolMarshaller     the marshaller receiving the field values
     * @throws SdkClientException if the input is null or marshalling fails
     */
    public void marshall(MountTargetDescription mountTargetDescription, ProtocolMarshaller protocolMarshaller) {
        if (mountTargetDescription == null) {
            // The SDK marshaller contract rejects null inputs outright.
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Emit each field under its generated protocol binding constant.
            protocolMarshaller.marshall(mountTargetDescription.getOwnerId(), OWNERID_BINDING);
            protocolMarshaller.marshall(mountTargetDescription.getMountTargetId(), MOUNTTARGETID_BINDING);
            protocolMarshaller.marshall(mountTargetDescription.getFileSystemId(), FILESYSTEMID_BINDING);
            protocolMarshaller.marshall(mountTargetDescription.getSubnetId(), SUBNETID_BINDING);
            protocolMarshaller.marshall(mountTargetDescription.getLifeCycleState(), LIFECYCLESTATE_BINDING);
            protocolMarshaller.marshall(mountTargetDescription.getIpAddress(), IPADDRESS_BINDING);
            protocolMarshaller.marshall(mountTargetDescription.getNetworkInterfaceId(), NETWORKINTERFACEID_BINDING);
        } catch (Exception e) {
            // Wrap any failure in the SDK client exception, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class HintScanner { /** * Finds the JSON - HOME hints on the given method . * @ param method the method to scan * @ return the hints */ public Hints findHint ( Method method ) { } }
Hints hints = new Hints ( ) ; for ( Annotation annotation : method . getDeclaredAnnotations ( ) ) { findAllow ( hints , annotation ) ; findFormats ( hints , annotation ) ; } return hints ;
public class DescribeHsmClientCertificatesRequest { /** * A tag value or values for which you want to return all matching HSM client certificates that are associated with * the specified tag value or values . For example , suppose that you have HSM client certificates that are tagged * with values called < code > admin < / code > and < code > test < / code > . If you specify both of these tag values in the * request , Amazon Redshift returns a response with the HSM client certificates that have either or both of these * tag values associated with them . * @ param tagValues * A tag value or values for which you want to return all matching HSM client certificates that are * associated with the specified tag value or values . For example , suppose that you have HSM client * certificates that are tagged with values called < code > admin < / code > and < code > test < / code > . If you specify * both of these tag values in the request , Amazon Redshift returns a response with the HSM client * certificates that have either or both of these tag values associated with them . */ public void setTagValues ( java . util . Collection < String > tagValues ) { } }
if ( tagValues == null ) { this . tagValues = null ; return ; } this . tagValues = new com . amazonaws . internal . SdkInternalList < String > ( tagValues ) ;
public class HttpTracerUtils { /** * Parse tracer key * @ param tracerMap tracer map * @ param key tracer key * @ param value tracer value */ public static void parseTraceKey ( Map < String , String > tracerMap , String key , String value ) { } }
String lowKey = key . substring ( PREFIX . length ( ) ) ; String realKey = TRACER_KEY_MAP . get ( lowKey ) ; tracerMap . put ( realKey == null ? lowKey : realKey , value ) ;
public class HelloServiceImpl {
    /**
     * Sends a {@link HelloReply} using {@code blockingTaskExecutor}.
     *
     * @see <a href="https://line.github.io/armeria/server-grpc.html#blocking-service-implementation">Blocking
     *      service implementation</a>
     */
    @Override
    public void blockingHello(HelloRequest request, StreamObserver<HelloReply> responseObserver) {
        // Armeria runs gRPC service logic on the event loop by default (unlike
        // upstream gRPC-Java), so blocking work must be moved off it. Options:
        //   1. submit to Armeria's blockingTaskExecutor (used here);
        //   2. GrpcServiceBuilder.useBlockingTaskExecutor(true);
        //   3. a separately managed thread pool.
        final ServiceRequestContext ctx = RequestContext.current();
        ctx.blockingTaskExecutor().submit(new Runnable() {
            @Override
            public void run() {
                try {
                    // Simulate a blocking API call.
                    Thread.sleep(3000);
                } catch (Exception ignored) {
                    // Best-effort: an interrupted sleep still produces a reply.
                }
                responseObserver.onNext(buildReply(toMessage(request.getName())));
                responseObserver.onCompleted();
            }
        });
    }
}
public class RenderVisitorAssistantForMsgs { /** * Private helper for visitMsgFallbackGroupNode ( ) to render a message from its translation . */ private void renderMsgFromTranslation ( MsgNode msg , ImmutableList < SoyMsgPart > msgParts , @ Nullable ULocale locale ) { } }
SoyMsgPart firstPart = msgParts . get ( 0 ) ; if ( firstPart instanceof SoyMsgPluralPart ) { new PlrselMsgPartsVisitor ( msg , locale ) . visitPart ( ( SoyMsgPluralPart ) firstPart ) ; } else if ( firstPart instanceof SoyMsgSelectPart ) { new PlrselMsgPartsVisitor ( msg , locale ) . visitPart ( ( SoyMsgSelectPart ) firstPart ) ; } else { for ( SoyMsgPart msgPart : msgParts ) { if ( msgPart instanceof SoyMsgRawTextPart ) { RenderVisitor . append ( master . getCurrOutputBufForUseByAssistants ( ) , ( ( SoyMsgRawTextPart ) msgPart ) . getRawText ( ) ) ; } else if ( msgPart instanceof SoyMsgPlaceholderPart ) { String placeholderName = ( ( SoyMsgPlaceholderPart ) msgPart ) . getPlaceholderName ( ) ; visit ( msg . getRepPlaceholderNode ( placeholderName ) ) ; } else { throw new AssertionError ( ) ; } } }
public class PropagateTransform {
    /**
     * Forward-fills (propagates) a metric's datapoints so that a value exists at
     * every expected timestamp: timestamps are generated from the first datapoint,
     * stepping by {@code windowSizeInSeconds} (snapping to real datapoint
     * timestamps when they occur sooner), up to the later of the last datapoint
     * and the query end time; gaps are filled with the previous known value.
     */
    private void _propagateMetricTransform(Metric metric, long windowSizeInSeconds, QueryContext queryContext) {
        // If the datapoint set is empty or has a single datapoint, return directly.
        if (metric.getDatapoints().isEmpty() || metric.getDatapoints().size() == 1) {
            return;
        }
        Long[] startAndEndTimestamps = QueryUtils.getStartAndEndTimesWithMaxInterval(queryContext);
        Map<Long, Double> propagateDatapoints = new TreeMap<>();
        Map<Long, Double> sortedDatapoints = new TreeMap<>(metric.getDatapoints());
        Long[] sortedTimestamps = new Long[sortedDatapoints.size()];
        sortedDatapoints.keySet().toArray(sortedTimestamps);
        Long startTimestamp = sortedTimestamps[0];
        // Extend to the query's end time if it lies past the last datapoint.
        Long endTimestamp = Math.max(sortedTimestamps[sortedTimestamps.length - 1], startAndEndTimestamps[1]);
        // Create propagateDatapoints with all the expected timestamps; values
        // missing from the source stay null for now and are filled below.
        int index = 1;
        while (startTimestamp <= endTimestamp) {
            propagateDatapoints.put(startTimestamp, sortedDatapoints.containsKey(startTimestamp) ? sortedDatapoints.get(startTimestamp) : null);
            if (index >= sortedDatapoints.size()) {
                // Past the last real datapoint: keep stepping by the window size.
                startTimestamp = startTimestamp + windowSizeInSeconds * 1000;
                continue;
            }
            if ((startTimestamp + windowSizeInSeconds * 1000) < sortedTimestamps[index]) {
                // Next real datapoint is more than one window away: insert an
                // intermediate timestamp one window ahead.
                startTimestamp = startTimestamp + windowSizeInSeconds * 1000;
            } else {
                // Snap to the next real datapoint's timestamp.
                startTimestamp = sortedTimestamps[index];
                index++;
            }
        }
        int newLength = propagateDatapoints.size();
        List<Long> newTimestamps = new ArrayList<Long>();
        List<Double> newValues = new ArrayList<>();
        for (Map.Entry<Long, Double> entry : propagateDatapoints.entrySet()) {
            newTimestamps.add(entry.getKey());
            newValues.add(entry.getValue());
        }
        // Forward-fill: replace each null with the most recent non-null value.
        Double prev = null;
        for (int i = 0; i < newLength; i++) {
            if (newValues.get(i) != null) {
                prev = newValues.get(i);
            } else {
                propagateDatapoints.put(newTimestamps.get(i), prev);
            }
        }
        metric.setDatapoints(propagateDatapoints);
    }
}
public class MithraAbstractDatabaseObject {
    /**
     * Executes a batch of UPDATE operations that all share the same source
     * attribute. Depending on database capabilities and locking mode this either:
     * (a) rewrites the batch as an insert-into-temp + joined update,
     * (b) falls back to one-at-a-time updates (optimistic locking that cannot be
     *     combined with batching), or
     * (c) runs a JDBC prepared-statement batch built from the first operation's
     *     SET clauses, flushed in chunks bounded by the database's batch limit.
     */
    protected void zBatchUpdateForSameSourceAttribute(List<UpdateOperation> updateOperations, BatchUpdateOperation batchUpdateOperation) {
        UpdateOperation firstOperation = updateOperations.get(0);
        MithraDataObject firstData = this.getDataForUpdate(firstOperation);
        Object source = this.getSourceAttributeValueFromObjectGeneric(firstData);
        DatabaseType databaseType = this.getDatabaseTypeGenericSource(source);
        // Path (a): large eligible batches go through insert-and-join.
        if (databaseType.getUpdateViaInsertAndJoinThreshold() > 0
                && databaseType.getUpdateViaInsertAndJoinThreshold() < updateOperations.size()
                && this.getFinder().getVersionAttribute() == null
                && !batchUpdateOperation.isIncrement()
                && batchUpdateOperation.isEligibleForUpdateViaJoin()) {
            zBatchUpdateViaInsertAndJoin(updateOperations, source, databaseType);
            return;
        }
        if (this.hasOptimisticLocking()) {
            if (this.getMithraObjectPortal().getTxParticipationMode().isOptimisticLocking()
                    && !databaseType.canCombineOptimisticWithBatchUpdates()) {
                // Path (b): we'll do single updates.
                for (int i = 0; i < updateOperations.size(); i++) {
                    UpdateOperation updateOperation = updateOperations.get(i);
                    zUpdate(updateOperation.getMithraObject(), updateOperation.getUpdates());
                }
                return;
            }
        }
        // Path (c): build the shared UPDATE statement from the first operation's
        // set clauses (all operations in the batch share the same shape).
        List firstUpdateWrappers = firstOperation.getUpdates();
        StringBuilder builder = new StringBuilder(30 + firstUpdateWrappers.size() * 12);
        builder.append("update ");
        builder.append(this.getFullyQualifiedTableNameGenericSource(source)).append(" set ");
        for (int i = 0; i < firstUpdateWrappers.size(); i++) {
            AttributeUpdateWrapper wrapper = (AttributeUpdateWrapper) firstUpdateWrappers.get(i);
            if (i > 0) {
                builder.append(", ");
            }
            builder.append(wrapper.getSetAttributeSql());
        }
        builder.append(this.getSqlWhereClauseForBatchUpdateForSameSourceAttribute(firstData));
        String sql = builder.toString();
        Connection con = null;
        PreparedStatement stm = null;
        try {
            con = this.getConnectionForWriteGenericSource(source);
            TimeZone databaseTimeZone = this.getDatabaseTimeZoneGenericSource(source);
            if (this.getSqlLogger().isDebugEnabled()) {
                this.logWithSource(this.getSqlLogger(), source, "batch update of " + updateOperations.size() + " objects with: " + sql);
            }
            PrintablePreparedStatement pps = null;
            if (this.getBatchSqlLogger().isDebugEnabled()) {
                pps = new PrintablePreparedStatement(sql);
            }
            stm = con.prepareStatement(sql);
            // Cap the JDBC batch size according to the database's prepared-statement
            // limits (driven by the total bind-parameter count per row).
            int batchSize = databaseType.getMaxPreparedStatementBatchCount(firstOperation.getUpdates().size() + this.getMithraObjectPortal().getFinder().getPrimaryKeyAttributes().length);
            if (batchSize < 0) {
                batchSize = updateOperations.size();
            }
            int objectsInBatch = 0;
            int batchStart = 0;
            for (int u = 0; u < updateOperations.size(); u++) {
                UpdateOperation operation = updateOperations.get(u);
                MithraDataObject data = this.getDataForUpdate(operation);
                if (this.getBatchSqlLogger().isDebugEnabled()) {
                    // Mirror the bind parameters into the printable statement for logging.
                    pps.clearParameters();
                    int pos = operation.setSqlParameters(pps, databaseTimeZone, databaseType);
                    this.setPrimaryKeyAttributes(pps, pos, data, databaseTimeZone, databaseType);
                    this.logWithSource(this.getBatchSqlLogger(), source, "batch updating with: " + pps.getPrintableStatement());
                }
                int pos = operation.setSqlParameters(stm, databaseTimeZone, databaseType);
                this.setPrimaryKeyAttributes(stm, pos, data, databaseTimeZone, databaseType);
                operation.setUpdated();
                stm.addBatch();
                objectsInBatch++;
                if (objectsInBatch == batchSize) {
                    // Flush a full batch and start a new one.
                    this.executeBatchForUpdateOperations(stm, updateOperations, batchStart);
                    objectsInBatch = 0;
                    batchStart = u + 1;
                }
            }
            if (objectsInBatch > 0) {
                // Flush the final, partially filled batch.
                this.executeBatchForUpdateOperations(stm, updateOperations, batchStart);
            }
            stm.close();
            stm = null;
            // Notify listeners of the batch update.
            String dbid = this.getDatabaseIdentifierGenericSource(source);
            getNotificationEventManager().addMithraNotificationEventForBatchUpdate(dbid, this.getFullyQualifiedFinderClassName(), MithraNotificationEvent.UPDATE, updateOperations, firstUpdateWrappers, source);
        } catch (SQLException e) {
            this.analyzeAndWrapSqlExceptionGenericSource("batch update failed " + e.getMessage(), e, source, con);
        } finally {
            // Closes whichever of con/stm are still open (stm is nulled on success).
            this.closeStatementAndConnection(con, stm);
        }
    }
}
public class BaseMessagingEngineImpl { /** * Will return null for liberty * ( non - Javadoc ) * @ see com . ibm . ws . sib . admin . JsMessagingEngine # getBus ( ) */ public final LWMConfig getBus ( ) { } }
String thisMethodName = "getBus" ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) { SibTr . entry ( tc , thisMethodName , this ) ; } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) { SibTr . exit ( tc , thisMethodName ) ; } return this . _bus ;
public class JScreen {
    /**
     * Adds the screen controls to the second column of the grid.
     * Layout constraints come from {@link #getGBConstraints()}; the gridbag and
     * constraint parameters mentioned in older docs do not exist on this overload.
     *
     * @param parent The container to add the control(s) to.
     */
    public void addScreenControls(Container parent) {
        GridBagConstraints c = this.getGBConstraints();
        c.weightx = 1.0; // Grow edit and scroll pane but not label
        c.anchor = GridBagConstraints.NORTHWEST; // Edit boxes left justified
        c.gridx = 1; // Column 1
        c.gridy = GridBagConstraints.RELATIVE; // Bump Row each time
        c.gridwidth = 3; // end column
        // Iterate fields until getFieldForScreen signals the end with null;
        // SKIP_THIS_FIELD sentinels are silently skipped.
        for (int iIndex = 0; ; iIndex++) {
            Converter converter = this.getFieldForScreen(iIndex);
            if (converter == SKIP_THIS_FIELD)
                continue;
            if (converter == null)
                break;
            this.addScreenControl(parent, converter);
        }
    }
}
public class MetricUtils {
    /**
     * Rewrites a task-level metric name into its merged component-level form:
     * sets taskId to 0, clears the streamId, and keeps only the merged portion of
     * the metric name (the string after `.`, via getMergeMetricName).
     */
    public static String task2MergeCompName(String old) {
        String[] parts = old.split(DELIM);
        // Only full task metric names (>= 7 delimited fields) are rewritten;
        // shorter names pass through unchanged.
        if (parts.length >= 7) {
            // Switch the meta-type marker to COMPONENT, keeping the second char of
            // the original marker. NOTE(review): assumes parts[0].length() >= 2 -- confirm.
            parts[0] = MetaType.COMPONENT.getV() + parts[0].charAt(1);
            parts[parts.length - 3] = EMPTY; // clear streamId
            parts[parts.length - 4] = "0";   // merged component metrics use taskId 0
            String metricName = getMergeMetricName(parts[parts.length - 1]);
            parts[parts.length - 1] = metricName;
        }
        return concat(parts);
    }
}
public class FileDefinitionParser {
    /**
     * Reads a definition file.
     * @return an instance of {@link FileDefinition} (never null).
     * Parsing errors are stored in the result.<br>
     * See {@link FileDefinition#getParsingErrors()}.
     */
    public FileDefinition read() {
        // Parse blocks
        try {
            fillIn();
            mergeContiguousRegions(this.definitionFile.getBlocks());
        } catch (IOException e) {
            addModelError(ErrorCode.P_IO_ERROR, this.currentLineNumber, exception(e));
        }
        // Determine the file type from the kinds of blocks it contains.
        boolean hasFacets = false, hasComponents = false, hasInstances = false, hasImports = false;
        int ignorableBlocksCount = 0;
        for (AbstractBlock block : this.definitionFile.getBlocks()) {
            if (block.getInstructionType() == AbstractBlock.COMPONENT)
                hasComponents = true;
            else if (block.getInstructionType() == AbstractBlock.FACET)
                hasFacets = true;
            else if (block.getInstructionType() == AbstractBlock.INSTANCEOF)
                hasInstances = true;
            else if (block.getInstructionType() == AbstractBlock.IMPORT)
                hasImports = true;
            else if (block instanceof AbstractIgnorableInstruction)
                ignorableBlocksCount++;
        }
        if (hasInstances) {
            // Instances cannot be mixed with graph definitions (facets/components).
            if (!hasFacets && !hasComponents)
                this.definitionFile.setFileType(FileDefinition.INSTANCE);
            else
                addModelError(ErrorCode.P_INVALID_FILE_TYPE, 1);
        } else if (hasFacets || hasComponents) {
            this.definitionFile.setFileType(FileDefinition.GRAPH);
        } else if (hasImports) {
            // Imports only: the file merely aggregates other files.
            this.definitionFile.setFileType(FileDefinition.AGGREGATOR);
        } else if (ignorableBlocksCount == this.definitionFile.getBlocks().size()) {
            // Only ignorable instructions (comments/blanks): flag as empty.
            addModelError(ErrorCode.P_EMPTY_FILE, 1);
            this.definitionFile.setFileType(FileDefinition.EMPTY);
        }
        return this.definitionFile;
    }
}
public class FastJsonKit { /** * json to Record * @ param json * @ return */ public static Record jsonToRecord ( String json ) { } }
Map < String , Object > map = jsonToMap ( json ) ; return new Record ( ) . setColumns ( map ) ;
public class S {
    /**
     * Escapes characters that are special in regular expressions.
     *
     * NOTE(review): only the characters {@code / * { } < > - \ !} are escaped here;
     * other regex metacharacters (e.g. {@code . [ ] ( ) + ? ^ $ |}) pass through
     * unescaped -- confirm this limited set is intentional for the template use case.
     *
     * @param o the value to escape; {@code null} yields {@code RawData.NULL}
     * @return regex-escaped data
     */
    public static RawData escapeRegex(Object o) {
        // Null maps to the canonical NULL raw data; already-raw data passes through untouched.
        if (null == o) return RawData.NULL;
        if (o instanceof RawData) return (RawData) o;
        String s = o.toString();
        // Prefix each listed special character with a backslash.
        return new RawData(s.replaceAll("([\\/\\*\\{\\}\\<\\>\\-\\\\\\!])", "\\\\$1"));
    }
}
public class MetadataService { /** * Removes { @ link Permission } s from the specified { @ code path } . */ private CompletableFuture < Revision > removePermissionAtPointer ( Author author , String projectName , JsonPointer path , String commitSummary ) { } }
final Change < JsonNode > change = Change . ofJsonPatch ( METADATA_JSON , new RemoveOperation ( path ) . toJsonNode ( ) ) ; return metadataRepo . push ( projectName , Project . REPO_DOGMA , author , commitSummary , change ) ;
public class AbstractStub { /** * Returns a new stub with a deadline that is after the given { @ code duration } from now . * @ since 1.0.0 * @ see CallOptions # withDeadlineAfter */ public final S withDeadlineAfter ( long duration , TimeUnit unit ) { } }
return build ( channel , callOptions . withDeadlineAfter ( duration , unit ) ) ;
public class Model {
    /**
     * Converts a named attribute to <code>java.sql.Timestamp</code> if possible.
     * Acts as a validator if it cannot make a conversion.
     *
     * @param attributeName name of attribute to convert to <code>java.sql.Timestamp</code>.
     * @param format format for conversion. Refer to {@link java.text.SimpleDateFormat}
     * @return message passing for custom validation message.
     * @deprecated use {@link #timestampFormat(String, String...)} instead
     */
    @Deprecated
    protected static ValidationBuilder convertTimestamp(String attributeName, String format) {
        // Delegates to ModelDelegate with the concrete model class of this hierarchy.
        return ModelDelegate.convertTimestamp(modelClass(), attributeName, format);
    }
}
public class CmsStaticExportRfsRule { /** * Returns the rfs name for the given locale , only used for multi - language export . < p > * @ param rfsName the original rfs name * @ param fileSeparator the file separator to use * @ return the rfs name for the given locale */ public String getLocalizedRfsName ( String rfsName , String fileSeparator ) { } }
String locRfsName = null ; // this might be too simple locRfsName = CmsStringUtil . substitute ( rfsName , fileSeparator + CmsLocaleManager . getDefaultLocale ( ) . toString ( ) + fileSeparator , fileSeparator + getName ( ) + fileSeparator ) ; return locRfsName ;
public class IdentityHashMap { /** * Reconstitutes the < tt > IdentityHashMap < / tt > instance from a stream ( i . e . , * deserializes it ) . */ private void readObject ( java . io . ObjectInputStream s ) throws java . io . IOException , ClassNotFoundException { } }
// Read in any hidden stuff s . defaultReadObject ( ) ; // Read in size ( number of Mappings ) int size = s . readInt ( ) ; if ( size < 0 ) throw new java . io . StreamCorruptedException ( "Illegal mappings count: " + size ) ; init ( capacity ( size ) ) ; // Read the keys and values , and put the mappings in the table for ( int i = 0 ; i < size ; i ++ ) { @ SuppressWarnings ( "unchecked" ) K key = ( K ) s . readObject ( ) ; @ SuppressWarnings ( "unchecked" ) V value = ( V ) s . readObject ( ) ; putForCreate ( key , value ) ; }
public class SignTask { /** * { @ inheritDoc } */ public File execute ( JnlpDependencyConfig config , File file ) throws JnlpDependencyTaskException { } }
SignConfig sign = config . getSign ( ) ; boolean signVerify = sign . isVerify ( ) ; File signedJar = new File ( file . getParentFile ( ) , file . getName ( ) + ".sign" ) ; try { ioUtil . deleteFile ( signedJar ) ; } catch ( MojoExecutionException e ) { throw new JnlpDependencyTaskException ( e . getMessage ( ) , e . getCause ( ) ) ; } verboseLog ( config , "Sign " + signedJar . getName ( ) ) ; try { signTool . sign ( sign , file , signedJar ) ; } catch ( MojoExecutionException e ) { throw new JnlpDependencyTaskException ( e . getMessage ( ) , e . getCause ( ) ) ; } getLogger ( ) . debug ( "lastModified signedJar:" + signedJar . lastModified ( ) + " not signed Jar:" + file . lastModified ( ) ) ; if ( signVerify ) { verboseLog ( config , "Verify signature of " + signedJar . getName ( ) ) ; try { signTool . verify ( sign , signedJar , config . isVerbose ( ) ) ; } catch ( MojoExecutionException e ) { throw new JnlpDependencyTaskException ( e . getMessage ( ) , e . getCause ( ) ) ; } } return signedJar ;
public class DateFormatSymbols {
    /**
     * Returns time zone strings.
     * The array returned by this API is a two dimensional String array and
     * each row contains at least following strings:
     * <ul>
     * <li>ZoneStrings[n][0] - System time zone ID
     * <li>ZoneStrings[n][1] - Long standard time display name
     * <li>ZoneStrings[n][2] - Short standard time display name
     * <li>ZoneStrings[n][3] - Long daylight saving time display name
     * <li>ZoneStrings[n][4] - Short daylight saving time display name
     * </ul>
     * When a localized display name is not available, the corresponding
     * array element will be <code>null</code>.
     * <b>Note</b>: For the full set of time zone string data used by ICU,
     * use {@link TimeZoneNames} APIs instead.
     *
     * @return the time zone strings.
     */
    public String[][] getZoneStrings() {
        // Return a defensive copy when the strings were already computed/set.
        if (zoneStrings != null) {
            return duplicate(zoneStrings);
        }
        String[] tzIDs = TimeZone.getAvailableIDs();
        TimeZoneNames tznames = TimeZoneNames.getInstance(validLocale);
        tznames.loadAllDisplayNames();
        // Column order matches the documented layout: [1]=long std, [2]=short std,
        // [3]=long daylight, [4]=short daylight ([0] holds the zone ID).
        NameType types[] = {
            NameType.LONG_STANDARD,
            NameType.SHORT_STANDARD,
            NameType.LONG_DAYLIGHT,
            NameType.SHORT_DAYLIGHT
        };
        long now = System.currentTimeMillis();
        String[][] array = new String[tzIDs.length][5];
        for (int i = 0; i < tzIDs.length; i++) {
            // Display names are keyed by canonical zone ID; fall back to the raw ID.
            String canonicalID = TimeZone.getCanonicalID(tzIDs[i]);
            if (canonicalID == null) {
                canonicalID = tzIDs[i];
            }
            array[i][0] = tzIDs[i];
            tznames.getDisplayNames(canonicalID, types, now, array[i], 1);
        }
        zoneStrings = array;
        // NOTE(review): the freshly built array is returned without duplicate(),
        // unlike the cached path above -- callers could mutate the cache; confirm.
        return zoneStrings;
    }
}
public class StyledNamingConvention { @ Override public String fromClassNameToShortComponentName ( final String className ) { } }
if ( LdiStringUtil . isEmpty ( className ) ) { throw new EmptyRuntimeException ( "className" ) ; } String s = LdiStringUtil . decapitalize ( LdiClassUtil . getShortClassName ( className ) ) ; if ( s . endsWith ( implementationSuffix ) ) { return s . substring ( 0 , s . length ( ) - implementationSuffix . length ( ) ) ; } return s ;
public class WalkModFacade {
    /**
     * Removes a plugin from the configuration file.
     *
     * NOTE(review): exceptions thrown while updating the configuration are
     * captured and passed to updateMsg(...) but never rethrown, even though the
     * method declares {@code throws Exception} -- confirm this best-effort
     * behavior is intentional.
     *
     * @param pluginConfig
     *            Plugin configuration to remove.
     * @param recursive
     *            If it necessary to remove the plugin from all the submodules.
     * @throws Exception
     *             if the walkmod configuration file can't be read.
     */
    public void removePluginConfig(PluginConfig pluginConfig, boolean recursive) throws Exception {
        long startTime = System.currentTimeMillis();
        Exception exception = null;
        if (!cfg.exists()) {
            // Create a default configuration when none exists yet.
            init();
        }
        // Temporarily switch user.dir to the execution directory for the update.
        userDir = new File(System.getProperty("user.dir")).getAbsolutePath();
        System.setProperty("user.dir", options.getExecutionDirectory().getAbsolutePath());
        try {
            ConfigurationManager manager = new ConfigurationManager(cfg, false);
            ProjectConfigurationProvider cfgProvider = manager.getProjectConfigurationProvider();
            cfgProvider.removePluginConfig(pluginConfig, recursive);
        } catch (Exception e) {
            exception = e;
        } finally {
            // Always restore user.dir and report the outcome (including any failure).
            System.setProperty("user.dir", userDir);
            updateMsg(startTime, exception);
        }
    }
}
public class ReportingApi { /** * Subscribe to statistics ( asynchronously ) * Open a subscription for the specified set of statistics . The values are returned when you request them using [ / reporting / subscriptions / { subscriptionId } ] ( / reference / workspace / Reporting / index . html # peek ) . * @ param statisticsSubscribeData The collection of statistics you want to include in your subscription . ( required ) * @ param callback The callback to be executed when the API call finishes * @ return The request call * @ throws ApiException If fail to process the API call , e . g . serializing the request body object */ public com . squareup . okhttp . Call subscribeAsync ( StatisticsSubscribeData statisticsSubscribeData , final ApiCallback < InlineResponse2002 > callback ) throws ApiException { } }
ProgressResponseBody . ProgressListener progressListener = null ; ProgressRequestBody . ProgressRequestListener progressRequestListener = null ; if ( callback != null ) { progressListener = new ProgressResponseBody . ProgressListener ( ) { @ Override public void update ( long bytesRead , long contentLength , boolean done ) { callback . onDownloadProgress ( bytesRead , contentLength , done ) ; } } ; progressRequestListener = new ProgressRequestBody . ProgressRequestListener ( ) { @ Override public void onRequestProgress ( long bytesWritten , long contentLength , boolean done ) { callback . onUploadProgress ( bytesWritten , contentLength , done ) ; } } ; } com . squareup . okhttp . Call call = subscribeValidateBeforeCall ( statisticsSubscribeData , progressListener , progressRequestListener ) ; Type localVarReturnType = new TypeToken < InlineResponse2002 > ( ) { } . getType ( ) ; apiClient . executeAsync ( call , localVarReturnType , callback ) ; return call ;
public class BcDSAKeyParameterGenerator { /** * Convert usage index to key usage . * @ param usage usage index . * @ return key usage . */ private static DSAKeyValidationParameters . Usage getUsage ( int usage ) { } }
if ( usage == DSAParameterGenerationParameters . DIGITAL_SIGNATURE_USAGE ) { return DSAKeyValidationParameters . Usage . DIGITAL_SIGNATURE ; } else if ( usage == DSAParameterGenerationParameters . KEY_ESTABLISHMENT_USAGE ) { return DSAKeyValidationParameters . Usage . KEY_ESTABLISHMENT ; } return DSAKeyValidationParameters . Usage . ANY ;
public class DistortImageOps { /** * Rescales the input image and writes the results into the output image . The scale * factor is determined independently of the width and height . * @ param input Input image . Not modified . * @ param output Rescaled input image . Modified . * @ param borderType Describes how pixels outside the image border should be handled . * @ param interpType Which interpolation algorithm should be used . */ @ Deprecated public static < T extends ImageBase < T > > void scale ( T input , T output , BorderType borderType , InterpolationType interpType ) { } }
PixelTransformAffine_F32 model = DistortSupport . transformScale ( output , input , null ) ; if ( input instanceof ImageGray ) { distortSingle ( ( ImageGray ) input , ( ImageGray ) output , model , interpType , borderType ) ; } else if ( input instanceof Planar ) { distortPL ( ( Planar ) input , ( Planar ) output , model , borderType , interpType ) ; }
public class AbstractJSPExtensionFactory { public ExtensionProcessor createExtensionProcessor ( IServletContext webapp ) throws Exception { } }
if ( com . ibm . ejs . ras . TraceComponent . isAnyTracingEnabled ( ) && logger . isLoggable ( Level . FINE ) ) { logger . entering ( CLASS_NAME , "createExtensionProcessor" , " app contextPath --> " + webapp . getContextPath ( ) ) ; } // d651265 createGlobalTagLibraryCache ( ) ; JspXmlExtConfig webAppConfig = createConfig ( webapp ) ; JspClassloaderContext jspClassloaderContext = createJspClassloaderContext ( webapp , webAppConfig ) ; ExtensionProcessor extensionProcessor = createProcessor ( webapp , webAppConfig , jspClassloaderContext ) ; AnnotationHelperManager aHM = new AnnotationHelperManager ( webapp ) ; com . ibm . wsspi . webcontainer . annotation . AnnotationHelperManager . addInstance ( webapp , aHM ) ; if ( com . ibm . ejs . ras . TraceComponent . isAnyTracingEnabled ( ) && logger . isLoggable ( Level . FINE ) ) { logger . logp ( Level . FINE , "JSPExtensionFactory" , "createExtensionProcessor" , "Added AnnotationHelperManager of: " + aHM ) ; logger . logp ( Level . FINE , "JSPExtensionFactory" , "createExtensionProcessor" , "with IServletContext of: " + webapp ) ; } if ( com . ibm . ejs . ras . TraceComponent . isAnyTracingEnabled ( ) && logger . isLoggable ( Level . FINE ) ) { logger . exiting ( CLASS_NAME , "createExtensionProcessor" , " app contextPath --> " + webapp . getContextPath ( ) ) ; } // d651265 return extensionProcessor ;
public class AbcGrammar {
    /**
     * BarFly-style macros - to be defined better.
     * field-macro ::= %x6D.3A *WSP 1*(WSP / VCHAR) header-eol
     * <p><tt>m:</tt></p>
     */
    Rule FieldMacro() {
        // "m:" prefix, optional leading whitespace (suppressed), then the macro
        // text captured as a single "Macro" node, terminated by the header EOL.
        return Sequence(String("m:"), ZeroOrMore(WSP()).suppressNode(),
                OneOrMore(FirstOf(WSP(), VCHAR())).label("Macro").suppressSubnodes(),
                HeaderEol()).label(FieldMacro);
    }
}
public class DefaultStorageService { /** * Audit methods */ private void auditPath ( String action , String path ) { } }
AuditEvent auditEvent = new AuditEvent ( action ) ; auditEvent . getContext ( ) . put ( "PATH" , path ) ; auditor . logEvent ( auditEvent ) ;
public class ConfigurationUpdate { /** * Return the value with the given path and key if it exists . * @ param path the path * @ param key the key * @ return the value */ public Optional < String > value ( String path , String key ) { } }
return Optional . ofNullable ( paths . get ( path ) ) . flatMap ( map -> Optional . ofNullable ( map . get ( key ) ) ) ;
public class RetouchedBloomFilter { /** * Adds a list of false positive information to < i > this < / i > retouched Bloom filter . * @ param keys The list of false positive . */ public void addFalsePositive ( List < Key > keys ) { } }
if ( keys == null ) { throw new NullPointerException ( "ArrayList<Key> can not be null" ) ; } for ( Key k : keys ) { addFalsePositive ( k ) ; }
public class JobContext {
    /**
     * Get the {@link Reducer} class for the job.
     *
     * @return the configured {@link Reducer} class, falling back to the
     *         identity {@code Reducer} when none is set
     * @throws ClassNotFoundException if the configured class cannot be loaded
     */
    @SuppressWarnings("unchecked")
    public Class<? extends Reducer<?, ?, ?, ?>> getReducerClass() throws ClassNotFoundException {
        // REDUCE_CLASS_ATTR is resolved from the job configuration.
        return (Class<? extends Reducer<?, ?, ?, ?>>) conf.getClass(REDUCE_CLASS_ATTR, Reducer.class);
    }
}
public class DataTypeParser { /** * Parses SQL - 92 Character string data types . < character string type > : : = CHARACTER [ < left paren > < length > < right paren > ] | * CHAR [ < left paren > < length > < right paren > ] | CHARACTER VARYING < left paren > < length > < right paren > | CHAR VARYING < left * paren > < length > < right paren > | VARCHAR < left paren > < length > < right paren > * @ param tokens * @ return the { @ link DataType } * @ throws ParsingException */ protected DataType parseCharStringType ( DdlTokenStream tokens ) throws ParsingException { } }
DataType dataType = null ; String typeName = null ; if ( tokens . matches ( DataTypes . DTYPE_VARCHAR ) ) { typeName = getStatementTypeName ( DataTypes . DTYPE_VARCHAR ) ; dataType = new DataType ( typeName ) ; consume ( tokens , dataType , false , DataTypes . DTYPE_VARCHAR ) ; long length = parseBracketedLong ( tokens , dataType ) ; dataType . setLength ( length ) ; } else if ( tokens . matches ( DataTypes . DTYPE_CHAR_VARYING ) ) { typeName = getStatementTypeName ( DataTypes . DTYPE_CHAR_VARYING ) ; dataType = new DataType ( typeName ) ; consume ( tokens , dataType , false , DataTypes . DTYPE_CHAR_VARYING ) ; long length = parseBracketedLong ( tokens , dataType ) ; dataType . setLength ( length ) ; } else if ( tokens . matches ( DataTypes . DTYPE_CHARACTER_VARYING ) ) { typeName = getStatementTypeName ( DataTypes . DTYPE_CHARACTER_VARYING ) ; dataType = new DataType ( typeName ) ; consume ( tokens , dataType , false , DataTypes . DTYPE_CHARACTER_VARYING ) ; long length = parseBracketedLong ( tokens , dataType ) ; dataType . setLength ( length ) ; } else if ( tokens . matches ( DataTypes . DTYPE_CHAR ) || tokens . matches ( DataTypes . DTYPE_CHARACTER ) ) { dataType = new DataType ( ) ; typeName = consume ( tokens , dataType , false ) ; // " CHARACTER " , " CHAR " , dataType . setName ( typeName ) ; long length = getDefaultLength ( ) ; if ( tokens . matches ( L_PAREN ) ) { length = parseBracketedLong ( tokens , dataType ) ; } dataType . setLength ( length ) ; } return dataType ;
public class RedisConnectionConfiguration { /** * Create a { @ link RedisClusterConfiguration } if necessary . * @ return { @ literal null } if no cluster settings are set . */ protected final RedisClusterConfiguration getClusterConfiguration ( ) { } }
if ( this . clusterConfiguration != null ) { return this . clusterConfiguration ; } if ( this . properties . getCluster ( ) == null ) { return null ; } RedisProperties . Cluster clusterProperties = this . properties . getCluster ( ) ; RedisClusterConfiguration config = new RedisClusterConfiguration ( clusterProperties . getNodes ( ) ) ; if ( clusterProperties . getMaxRedirects ( ) != null ) { config . setMaxRedirects ( clusterProperties . getMaxRedirects ( ) ) ; } if ( this . properties . getPassword ( ) != null ) { config . setPassword ( RedisPassword . of ( this . properties . getPassword ( ) ) ) ; } return config ;
public class DefaultPivotModel { /** * @ SuppressWarnings ( " unchecked " ) * private MultiKeyMap getData2 ( PivotField dataField ) { * MultiKeyMap data = new MultiKeyMap ( ) ; * List < List < Object > > rowKeys = getRowKeys ( ) ; * System . out . println ( " rowKeys . size ( ) = " + rowKeys . size ( ) ) ; * List < List < Object > > columnKeys = getColumnKeys ( ) ; * System . out . println ( " columnKeys . size ( ) = " + columnKeys . size ( ) ) ; * MultiKeyMap filtersMap = new MultiKeyMap ( ) ; * List < PivotField > rowFields = getFields ( PivotField . Area . ROW ) ; * List < PivotField > columnFields = getFields ( PivotField . Area . COLUMN ) ; * for ( List < Object > rowKey : rowKeys ) { * for ( List < Object > columnKey : columnKeys ) { * Map < Integer , Object > rowFilter = getFilter ( rowFields , rowKey ) ; * Map < Integer , Object > columnFilter = getFilter ( columnFields , columnKey ) ; * Map < Integer , Object > filter = new HashMap < Integer , Object > ( ) ; * filter . putAll ( rowFilter ) ; * filter . putAll ( columnFilter ) ; * filtersMap . put ( rowKey , columnKey , filter ) ; * List < Map < Integer , Object > > tmp = new ArrayList < Map < Integer , Object > > ( filtersMap . values ( ) ) ; * Map < Integer , Object > [ ] filters = new HashMap [ tmp . size ( ) ] ; * filters = tmp . toArray ( filters ) ; * MultiValueMap values = getValues2 ( dataField , filters ) ; * for ( List < Object > rowKey : rowKeys ) { * for ( List < Object > columnKey : columnKeys ) { * List < Object > valuesForFilter = ( List < Object > ) values . get ( filtersMap . get ( rowKey , columnKey ) ) ; * if ( ! CollectionUtils . isEmpty ( valuesForFilter ) ) { * System . out . println ( " filter = " + filter ) ; * System . out . println ( " values = " + values ) ; * System . out . println ( values . size ( ) ) ; * Object summary = PivotUtils . getSummary ( dataField , valuesForFilter ) ; * System . out . println ( " summary = " + summary ) ; * data . 
put ( rowKey , columnKey , summary ) ; * return data ; */ public Tree getColumnsHeaderTree ( ) { } }
if ( columnsHeaderTree == null ) { Node root = new Node ( ) ; insertChildren ( root , getFields ( PivotField . Area . COLUMN ) ) ; columnsHeaderTree = new Tree ( root ) ; } return columnsHeaderTree ;
public class SimpleQueryExecutor {
    /**
     * Executes the query: publishes the query text and a matching COUNT(*)
     * query to the worker thread via {@code inputWrapper}, then blocks until
     * {@code resultWrapper} is serviced.
     *
     * @return the query result, or {@code null} if a JDBC statement could not be created
     * @throws QueryException       if the worker reports a query failure
     * @throws InterruptedException if interrupted while waiting; the query is cancelled first
     */
    public synchronized QueryResult execute() throws QueryException, InterruptedException {
        // Build the query text and its row-count companion.
        // (Removed a stray debug System.out.println of the query string.)
        String queryString = query.getText();
        String countQueryString = "SELECT COUNT(*) FROM (" + queryString + ")";
        // Hand the work to the worker thread through the shared input wrapper.
        synchronized (inputWrapper) {
            try {
                inputWrapper.statement = conn.createStatement();
                inputWrapper.countStatement = conn.createStatement();
            } catch (SQLException e) {
                // NOTE(review): legacy behavior preserved - the failure is only
                // printed and null returned; consider raising QueryException instead.
                e.printStackTrace();
                return null;
            }
            inputWrapper.query = queryString;
            inputWrapper.countQuery = countQueryString;
            inputWrapper.pending = true;
            inputWrapper.notify();
        }
        synchronized (resultWrapper) {
            try {
                // Wait for the worker to complete the query.
                while (!resultWrapper.serviced) {
                    resultWrapper.wait();
                }
                if (resultWrapper.exception != null) {
                    throw resultWrapper.exception;
                }
            } catch (InterruptedException e) {
                cancel();
                throw e;
            } finally {
                resultWrapper.serviced = false;
            }
            return new QueryResult(resultWrapper.resultSet, resultWrapper.count, resultWrapper.executeTime);
        }
    }
}
public class Ifc2x3tc1PackageImpl {
    /**
     * <!-- begin-user-doc -->
     * Returns the {@link EClass} for IfcConnectionCurveGeometry, resolving it
     * lazily from the globally registered Ifc2x3tc1 package (classifier index 101).
     * <!-- end-user-doc -->
     * @generated
     */
    public EClass getIfcConnectionCurveGeometry() {
        if (ifcConnectionCurveGeometryEClass == null) {
            // Lazily resolved and cached on first access.
            ifcConnectionCurveGeometryEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc2x3tc1Package.eNS_URI).getEClassifiers().get(101);
        }
        return ifcConnectionCurveGeometryEClass;
    }
}
public class UnitConverter { /** * 转换毫秒为带时间单位的字符串 , 单位最大到day级别 , 四舍五入 * @ param scale 小数后的精度 */ public static String toTimeUnit ( long millis , int scale ) { } }
if ( millis < MILLIS_PER_SECOND ) { return String . format ( "%4dms" , millis ) ; } if ( millis < MILLIS_PER_MINUTE ) { return String . format ( "%" + ( scale == 0 ? 2 : 3 + scale ) + '.' + scale + "fs" , millis * 1d / MILLIS_PER_SECOND ) ; } if ( millis < MILLIS_PER_HOUR ) { return String . format ( "%" + ( scale == 0 ? 2 : 3 + scale ) + '.' + scale + "fm" , millis * 1d / MILLIS_PER_MINUTE ) ; } if ( millis < MILLIS_PER_DAY ) { return String . format ( "%" + ( scale == 0 ? 2 : 3 + scale ) + '.' + scale + "fh" , millis * 1d / MILLIS_PER_HOUR ) ; } return String . format ( "%" + ( scale == 0 ? 2 : 3 + scale ) + '.' + scale + "fd" , millis * 1d / MILLIS_PER_DAY ) ;
public class Ftp { /** * 初始化连接 * @ param host 域名或IP * @ param port 端口 * @ param user 用户名 * @ param password 密码 * @ param mode 模式 * @ return this */ public Ftp init ( String host , int port , String user , String password , FtpMode mode ) { } }
final FTPClient client = new FTPClient ( ) ; client . setControlEncoding ( this . charset . toString ( ) ) ; try { // 连接ftp服务器 client . connect ( host , port ) ; // 登录ftp服务器 client . login ( user , password ) ; } catch ( IOException e ) { throw new FtpException ( e ) ; } final int replyCode = client . getReplyCode ( ) ; // 是否成功登录服务器 if ( false == FTPReply . isPositiveCompletion ( replyCode ) ) { try { client . disconnect ( ) ; } catch ( IOException e ) { // ignore } throw new FtpException ( "Login failed for user [{}], reply code is: [{}]" , user , replyCode ) ; } this . client = client ; if ( mode != null ) { setMode ( mode ) ; } return this ;
public class MolecularFormulaManipulator { /** * Returns the number of double bond equivalents in this molecule . * @ param formula The IMolecularFormula to calculate * @ return The number of DBEs * @ throws CDKException if DBE cannot be be evaluated * @ cdk . keyword DBE * @ cdk . keyword double bond equivalent */ public static double getDBE ( IMolecularFormula formula ) throws CDKException { } }
int valencies [ ] = new int [ 5 ] ; IAtomContainer ac = getAtomContainer ( formula ) ; AtomTypeFactory factory = AtomTypeFactory . getInstance ( "org/openscience/cdk/config/data/structgen_atomtypes.xml" , ac . getBuilder ( ) ) ; for ( int f = 0 ; f < ac . getAtomCount ( ) ; f ++ ) { IAtomType [ ] types = factory . getAtomTypes ( ac . getAtom ( f ) . getSymbol ( ) ) ; if ( types . length == 0 ) throw new CDKException ( "Calculation of double bond equivalents not possible due to problems with element " + ac . getAtom ( f ) . getSymbol ( ) ) ; // valencies [ ( int ) ( types [ 0 ] . getBondOrderSum ( ) + ac . getAtom ( f ) . getFormalCharge ( ) ) ] + + ; valencies [ types [ 0 ] . getBondOrderSum ( ) . intValue ( ) ] ++ ; } return 1 + ( valencies [ 4 ] ) + ( valencies [ 3 ] / 2 ) - ( valencies [ 1 ] / 2 ) ;
public class BitFileFilter { /** * Check the record locally . */ public boolean doLocalCriteria ( StringBuffer strbFilter , boolean bIncludeFileName , Vector < BaseField > vParamList ) { } }
Integer objTargetValue = ( Integer ) m_fldToCompare . getData ( ) ; if ( m_fldRecordTarget == null ) m_fldRecordTarget = this . getOwner ( ) . getField ( m_fsRecordTarget ) ; Integer objRecordValue = ( Integer ) m_fldRecordTarget . getData ( ) ; int iTargetValue = 0 ; if ( objTargetValue != null ) iTargetValue = objTargetValue . intValue ( ) ; int iRecordValue = 0 ; if ( objRecordValue != null ) iRecordValue = objRecordValue . intValue ( ) ; int iMask = BitReferenceField . ALL_TABLES ; if ( m_fldToCompare instanceof BitReferenceField ) iMask = ( ( BitReferenceField ) m_fldToCompare ) . getBitsToCheck ( ) ; boolean bCheckForMatch = true ; if ( m_bNoFilterIfAll ) if ( iTargetValue == - 1 ) bCheckForMatch = false ; // No filter if ( m_bNoFilterIfNone ) if ( iTargetValue == ~ iMask ) bCheckForMatch = false ; // No filter if ( bCheckForMatch ) if ( ( iMask & iTargetValue & iRecordValue ) == 0 ) return false ; // No match return super . doLocalCriteria ( strbFilter , bIncludeFileName , vParamList ) ;
public class GlobalSyncRedis { /** * Get cache from memory . * @ param name * @ return */ public static Cache getSyncCache ( String name ) { } }
if ( StrKit . isBlank ( name ) || ! GlobalSyncRedis . caches . containsKey ( name ) ) { return Redis . use ( ) ; } return GlobalSyncRedis . caches . get ( name ) ;
public class CompilerDef { /** * Sets compiler type . * < table width = " 100 % " border = " 1 " > * < thead > Supported compilers < / thead > * < tr > * < td > gcc ( default ) < / td > * < td > GCC C + + compiler < / td > * < / tr > * < tr > * < td > g + + < / td > * < td > GCC C + + compiler < / td > * < / tr > * < tr > * < td > c + + < / td > * < td > GCC C + + compiler < / td > * < / tr > * < tr > * < td > g77 < / td > * < td > GNU Fortran compiler < / td > * < / tr > * < tr > * < td > msvc < / td > * < td > Microsoft Visual C + + < / td > * < / tr > * < tr > * < td > bcc < / td > * < td > Borland C + + Compiler < / td > * < / tr > * < tr > * < td > msrc < / td > * < td > Microsoft Resource Compiler < / td > * < / tr > * < tr > * < td > brc < / td > * < td > Borland Resource Compiler < / td > * < / tr > * < tr > * < td > df < / td > * < td > Compaq Visual Fortran Compiler < / td > * < / tr > * < tr > * < td > midl < / td > * < td > Microsoft MIDL Compiler < / td > * < / tr > * < tr > * < td > icl < / td > * < td > Intel C + + compiler for Windows ( IA - 32 ) < / td > * < / tr > * < tr > * < td > ecl < / td > * < td > Intel C + + compiler for Windows ( IA - 64 ) < / td > * < / tr > * < tr > * < td > icc < / td > * < td > Intel C + + compiler for Linux ( IA - 32 ) < / td > * < / tr > * < tr > * < td > ecc < / td > * < td > Intel C + + compiler for Linux ( IA - 64 ) < / td > * < / tr > * < tr > * < td > CC < / td > * < td > Sun ONE C + + compiler < / td > * < / tr > * < tr > * < td > aCC < / td > * < td > HP aC + + C + + Compiler < / td > * < / tr > * < tr > * < td > os390 < / td > * < td > OS390 C Compiler < / td > * < / tr > * < tr > * < td > os400 < / td > * < td > Icc Compiler < / td > * < / tr > * < tr > * < td > sunc89 < / td > * < td > Sun C89 C Compiler < / td > * < / tr > * < tr > * < td > xlC < / td > * < td > VisualAge C Compiler < / td > * < / tr > * < tr > * < td > uic < / td > * < td > Qt user interface compiler < / td > * < / tr > * < tr > * < td > 
moc < / td > * < td > Qt meta - object compiler < / td > * < / tr > * < tr > * < td > wcl < / td > * < td > OpenWatcom C / C + + compiler < / td > * < / tr > * < tr > * < td > wfl < / td > * < td > OpenWatcom FORTRAN compiler < / td > * < / tr > * < / table > */ public void setName ( final CompilerEnum name ) throws BuildException { } }
if ( isReference ( ) ) { throw tooManyAttributes ( ) ; } final Compiler compiler = name . getCompiler ( ) ; setProcessor ( compiler ) ;
public class AutoTask { /** * Constructor . * @ param application The parent application . * @ param strParams The task properties . */ public void init ( App application , String strParams , Map < String , Object > properties ) { } }
m_application = ( App ) application ; if ( m_application != null ) m_application . addTask ( this , null ) ; // Add this task to the list if ( properties != null ) { if ( m_properties != null ) m_properties . putAll ( properties ) ; else m_properties = properties ; } if ( strParams != null ) { if ( m_properties == null ) m_properties = new HashMap < String , Object > ( ) ; Util . parseArgs ( m_properties , strParams ) ; } m_recordOwnerCollection = new RecordOwnerCollection ( this ) ;
public class MultipartContent {
    /**
     * Configures a field part with the given field name and value. Delegates
     * to the full {@code part} overload with no file name and the default TEXT
     * content type.
     *
     * @param fieldName the field name
     * @param value     the value
     * @return a reference to this {@link MultipartContent} instance
     */
    public MultipartContent part(String fieldName, String value) {
        return part(fieldName, null, ContentTypes.TEXT.getAt(0), value);
    }
}
public class FingerprintGenerator { /** * Generate a fingerprint for a given string * @ param str The string to fingerprint * @ return The fingerprint generated */ public static String fingerprint ( String str ) { } }
try { MessageDigest md = MessageDigest . getInstance ( "SHA-1" ) ; return byteArrayToHexString ( md . digest ( str . getBytes ( ) ) ) ; } catch ( NoSuchAlgorithmException nsae ) { LOGGER . warning ( "Unable to calculate the fingerprint for string [" + str + "]." ) ; return null ; }
public class GVRVertexBuffer { /** * Retrieves a vertex attribute as a float buffer . * The attribute name must be one of the * attributes named in the descriptor passed to the constructor . * @ param attributeName name of the attribute to update * @ throws IllegalArgumentException if attribute name not in descriptor vertex attribute is not < i > float < / i > * @ see # setFloatArray ( String , float [ ] ) * @ see # getFloatVec ( String ) */ public FloatBuffer getFloatVec ( String attributeName ) { } }
int size = getAttributeSize ( attributeName ) ; if ( size <= 0 ) { return null ; } size *= 4 * getVertexCount ( ) ; ByteBuffer buffer = ByteBuffer . allocateDirect ( size ) . order ( ByteOrder . nativeOrder ( ) ) ; FloatBuffer data = buffer . asFloatBuffer ( ) ; if ( ! NativeVertexBuffer . getFloatVec ( getNative ( ) , attributeName , data , 0 , 0 ) ) { throw new IllegalArgumentException ( "Attribute name " + attributeName + " cannot be accessed" ) ; } return data ;
public class Locations {
    /**
     * Set of {@link Location locations} where the class file of the supplied class can be found.<br>
     * Note that this is really a set, since the same (or in bad cases a different version of the same) class
     * might be found within the classpath several times.
     *
     * @param clazz a {@link Class} to import
     * @return {@link Location Locations} of the respective class file within the classpath
     */
    @PublicAPI(usage = ACCESS)
    public static Set<Location> ofClass(Class<?> clazz) {
        // Looks up the "com/example/Foo.class"-style resource on the classpath.
        return getLocationsOf(asResourceName(clazz.getName()) + ".class");
    }
}
public class AbcGrammar {
    /**
     * userdef ::= userdef-symbol *WSP "=" *WSP (long-gracing / chord-or-text)
     */
    Rule Userdef() {
        // Symbol, "=", then either a gracing or a chord/text, with optional
        // (suppressed) whitespace around the "=".
        return Sequence(UserdefSymbol(), ZeroOrMoreS(WSP()).suppressNode(), String("="), ZeroOrMoreS(WSP()).suppressNode(),
                FirstOfS( // SequenceS ( String ( "!" ) , LongGracing ( ) , String ( "!" ) ) ,
                        Gracing(), ChordOrText())).label(Userdef);
    }
}
public class LeastRecentlyUsedCache {
    /**
     * Fetches an object from the cache. Synchronization at cache level is kept
     * to a minimum; the provider is called upon cache miss.
     *
     * @param key      the identity of the object
     * @param callable a callable to provide the object upon cache miss
     * @return the object requested
     * @throws Exception if the provider fails to create a new object
     */
    public V fetch(K key, Callable<V> callable) throws Exception {
        // locate() resolves the per-key entry; get() fills it via the callable on miss.
        return locate(key).get(callable);
    }
}
public class SessionCookieManager { /** * / * package */ List < Cookie > getCookies ( String domain ) { } }
cookieManager . removeExpiredCookie ( ) ; String cookies = cookieManager . getCookie ( domain ) ; List < Cookie > result = new LinkedList < Cookie > ( ) ; if ( cookies == null ) { return result ; } for ( String cookie : cookies . split ( COOKIE_SEPARATOR ) ) { String [ ] cookieValues = cookie . split ( "=" , 2 ) ; if ( cookieValues . length >= 2 ) { result . add ( new Cookie ( cookieValues [ 0 ] . trim ( ) , cookieValues [ 1 ] , domain , null , null ) ) ; } } return result ;
public class EventBuilder {
    /**
     * Sets the culprit in the event based on a {@link StackTraceElement}.
     *
     * @param frame stack frame during which the event was captured
     * @return the current {@code EventBuilder} for chained calls
     * @deprecated Culprit has been removed in favor of Transaction.
     */
    @Deprecated
    public EventBuilder withCulprit(StackTraceElement frame) {
        // Builds a culprit string from class, method, file and line, then
        // delegates to the String overload.
        return withCulprit(buildCulpritString(frame.getClassName(), frame.getMethodName(), frame.getFileName(), frame.getLineNumber()));
    }
}
public class RTreeIndexExtension {
    /**
     * {@inheritDoc}
     *
     * Registers the IS_EMPTY_FUNCTION implementation: geometry data counts as
     * empty when the blob is null, flagged empty, or contains no geometry.
     */
    @Override
    public void createIsEmptyFunction() {
        createFunction(IS_EMPTY_FUNCTION, new GeometryFunction() {
            @Override
            public Object execute(GeoPackageGeometryData data) {
                return data == null || data.isEmpty() || data.getGeometry() == null;
            }
        });
    }
}
public class JmxTransformer {
    /**
     * Handy method which runs the JmxProcess: schedules its servers as jobs
     * and waits a fixed interval for them to complete.
     *
     * @param process the process whose servers are scheduled and executed
     * @throws Exception if scheduling or job processing fails
     */
    public void executeStandalone(JmxProcess process) throws Exception {
        this.masterServersList = process.getServers();
        this.serverScheduler.start();
        this.processServersIntoJobs();
        // Sleep for 10 seconds to wait for jobs to complete.
        // There should be a better way, but it seems that way isn't working
        // right now.
        Thread.sleep(MILLISECONDS.convert(10, SECONDS));
    }
}
public class GlobalTypeImpl {
    /**
     * <!-- begin-user-doc -->
     * Sets the type, firing an EMF SET notification with the old and new
     * values when anyone is listening.
     * <!-- end-user-doc -->
     * @generated
     */
    public void setType(String newType) {
        String oldType = type;
        type = newType;
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, DroolsPackage.GLOBAL_TYPE__TYPE, oldType, type));
    }
}
public class CreatorUtils { /** * Returns the first occurence of the annotation found on the types * @ param annotation the annotation * @ param hasAnnotationsList the types * @ return the first occurence of the annotation found on the types * @ param < T > a T object . */ public static < T extends Annotation > Optional < T > getAnnotation ( Class < T > annotation , List < ? extends HasAnnotations > hasAnnotationsList ) { } }
for ( HasAnnotations accessor : hasAnnotationsList ) { if ( accessor . isAnnotationPresent ( annotation ) ) { return Optional . of ( accessor . getAnnotation ( annotation ) ) ; } } return Optional . absent ( ) ;
public class ClusKernel {
    /**
     * Makes this cluster older by weighting it, then adds the given cluster to
     * it. To add something to the cluster without weighting it, use
     * <code>add(Cluster)</code> instead.
     *
     * @param other          the other cluster to be added to this one
     * @param timeDifference the time elapsed between the last update of the
     *                       <code>Entry</code> to which this cluster belongs and the update
     *                       that caused the call to this function
     * @param negLambda      a parameter needed to weight the cluster
     * @see #add(tree.Kernel)
     */
    protected void aggregate(ClusKernel other, long timeDifference, double negLambda) {
        // Decay this cluster's weight for the elapsed time, then merge in the other cluster.
        makeOlder(timeDifference, negLambda);
        add(other);
    }
}
public class EthiopianCalendar {
    /**
     * Creates a new Ethiopian calendar date.
     *
     * @param era        Ethiopian era
     * @param yearOfEra  Ethiopian year of era in the range 1-9999 (1-15499 if amete alem)
     * @param month      Ethiopian month
     * @param dayOfMonth Ethiopian day of month in the range 1-30
     * @return new instance of {@code EthiopianCalendar}
     * @throws IllegalArgumentException in case of any inconsistencies
     * @since 3.11/4.8
     */
    public static EthiopianCalendar of(EthiopianEra era, int yearOfEra, EthiopianMonth month, int dayOfMonth) {
        // Delegates to the int-month overload.
        return EthiopianCalendar.of(era, yearOfEra, month.getValue(), dayOfMonth);
    }
}
public class AbstractCache { /** * Enable caching to the phone ' s internal storage or SD card . * @ param context * the current context * @ param storageDevice * where to store the cached files , either { @ link # DISK _ CACHE _ INTERNAL } or * { @ link # DISK _ CACHE _ SDCARD } ) * @ return */ public boolean enableDiskCache ( Context context , int storageDevice ) { } }
Context appContext = context . getApplicationContext ( ) ; String rootDir = null ; if ( storageDevice == DISK_CACHE_SDCARD && Environment . MEDIA_MOUNTED . equals ( Environment . getExternalStorageState ( ) ) ) { // SD - card available rootDir = Environment . getExternalStorageDirectory ( ) . getAbsolutePath ( ) + "/Android/data/" + appContext . getPackageName ( ) + "/cache" ; } else { File internalCacheDir = appContext . getCacheDir ( ) ; // apparently on some configurations this can come back as null if ( internalCacheDir == null ) { return ( isDiskCacheEnabled = false ) ; } rootDir = internalCacheDir . getAbsolutePath ( ) ; } setRootDir ( rootDir ) ; File outFile = new File ( diskCacheDirectory ) ; if ( outFile . mkdirs ( ) ) { File nomedia = new File ( diskCacheDirectory , ".nomedia" ) ; try { nomedia . createNewFile ( ) ; } catch ( IOException e ) { Log . e ( LOG_TAG , "Failed creating .nomedia file" ) ; } } isDiskCacheEnabled = outFile . exists ( ) ; if ( ! isDiskCacheEnabled ) { Log . w ( LOG_TAG , "Failed creating disk cache directory " + diskCacheDirectory ) ; } else { // Log . d ( name , " enabled write through to " + diskCacheDirectory ) ; // sanitize disk cache // Log . d ( name , " sanitize DISK cache " ) ; sanitizeDiskCache ( ) ; } return isDiskCacheEnabled ;
public class CmsWindowExtension { /** * Tries to open a new browser window . < p > * If openning the window fails , the given callback is called . < p > * @ param location the URL to open in the new window * @ param target the target window name * @ param onFailure the callback to call if opening the window fails */ public void open ( String location , String target , final Runnable onFailure ) { } }
String id = RandomStringUtils . randomAlphanumeric ( 16 ) ; m_callbackMap . put ( id , new FutureCallback < Boolean > ( ) { public void onFailure ( Throwable t ) { } public void onSuccess ( Boolean result ) { if ( ! result . booleanValue ( ) ) { onFailure . run ( ) ; } } } ) ; getRpcProxy ( I_CmsWindowClientRpc . class ) . open ( location , target , id ) ;
public class LogLog {
    /**
     * This method is used to output log4j internal debug statements.
     * Output goes to <code>System.out</code>.
     *
     * @param msg the message to print; emitted only when debug is enabled and
     *            quiet mode is off
     */
    public static void debug(String msg) {
        if (debugEnabled && !quietMode) {
            System.out.println(PREFIX + msg);
        }
    }
}
public class ModelRegistry { /** * Adds the type [ ] - & t ; model relationship to the registry . * @ param types must not be < code > null < / code > . * @ param source must not be < code > null < / code > . */ public void add ( String [ ] types , OsgiModelSource < ? > source ) { } }
for ( String resourceType : types ) { this . typeNameToModelSourcesMap . put ( resourceType , source ) ; } clearLookupCaches ( ) ;
public class JobTracker { /** * lock " is under JobTracker lock to avoid deadlocks . */ synchronized public Collection < TaskTrackerStatus > blacklistedTaskTrackers ( ) { } }
Collection < TaskTrackerStatus > blacklistedTrackers = new ArrayList < TaskTrackerStatus > ( ) ; synchronized ( taskTrackers ) { for ( TaskTracker tt : taskTrackers . values ( ) ) { TaskTrackerStatus status = tt . getStatus ( ) ; if ( faultyTrackers . isBlacklisted ( status . getHost ( ) ) ) { blacklistedTrackers . add ( status ) ; } } } return blacklistedTrackers ;
public class HtmlTool { /** * Transforms the given HTML content by adding IDs to all heading elements ( { @ code h1-6 } ) that * do not have one . * IDs on heading elements are used to indicate positions within a HTML page in HTML5 . If a * heading tag without an { @ code id } is found , its " slug " is generated automatically based on * the heading contents and used as the ID . * Note that the algorithm also modifies existing IDs that have symbols not allowed in CSS * selectors , e . g . " : " , " . " , etc . The symbols are removed . * @ param content * HTML content to modify * @ return HTML content with all heading elements having { @ code id } attributes . If all headings * were with IDs already , the original content is returned . * @ since 1.0 */ public String ensureHeadingIds ( String content , String idSeparator ) { } }
Element body = parseContent ( content ) ; // first find all existing IDs ( to avoid generating duplicates ) List < Element > idElems = body . select ( "*[id]" ) ; Set < String > ids = new HashSet < String > ( ) ; boolean modified = false ; for ( Element idElem : idElems ) { // fix all existing IDs - remove colon and other symbols which mess up jQuery String id = idElem . id ( ) ; idElem . attr ( "id" , adaptSlug ( id , idSeparator ) ) ; modified = true ; ids . add ( idElem . id ( ) ) ; } List < String > headNoIds = concat ( HEADINGS , ":not([id])" , true ) ; // select all headings that do not have an ID List < Element > headingsNoId = body . select ( StringUtil . join ( headNoIds , ", " ) ) ; if ( ! headingsNoId . isEmpty ( ) || modified ) { for ( Element heading : headingsNoId ) { String headingText = heading . text ( ) ; String headingSlug = slug ( headingText , idSeparator ) ; // also limit slug to 50 symbols if ( headingSlug . length ( ) > 50 ) { headingSlug = headingSlug . substring ( 0 , 50 ) ; } String headingId = generateUniqueId ( ids , headingSlug ) ; heading . attr ( "id" , headingId ) ; } return body . html ( ) ; } else { // nothing to update return content ; }
public class RebalanceUtils { /** * An interim cluster ought to be a super set of current cluster . I . e . , it * ought to either be the same as current cluster ( every partition is mapped * to the same node of current & interim ) , or it ought to have more nodes * ( possibly in new zones ) without partitions . * @ param currentCluster * @ param interimCluster */ public static void validateCurrentInterimCluster ( final Cluster currentCluster , final Cluster interimCluster ) { } }
validateClusterPartitionCounts ( currentCluster , interimCluster ) ; validateClusterNodeState ( currentCluster , interimCluster ) ; validateClusterPartitionState ( currentCluster , interimCluster ) ; return ;
public class HashCalculator {

    /**
     * Computes SHA-1 hashes over the whole byte array plus its leading and
     * trailing "bucket" of bytes.
     *
     * @param fileWithoutSpaces the file contents to hash
     * @param bucketSize size of the head/tail bucket; truncated to an int
     * @return the three hashes bundled in a {@link HashCalculationResult}
     * @throws IOException propagated from the underlying hash calculation
     */
    private HashCalculationResult hashBuckets(byte[] fileWithoutSpaces, double bucketSize)
            throws IOException {

        // (int) bucketSize truncates toward zero, e.g. 1.2 -> 1.
        final int bucketLength = (int) bucketSize;
        final int totalLength = fileWithoutSpaces.length;

        // Head: the first bucketLength bytes.
        byte[] headBytes = Arrays.copyOfRange(fileWithoutSpaces, 0, bucketLength);
        // Tail: the last bucketLength bytes.
        byte[] tailBytes =
            Arrays.copyOfRange(fileWithoutSpaces, totalLength - bucketLength, totalLength);

        String fullFileHash = calculateByteArraySHA1(fileWithoutSpaces);
        String headHash = calculateByteArraySHA1(headBytes);
        String tailHash = calculateByteArraySHA1(tailBytes);
        return new HashCalculationResult(fullFileHash, headHash, tailHash);
    }
}