signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class FilterManager { /** * This method initialises the filter manager with the supplied
* transaction configuration .
* @ param txn The transaction name
* @ param btc The configuration */
public void init ( String txn , TransactionConfig btc ) { } } | FilterProcessor fp = null ; if ( btc . getFilter ( ) != null ) { fp = new FilterProcessor ( txn , btc ) ; } synchronized ( filterMap ) { // Check if old filter processor needs to be removed
FilterProcessor oldfp = filterMap . get ( txn ) ; if ( oldfp != null ) { globalExclusionFilters . remove ( oldfp ) ; btxnFilters . remove ( oldfp ) ; } if ( fp != null ) { // Add new filter processor
filterMap . put ( txn , fp ) ; if ( fp . isIncludeAll ( ) ) { globalExclusionFilters . add ( fp ) ; } else { btxnFilters . add ( fp ) ; } } else { filterMap . remove ( txn ) ; } } |
public class DescribeConfigurationSetRequest { /** * A list of configuration set attributes to return .
* @ return A list of configuration set attributes to return .
* @ see ConfigurationSetAttribute */
public java . util . List < String > getConfigurationSetAttributeNames ( ) { } } | if ( configurationSetAttributeNames == null ) { configurationSetAttributeNames = new com . amazonaws . internal . SdkInternalList < String > ( ) ; } return configurationSetAttributeNames ; |
public class GroupFilter { /** * { @ inheritDoc } */
@ Override public GroupFilter and ( GroupFilter otherFilter ) { } } | checkNotNull ( otherFilter , "Other filter must be not a null" ) ; evaluation = new AndEvaluation < > ( evaluation , otherFilter , GroupMetadata :: getId ) ; return this ; |
public class ReportalUtil { /** * Shortlist variables which match a given regex . Returns empty empty list , if no
* eligible variable is found */
public static List < Variable > getVariablesByRegex ( final Collection < Variable > variables , final String regex ) { } } | final List < Variable > shortlistedVariables = new ArrayList < > ( ) ; if ( variables != null && regex != null ) { for ( final Variable var : variables ) { if ( var . getTitle ( ) . matches ( regex ) ) { shortlistedVariables . add ( var ) ; } } } return shortlistedVariables ; |
public class Blocking {

    /**
     * Blocks on an {@link Observable} and returns a single event or throws an {@link Exception}.
     *
     * Note that when this method is used, only the first item emitted will be returned. The caller
     * needs to make sure that the source {@link Observable} only ever returns a single item (or
     * none). The {@link BlockingObservable} code applies different operators like single, last,
     * first and more; these need to be applied manually.
     *
     * This code is based on {@link BlockingObservable#blockForSingle}, but does not wait forever.
     * Instead, it utilizes the internal {@link CountDownLatch} to optimize the timeout case, with
     * less GC and CPU overhead than chaining in an {@link Observable#timeout(long, TimeUnit)}
     * operator.
     *
     * If an error happens inside the {@link Observable}, it will be raised as an {@link Exception}.
     * If the timeout kicks in, a {@link TimeoutException} nested in a {@link RuntimeException} is
     * thrown to be fully compatible with the {@link Observable#timeout(long, TimeUnit)} behavior.
     *
     * @param observable the source {@link Observable}
     * @param timeout the maximum timeout before an exception is thrown
     * @param tu the timeout unit
     * @param <T> the type returned
     * @return the extracted value from the {@link Observable}, or throws in an error case
     */
    public static <T> T blockForSingle(final Observable<? extends T> observable,
            final long timeout, final TimeUnit tu) {
        final CountDownLatch latch = new CountDownLatch(1);
        TrackingSubscriber<T> subscriber = new TrackingSubscriber<T>(latch);
        Subscription subscription = observable.subscribe(subscriber);
        try {
            if (!latch.await(timeout, tu)) {
                // Timed out: cancel the subscription before surfacing the timeout.
                if (!subscription.isUnsubscribed()) {
                    subscription.unsubscribe();
                }
                // Nested TimeoutException keeps compatibility with Observable#timeout.
                throw new RuntimeException(new TimeoutException());
            }
        } catch (InterruptedException e) {
            // Restore the interrupt flag for callers higher up the stack.
            Thread.currentThread().interrupt();
            throw new RuntimeException("Interrupted while waiting for subscription to complete.", e);
        }
        // An error captured by the subscriber takes precedence over any item.
        if (subscriber.returnException() != null) {
            if (subscriber.returnException() instanceof RuntimeException) {
                throw (RuntimeException) subscriber.returnException();
            } else {
                throw new RuntimeException(subscriber.returnException());
            }
        }
        return subscriber.returnItem();
    }
}
public class PermissionCheckService { /** * Check permission for role and privilege key only .
* @ param authentication the authentication
* @ param privilege the privilege key
* @ return true if permitted */
public boolean hasPermission ( Authentication authentication , Object privilege ) { } } | return checkRole ( authentication , privilege , true ) || checkPermission ( authentication , null , privilege , false , true ) ; |
public class MessageFlyweight { /** * region SET Overloads */
public MessageFlyweight set ( boolean value ) { } } | buffer . putByte ( index + offset , ( byte ) ( value ? 1 : 0 ) ) ; index += Bits . BYTE_SIZE_IN_BYTES ; return this ; |
public class CmsAliasEditValidationRequest { /** * Sets the edited data list . < p >
* @ param data the edited data list */
public void setEditedData ( List < CmsAliasTableRow > data ) { } } | m_editedData = new ArrayList < CmsAliasTableRow > ( ) ; m_editedData . addAll ( data ) ; |
public class JCGLTextureFormats { /** * Check that the texture is of a depth + stencil - renderable format .
* @ param t The texture
* @ throws JCGLExceptionFormatError If the texture is not of the correct
* format */
public static void checkDepthStencilRenderableTexture2D ( final JCGLTextureFormat t ) throws JCGLExceptionFormatError { } } | if ( isDepthRenderable ( t ) && isStencilRenderable ( t ) ) { return ; } final String m = String . format ( "Format %s is not depth+stencil-renderable" , t ) ; assert m != null ; throw new JCGLExceptionFormatError ( m ) ; |
public class PeepholeFoldConstants { /** * Try to fold arithmetic binary operators */
private Node tryFoldArithmeticOp ( Node n , Node left , Node right ) { } } | Node result = performArithmeticOp ( n . getToken ( ) , left , right ) ; if ( result != null ) { result . useSourceInfoIfMissingFromForTree ( n ) ; reportChangeToEnclosingScope ( n ) ; n . replaceWith ( result ) ; return result ; } return n ; |
public class CheckedExceptionsFactory {

    /**
     * Constructs and initializes a new {@link CloneNotSupportedException} with the given
     * {@link String message} formatted with the given {@link Object[] arguments}.
     *
     * Delegates to the {@code Throwable}-accepting overload with a {@code null} cause.
     *
     * @param message {@link String} describing the {@link CloneNotSupportedException exception}.
     * @param args {@link Object[] arguments} used to replace format placeholders in the
     *        {@link String message}.
     * @return a new {@link CloneNotSupportedException} with the given {@link String message}.
     * @see #newCloneNotSupportedException(Throwable, String, Object...)
     * @see java.lang.CloneNotSupportedException
     */
    public static CloneNotSupportedException newCloneNotSupportedException(String message,
            Object... args) {
        return newCloneNotSupportedException(null, message, args);
    }
}
public class DirectUpdateDoublesSketchR { /** * Checks the validity of the direct memory capacity assuming n , k .
* @ param k the given value of k
* @ param n the given value of n
* @ param memCapBytes the current memory capacity in bytes */
static void checkDirectMemCapacity ( final int k , final long n , final long memCapBytes ) { } } | final int reqBufBytes = getUpdatableStorageBytes ( k , n ) ; if ( memCapBytes < reqBufBytes ) { throw new SketchesArgumentException ( "Possible corruption: Memory capacity too small: " + memCapBytes + " < " + reqBufBytes ) ; } |
public class ExtendedBundle { /** * < p > isImportingPAXWicketAPI . < / p >
* @ return < code > true < / code > if this bundle imports anything from the org . ops4j . pax . wicket . api Namespace */
public boolean isImportingPAXWicketAPI ( ) { } } | // Check if there is a package wiring ( either static or dynamic )
BundleWiring bundleWiring = bundle . adapt ( BundleWiring . class ) ; boolean hasPackageImport = hasWireMatchingFilter ( bundleWiring . getRequiredWires ( OSGI_WIRING_PACKAGE_NAMESPACE ) , bundleContext . importPAXWicketAPI ) ; // check if there is an require bundle wire . . .
return hasPackageImport || hasWireMatchingFilter ( bundleWiring . getRequiredWires ( OSGI_WIRING_BUNDLE_NAMESPACE ) , bundleContext . requirePAXWicketBundle ) ; |
public class StyleUtilities { /** * Get the { @ link PointSymbolizer } from the given rule .
* @ param rule the rule to check for symbolizers .
* @ return the first symbolizer found . */
public static PointSymbolizer pointSymbolizerFromRule ( Rule rule ) { } } | List < Symbolizer > symbolizers = rule . symbolizers ( ) ; PointSymbolizer pointSymbolizer = null ; for ( Symbolizer symbolizer : symbolizers ) { if ( symbolizer instanceof PointSymbolizer ) { pointSymbolizer = ( PointSymbolizer ) symbolizer ; break ; } } if ( pointSymbolizer == null ) { throw new IllegalArgumentException ( ) ; } return pointSymbolizer ; |
public class InstanceClient { /** * Updates the Shielded Instance config for an instance . You can only use this method on a stopped
* instance . This method supports PATCH semantics and uses the JSON merge patch format and
* processing rules .
* < p > Sample code :
* < pre > < code >
* try ( InstanceClient instanceClient = InstanceClient . create ( ) ) {
* ProjectZoneInstanceName instance = ProjectZoneInstanceName . of ( " [ PROJECT ] " , " [ ZONE ] " , " [ INSTANCE ] " ) ;
* ShieldedInstanceConfig shieldedInstanceConfigResource = ShieldedInstanceConfig . newBuilder ( ) . build ( ) ;
* List & lt ; String & gt ; fieldMask = new ArrayList & lt ; & gt ; ( ) ;
* Operation response = instanceClient . updateShieldedInstanceConfigInstance ( instance , shieldedInstanceConfigResource , fieldMask ) ;
* < / code > < / pre >
* @ param instance Name or id of the instance scoping this request .
* @ param shieldedInstanceConfigResource A set of Shielded Instance options .
* @ param fieldMask The fields that should be serialized ( even if they have empty values ) . If the
* containing message object has a non - null fieldmask , then all the fields in the field mask
* ( and only those fields in the field mask ) will be serialized . If the containing object does
* not have a fieldmask , then only non - empty fields will be serialized .
* @ throws com . google . api . gax . rpc . ApiException if the remote call fails */
@ BetaApi public final Operation updateShieldedInstanceConfigInstance ( ProjectZoneInstanceName instance , ShieldedInstanceConfig shieldedInstanceConfigResource , List < String > fieldMask ) { } } | UpdateShieldedInstanceConfigInstanceHttpRequest request = UpdateShieldedInstanceConfigInstanceHttpRequest . newBuilder ( ) . setInstance ( instance == null ? null : instance . toString ( ) ) . setShieldedInstanceConfigResource ( shieldedInstanceConfigResource ) . addAllFieldMask ( fieldMask ) . build ( ) ; return updateShieldedInstanceConfigInstance ( request ) ; |
public class CircularQueueCaptureQueriesListener { /** * Log all captured DELETE queries */
public void logDeleteQueriesForCurrentThread ( ) { } } | List < String > queries = getDeleteQueriesForCurrentThread ( ) . stream ( ) . map ( CircularQueueCaptureQueriesListener :: formatQueryAsSql ) . collect ( Collectors . toList ( ) ) ; ourLog . info ( "Delete Queries:\n{}" , String . join ( "\n" , queries ) ) ; |
public class MetaClassImpl { /** * Hook to deal with the case of MissingProperty for static properties . The method will look attempt to look up
* " propertyMissing " handlers and invoke them otherwise thrown a MissingPropertyException
* @ param instance The instance
* @ param propertyName The name of the property
* @ param optionalValue The value in the case of a setter
* @ param isGetter True if its a getter
* @ return The value in the case of a getter or a MissingPropertyException */
protected Object invokeStaticMissingProperty ( Object instance , String propertyName , Object optionalValue , boolean isGetter ) { } } | MetaClass mc = instance instanceof Class ? registry . getMetaClass ( ( Class ) instance ) : this ; if ( isGetter ) { MetaMethod propertyMissing = mc . getMetaMethod ( STATIC_PROPERTY_MISSING , GETTER_MISSING_ARGS ) ; if ( propertyMissing != null ) { return propertyMissing . invoke ( instance , new Object [ ] { propertyName } ) ; } } else { MetaMethod propertyMissing = mc . getMetaMethod ( STATIC_PROPERTY_MISSING , SETTER_MISSING_ARGS ) ; if ( propertyMissing != null ) { return propertyMissing . invoke ( instance , new Object [ ] { propertyName , optionalValue } ) ; } } if ( instance instanceof Class ) { throw new MissingPropertyException ( propertyName , ( Class ) instance ) ; } throw new MissingPropertyException ( propertyName , theClass ) ; |
public class Ifc2x3tc1PackageImpl {

    /**
     * Returns the {@link EClass} for IfcValveType, resolving it lazily from the
     * registered EMF package on first access.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EClass getIfcValveType() {
        if (ifcValveTypeEClass == null) {
            // Classifier index 631 is fixed by the generated package model.
            ifcValveTypeEClass = (EClass) EPackage.Registry.INSTANCE
                    .getEPackage(Ifc2x3tc1Package.eNS_URI).getEClassifiers().get(631);
        }
        return ifcValveTypeEClass;
    }
}
public class AbstractResponseFuture { /** * 设置正常返回结果
* @ param result 正常返回值 */
public void setSuccess ( V result ) { } } | if ( this . isCancelled ( ) ) { this . releaseIfNeed ( result ) ; } if ( setSuccess0 ( result ) ) { notifyListeners ( ) ; return ; } throw new IllegalStateException ( "complete already: " + this ) ; |
public class ConnectionParams { /** * Creates a new ConnectionParams object filled with key - value pairs serialized
* as a string .
* @ param line a string with serialized key - value pairs as
* " key1 = value1 ; key2 = value2 ; . . . " Example :
* " Key1 = 123 ; Key2 = ABC ; Key3 = 2016-09-16T00:00:00.00Z "
* @ return a new ConnectionParams object .
* @ see StringValueMap # fromString ( String ) */
public static ConnectionParams fromString ( String line ) { } } | StringValueMap map = StringValueMap . fromString ( line ) ; return new ConnectionParams ( map ) ; |
public class UtilFile {

    /**
     * Calculates the file absolute URI, e.g.
     * file:///opt/sybhttpd/localhost.drives/SATA_DISK/ztesting/02.mp3.
     * Prepends "file://" if it is not already included in the file path.
     *
     * @param filePath the path to convert
     * @return the path with a "file://" prefix
     */
    public static String getFileAbsoluteUri(String filePath) {
        if (filePath.startsWith("file://")) {
            return filePath;
        }
        return "file://".concat(filePath);
    }
}
public class MongoDBBasicOperations { /** * Add or update an existing object in the data store .
* @ param object
* the object to save
* @ return < code > true < / code > if saved , < code > false < / code > otherwise */
@ Override public boolean addOrUpdate ( T object ) { } } | if ( object == null ) { return false ; } X primaryID = getPrimaryID ( object ) ; if ( primaryID == null ) { this . mongoTemplate . save ( object ) ; return true ; } if ( ! allowEmptyOrZeroID ( ) && AssertUtils . isEmpty ( primaryID ) ) { return false ; } this . mongoTemplate . save ( object ) ; return true ; |
public class AstUtil { /** * Return true only if the MethodCallExpression represents a method call for any one of the specified method
* objects ( receivers ) and any one of the method names . Optionally , you can restrict it to a method call with
* a certain number of arguments .
* @ param methodCall
* the method call object
* @ param methodObjects
* a list of receivers , such as [ ' this ' , ' super ' ]
* @ param methodNames
* a list of method names
* @ param numArguments
* optionally , require a certain number of arguments
* @ return
* as described */
public static boolean isMethodCall ( MethodCallExpression methodCall , List < String > methodObjects , List < String > methodNames , Integer numArguments ) { } } | if ( methodNames != null ) { for ( String name : methodNames ) { if ( methodObjects != null ) { for ( String objectName : methodObjects ) { boolean match = isMethodCallOnObject ( methodCall , objectName ) && isMethodNamed ( methodCall , name ) ; if ( match && numArguments == null ) { return true ; } else if ( match && getMethodArguments ( methodCall ) . size ( ) == numArguments ) { return true ; } } } } } return false ; |
public class RateLimiter { /** * Creates a { @ code RateLimiter } with the specified stable throughput , given as
* " permits per second " ( commonly referred to as < i > QPS < / i > , queries per second ) , and a < i > warmup
* period < / i > , during which the { @ code RateLimiter } smoothly ramps up its rate , until it reaches
* its maximum rate at the end of the period ( as long as there are enough requests to saturate
* it ) . Similarly , if the { @ code RateLimiter } is left < i > unused < / i > for a duration of
* { @ code warmupPeriod } , it will gradually return to its " cold " state , i . e . it will go through the
* same warming up process as when it was first created .
* < p > The returned { @ code RateLimiter } is intended for cases where the resource that actually
* fulfills the requests ( e . g . , a remote server ) needs " warmup " time , rather than being
* immediately accessed at the stable ( maximum ) rate .
* < p > The returned { @ code RateLimiter } starts in a " cold " state ( i . e . the warmup period will
* follow ) , and if it is left unused for long enough , it will return to that state .
* @ param permitsPerSecond the rate of the returned { @ code RateLimiter } , measured in how many
* permits become available per second
* @ param warmupPeriod the duration of the period where the { @ code RateLimiter } ramps up its rate ,
* before reaching its stable ( maximum ) rate
* @ param unit the time unit of the warmupPeriod argument
* @ throws IllegalArgumentException if { @ code permitsPerSecond } is negative or zero or
* { @ code warmupPeriod } is negative */
public static RateLimiter create ( double permitsPerSecond , long warmupPeriod , TimeUnit unit ) { } } | checkArgument ( warmupPeriod >= 0 , "warmupPeriod must not be negative: %s" , warmupPeriod ) ; return create ( SleepingStopwatch . createFromSystemTimer ( ) , permitsPerSecond , warmupPeriod , unit , 3.0 ) ; |
public class SubWriterHolderWriter { /** * Add the class content tree .
* @ param contentTree content tree to which the class content will be added
* @ param classContentTree class content tree which will be added to the content tree */
public void addClassContentTree ( Content contentTree , Content classContentTree ) { } } | if ( configuration . allowTag ( HtmlTag . MAIN ) ) { mainTree . addContent ( classContentTree ) ; contentTree . addContent ( mainTree ) ; } else { contentTree . addContent ( classContentTree ) ; } |
public class Dependencies { /** * Find an entry in the forbidden list by package name .
* @ param packageName
* Name to find .
* @ return Package or < code > null < / code > if no entry with the given name was found . */
public final Package < NotDependsOn > findForbiddenByName ( final String packageName ) { } } | final List < Package < NotDependsOn > > list = getForbidden ( ) ; for ( final Package < NotDependsOn > pkg : list ) { if ( pkg . getName ( ) . equals ( packageName ) ) { return pkg ; } } return null ; |
public class ProcessTask {

    /**
     * Processes the NVD CVE JSON file and imports the data into the DB, then
     * commits and records the processed file in the update properties.
     *
     * @throws UpdateException thrown if there is an error loading the data into
     *         the database
     */
    private void processFiles() throws UpdateException {
        LOGGER.info("Processing Started for NVD CVE - {}", downloadTask.getNvdCveInfo().getId());
        final long startProcessing = System.currentTimeMillis();
        try {
            importJSON(downloadTask.getFile());
            cveDB.commit();
            properties.save(downloadTask.getNvdCveInfo());
        } catch (ParserConfigurationException | SQLException | DatabaseException
                | ClassNotFoundException | IOException ex) {
            throw new UpdateException(ex);
        } finally {
            // Always clean up the downloaded file, even when the import fails.
            downloadTask.cleanup();
        }
        LOGGER.info("Processing Complete for NVD CVE - {} ({} ms)",
                downloadTask.getNvdCveInfo().getId(),
                System.currentTimeMillis() - startProcessing);
    }
}
public class FieldInfo { /** * Sort in order of class name then field name .
* @ param other
* the other FieldInfo object to compare to .
* @ return the result of comparison . */
@ Override public int compareTo ( final FieldInfo other ) { } } | final int diff = declaringClassName . compareTo ( other . declaringClassName ) ; if ( diff != 0 ) { return diff ; } return name . compareTo ( other . name ) ; |
public class DeepLearning {

    /**
     * Train a Deep Learning neural net model.
     *
     * Locks the data and model, prepares (optionally rebalanced / stratified)
     * training and validation scoring frames, then runs the distributed
     * train/score loop until scoring says to stop, optionally restoring the
     * best model seen so far. Cancellation and failure both return the stored
     * model with the job state recorded on its parameters.
     *
     * @param model Input model (e.g., from initModel(), or from a previous training run)
     * @return Trained model
     */
    public final DeepLearningModel trainModel(DeepLearningModel model) {
        Frame validScoreFrame = null;
        Frame train, trainScoreFrame;
        try {
            lock_data();
            if (checkpoint == null && !quiet_mode) logStart();
            // if checkpoint is given, some Job's params might be uninitialized (but the restarted model's parameters are correct)
            if (model == null) {
                model = UKV.get(dest());
            }
            model.write_lock(self());
            // use the model's parameters for everything below - NOT the job's parameters (can be different after checkpoint restart)
            final DeepLearning mp = model.model_info().get_params();
            prepareValidationWithModel(model);
            final long model_size = model.model_info().size();
            if (!quiet_mode)
                Log.info("Number of model parameters (weights/biases): " + String.format("%,d", model_size));
            train = model.model_info().data_info()._adaptedFrame;
            if (mp.force_load_balance)
                train = updateFrame(train, reBalance(train, mp.replicate_training_data));
            if (mp.classification && mp.balance_classes) {
                // leave initialized to 0 -> will be filled up below
                float[] trainSamplingFactors = new float[train.lastVec().domain().length];
                if (class_sampling_factors != null) {
                    if (class_sampling_factors.length != train.lastVec().domain().length)
                        throw new IllegalArgumentException("class_sampling_factors must have " + train.lastVec().domain().length + " elements");
                    // clone: don't modify the original
                    trainSamplingFactors = class_sampling_factors.clone();
                }
                train = updateFrame(train, sampleFrameStratified(train, train.lastVec(), trainSamplingFactors,
                        (long) (mp.max_after_balance_size * train.numRows()), mp.seed, true, false));
                model.setModelClassDistribution(new MRUtils.ClassDist(train.lastVec()).doAll(train.lastVec()).rel_dist());
            }
            model.training_rows = train.numRows();
            // training scoring dataset is always sampled uniformly from the training dataset
            trainScoreFrame = updateFrame(train, sampleFrame(train, mp.score_training_samples, mp.seed));
            if (!quiet_mode)
                Log.info("Number of chunks of the training data: " + train.anyVec().nChunks());
            if (validation != null) {
                model.validation_rows = validation.numRows();
                Frame adaptedValid = getValidation();
                if (getValidAdaptor().needsAdaptation2CM()) {
                    adaptedValid.add(getValidAdaptor().adaptedValidationResponse(_responseName),
                            getValidAdaptor().getAdaptedValidationResponse2CM());
                }
                // validation scoring dataset can be sampled in multiple ways from the given validation dataset
                if (mp.classification && mp.balance_classes
                        && mp.score_validation_sampling == ClassSamplingMethod.Stratified) {
                    validScoreFrame = updateFrame(adaptedValid, sampleFrameStratified(adaptedValid,
                            adaptedValid.lastVec(), null,
                            mp.score_validation_samples > 0 ? mp.score_validation_samples : adaptedValid.numRows(),
                            mp.seed + 1, false /* no oversampling */, false));
                } else {
                    validScoreFrame = updateFrame(adaptedValid,
                            sampleFrame(adaptedValid, mp.score_validation_samples, mp.seed + 1));
                }
                if (mp.force_load_balance)
                    validScoreFrame = updateFrame(validScoreFrame, reBalance(validScoreFrame,
                            false /* always split up globally since scoring should be distributed */));
                if (!quiet_mode)
                    Log.info("Number of chunks of the validation data: " + validScoreFrame.anyVec().nChunks());
            }
            // Set train_samples_per_iteration size (cannot be done earlier since this depends on whether stratified sampling is done)
            model.actual_train_samples_per_iteration = computeTrainSamplesPerIteration(mp, train.numRows(), model);
            // Determine whether shuffling is enforced
            if (mp.replicate_training_data
                    && (model.actual_train_samples_per_iteration == train.numRows() * (mp.single_node_mode ? 1 : H2O.CLOUD.size()))
                    && !mp.shuffle_training_data && H2O.CLOUD.size() > 1 && !mp.reproducible) {
                Log.warn("Enabling training data shuffling, because all nodes train on the full dataset (replicated training data).");
                mp.shuffle_training_data = true;
            }
            // to keep track of time per iteration, must be called before first call to doScoring
            model._timeLastScoreEnter = System.currentTimeMillis();
            if (!mp.quiet_mode) Log.info("Initial model:\n" + model.model_info());
            // get the null model reconstruction error
            if (autoencoder) model.doScoring(train, trainScoreFrame, validScoreFrame, self(), getValidAdaptor());
            // put the initial version of the model into DKV
            model.update(self());
            Log.info("Starting to train the Deep Learning model.");
            // main loop
            do
                model.set_model_info(H2O.CLOUD.size() > 1 && mp.replicate_training_data
                        ? (mp.single_node_mode
                            // replicated data + single node mode
                            ? new DeepLearningTask2(train, model.model_info(), rowFraction(train, mp, model)).invoke(Key.make()).model_info()
                            // replicated data + multi-node mode
                            : new DeepLearningTask2(train, model.model_info(), rowFraction(train, mp, model)).invokeOnAllNodes().model_info())
                        // distributed data (always in multi-node mode)
                        : new DeepLearningTask(model.model_info(), rowFraction(train, mp, model)).doAll(train).model_info());
            while (model.doScoring(train, trainScoreFrame, validScoreFrame, self(), getValidAdaptor()));
            // replace the model with the best model so far (if it's better)
            if (!isCancelledOrCrashed() && override_with_best_model
                    && model.actual_best_model_key != null && n_folds == 0) {
                DeepLearningModel best_model = UKV.get(model.actual_best_model_key);
                if (best_model != null && best_model.error() < model.error()
                        && Arrays.equals(best_model.model_info().units, model.model_info().units)) {
                    Log.info("Setting the model to be the best model so far (based on scoring history).");
                    DeepLearningModel.DeepLearningModelInfo mi = best_model.model_info().deep_clone();
                    // Don't cheat - count full amount of training samples, since that's the amount of training it took to train (without finding anything better)
                    mi.set_processed_global(model.model_info().get_processed_global());
                    mi.set_processed_local(model.model_info().get_processed_local());
                    model.set_model_info(mi);
                    model.update(self());
                    model.doScoring(train, trainScoreFrame, validScoreFrame, self(), getValidAdaptor());
                    assert (best_model.error() == model.error());
                }
            }
            Log.info(model);
            Log.info("Finished training the Deep Learning model.");
            return model;
        } catch (JobCancelledException ex) {
            model = UKV.get(dest());
            state = JobState.CANCELLED; // for JSON REST response
            model.get_params().state = state; // for parameter JSON on the HTML page
            Log.info("Deep Learning model building was cancelled.");
            return model;
        } catch (Throwable t) {
            t.printStackTrace();
            model = UKV.get(dest());
            state = JobState.FAILED; // for JSON REST response
            if (model != null) {
                model.get_params().state = state; // for parameter JSON on the HTML page
                Log.info("Deep Learning model building failed.");
            }
            return model;
        } finally {
            // Always release the model write-lock and the data lock.
            if (model != null && DKV.get(model._key) != null) model.unlock(self());
            unlock_data();
        }
    }
}
public class AttributesInformation { /** * Attribute .
* @ param indexAttribute the index Attribute
* @ return the attribute */
public Attribute attribute ( int indexAttribute ) { } } | if ( this . attributes == null ) { // All attributes are numeric
return defaultNumericAttribute ( ) ; } int location = locateIndex ( indexAttribute ) ; if ( location == - 1 ) { // if there is not attribute information , it is numeric
return defaultNumericAttribute ( ) ; } return attributes [ location ] ; |
public class AccessProxy {

    /**
     * Checks the access rights (read or write) of the current user/host for the
     * specified device, caching the answer per device name. Falls back to the
     * single-IP command when the TAC server is too old to support multi-IP.
     *
     * @param devName the device name to check
     * @return TangoConst.ACCESS_WRITE or TangoConst.ACCESS_READ
     * @throws DevFailed if the access-control device cannot be reached
     */
    int checkAccessControl(String devName) throws DevFailed {
        if (forced) return TangoConst.ACCESS_WRITE;
        synchronized (monitor) {
            // Check if already tested.
            String str = dev_right_table.get(devName);
            if (str != null) {
                if (str.equals("write")) return TangoConst.ACCESS_WRITE;
                else return TangoConst.ACCESS_READ;
            }
            try {
                // If not already done check user name
                if (user == null) user = System.getProperty("user.name").toLowerCase();
                hostAddr = ApiUtil.getHostAddress();
                DeviceData argin = new DeviceData();
                String rights;
                if (muliIP) {
                    // Send the user, device and every local address in one call.
                    Vector<String> addresses = ApiUtil.getHostAddresses();
                    String[] array = new String[addresses.size() + 2];
                    int i = 0;
                    array[i++] = user;
                    array[i++] = devName;
                    for (String address : addresses) {
                        array[i++] = address;
                    }
                    argin.insert(array);
                    rights = command_inout("GetAccessForMultiIP", argin).extractString();
                } else {
                    argin.insert(new String[]{user, hostAddr, devName});
                    rights = command_inout("GetAccess", argin).extractString();
                }
                // Check for user and host rights on specified device
                dev_right_table.put(devName, rights);
                if (rights.equals("write")) {
                    return TangoConst.ACCESS_WRITE;
                } else {
                    return TangoConst.ACCESS_READ;
                }
            } catch (DevFailed e) {
                if (e.errors[0].reason.equals("TangoApi_DEVICE_NOT_EXPORTED"))
                    Except.re_throw_exception(e, "TangoApi_CANNOT_CHECK_ACCESS_CONTROL",
                            "Cannot import Access Control device !",
                            "AccessProxy.checkAccessControl()");
                else if (muliIP && e.errors[0].reason.equals("API_CommandNotFound")) {
                    // Old TAC server: disable multi-IP and retry once with the legacy command.
                    System.err.println(e.errors[0].desc + " - TAC server is an old version");
                    muliIP = false;
                    return checkAccessControl(devName);
                } else throw e;
            }
        }
        // Reached only after re_throw_exception above; default to read access.
        return TangoConst.ACCESS_READ;
    }
}
public class Tools {

    /**
     * Gets the proper (always non-negative) modulus operation.
     *
     * @param x integer
     * @param m modulo (its sign is ignored)
     * @return x mod |m|, in the range [0, |m|)
     */
    public static int Mod(int x, int m) {
        final int modulus = (m < 0) ? -m : m;
        final int remainder = x % modulus;
        return (remainder < 0) ? remainder + modulus : remainder;
    }
}
public class SDNN { /** * Element - wise sigmoid function derivative : dL / dIn given input and dL / dOut
* @ param name Output variable name
* @ param x Input Variable
* @ param wrt Gradient at the output - dL / dOut . Must have same shape as the input
* @ return Output variable */
public SDVariable sigmoidDerivative ( String name , SDVariable x , SDVariable wrt ) { } } | validateFloatingPoint ( "sigmoidDerivative" , x ) ; SDVariable result = f ( ) . sigmoidDerivative ( x , wrt ) ; return updateVariableNameAndReference ( result , name ) ; |
public class UriTemplate { /** * Applies variable substitution to the URI Template and returns the expanded
 * URI .
 * @ return the expanded URI as a String
 * @ throws VariableExpansionException
 * @ since 1.0 */
public String expand ( ) throws VariableExpansionException { } } | // Substitute every registered expression into a working copy of the template text .
String template = getTemplate ( ) ; for ( Expression expression : expressions ) { final String replacement = expressionReplacementString ( expression , false ) ; // NOTE ( review ) : replaceAll treats the pattern as a regex ; getReplacementPattern ( ) is presumably pre - escaped -- confirm .
template = template . replaceAll ( expression . getReplacementPattern ( ) , replacement ) ; } return template ; |
public class ReconciliationReportRowServiceLocator { /** * For the given interface , get the stub implementation .
 * If this service has no port for the given interface ,
 * then ServiceException is thrown . */
public java . rmi . Remote getPort ( Class serviceEndpointInterface ) throws javax . xml . rpc . ServiceException { } } | // Generated Apache Axis locator : any failure while constructing the stub ( e . g . a bad endpoint URL ) is wrapped in a ServiceException .
try { if ( com . google . api . ads . admanager . axis . v201811 . ReconciliationReportRowServiceInterface . class . isAssignableFrom ( serviceEndpointInterface ) ) { com . google . api . ads . admanager . axis . v201811 . ReconciliationReportRowServiceSoapBindingStub _stub = new com . google . api . ads . admanager . axis . v201811 . ReconciliationReportRowServiceSoapBindingStub ( new java . net . URL ( ReconciliationReportRowServiceInterfacePort_address ) , this ) ; _stub . setPortName ( getReconciliationReportRowServiceInterfacePortWSDDServiceName ( ) ) ; return _stub ; } } catch ( java . lang . Throwable t ) { throw new javax . xml . rpc . ServiceException ( t ) ; } throw new javax . xml . rpc . ServiceException ( "There is no stub implementation for the interface: " + ( serviceEndpointInterface == null ? "null" : serviceEndpointInterface . getName ( ) ) ) ; |
public class EipClient { /** * The method to generate a default Billing which is Postpaid .
* @ return The Billing object with Postpaid PaymentTiming . */
private Billing generateDefaultBilling ( ) { } } | Billing billing = new Billing ( ) ; billing . setPaymentTiming ( "Postpaid" ) ; billing . setBillingMethod ( "ByBandwidth" ) ; return billing ; |
public class JavaClassService { /** * Find a { @ link JavaClassModel } by the qualified name , returning a single result . If more than one result is available , a
 * { @ link AmbiguousJavaClassModel } reference will be returned .
 * @ param qualifiedName the fully qualified class name to resolve .
 * @ return the resolved model .
 * @ throws NonUniqueResultException if the lookup cannot be reduced to a single result . */
public JavaClassModel getByName ( String qualifiedName ) throws NonUniqueResultException { } } | // Fix : end ( ) was previously skipped when resolveByQualifiedName threw , leaving the timing statistic open ; try / finally keeps begin / end balanced .
ExecutionStatistics . get ( ) . begin ( "getUniqueByName(qualifiedName)" ) ; try { return resolveByQualifiedName ( qualifiedName ) ; } finally { ExecutionStatistics . get ( ) . end ( "getUniqueByName(qualifiedName)" ) ; } |
public class IndentingWriter { /** * Returns an indenting writer with the new indentation .
 * Please note : Already written lines will not be modified to accomodate the new indentation .
 * @ param newIndentation The new indentation to apply to this writer ( optional ) .
 * @ return Either this writer if the indentation is already correct ,
 * or a new IndentingWriter with the adapted indentation . */
public IndentingWriter withIndentation ( Indentation newIndentation ) { } } | // No change requested , or already using the requested indentation : reuse this instance .
if ( newIndentation == null || this . indentation . equals ( newIndentation ) ) { return this ; }
// Otherwise clone the writer state with the new indentation applied .
return new IndentingWriter ( delegate , newIndentation , lastWritten , addWhitespace . get ( ) ) ; |
public class JtsAdapter { /** * Create and return a feature from a geometry . Returns null on failure .
 * @ param geom flat geometry via { @ link # flatFeatureList ( Geometry ) } that can be translated to a feature
 * @ param cursor vector tile cursor position
 * @ param layerProps layer properties for tagging features
 * @ return new tile feature instance , or null on failure */
private static VectorTile . Tile . Feature toFeature ( Geometry geom , Vec2d cursor , MvtLayerProps layerProps , IUserDataConverter userDataConverter ) { } } | // Guard : UNKNOWN Geometry
final VectorTile . Tile . GeomType mvtGeomType = JtsAdapter . toGeomType ( geom ) ; if ( mvtGeomType == VectorTile . Tile . GeomType . UNKNOWN ) { return null ; } final VectorTile . Tile . Feature . Builder featureBuilder = VectorTile . Tile . Feature . newBuilder ( ) ; final boolean mvtClosePath = MvtUtil . shouldClosePath ( mvtGeomType ) ; final List < Integer > mvtGeom = new ArrayList < > ( ) ; featureBuilder . setType ( mvtGeomType ) ; if ( geom instanceof Point || geom instanceof MultiPoint ) { // Encode as MVT point or multipoint
mvtGeom . addAll ( ptsToGeomCmds ( geom , cursor ) ) ; } else if ( geom instanceof LineString || geom instanceof MultiLineString ) { // Encode as MVT linestring or multi - linestring
for ( int i = 0 ; i < geom . getNumGeometries ( ) ; ++ i ) { mvtGeom . addAll ( linesToGeomCmds ( geom . getGeometryN ( i ) , mvtClosePath , cursor , 1 ) ) ; } } else if ( geom instanceof MultiPolygon || geom instanceof Polygon ) { // Encode as MVT polygon or multi - polygon
for ( int i = 0 ; i < geom . getNumGeometries ( ) ; ++ i ) { final Polygon nextPoly = ( Polygon ) geom . getGeometryN ( i ) ; final List < Integer > nextPolyGeom = new ArrayList < > ( ) ; boolean valid = true ; // Add exterior ring
final LineString exteriorRing = nextPoly . getExteriorRing ( ) ; // Area must be non - zero
final double exteriorArea = CGAlgorithms . signedArea ( exteriorRing . getCoordinates ( ) ) ; if ( ( ( int ) Math . round ( exteriorArea ) ) == 0 ) { continue ; } // Check CCW Winding ( must be positive area )
// NOTE ( review ) : CoordinateArrays . reverse operates on the array returned by getCoordinates ( ) , which JTS may hand back as a copy — the ring itself may not actually be reversed ; verify against the CoordinateSequence implementation in use .
if ( exteriorArea < 0d ) { CoordinateArrays . reverse ( exteriorRing . getCoordinates ( ) ) ; } nextPolyGeom . addAll ( linesToGeomCmds ( exteriorRing , mvtClosePath , cursor , 2 ) ) ; // Add interior rings
for ( int ringIndex = 0 ; ringIndex < nextPoly . getNumInteriorRing ( ) ; ++ ringIndex ) { final LineString nextInteriorRing = nextPoly . getInteriorRingN ( ringIndex ) ; // Area must be non - zero
final double interiorArea = CGAlgorithms . signedArea ( nextInteriorRing . getCoordinates ( ) ) ; if ( ( ( int ) Math . round ( interiorArea ) ) == 0 ) { continue ; } // Check CW Winding ( must be negative area )
// NOTE ( review ) : same getCoordinates ( ) - copy caveat as the exterior ring above .
if ( interiorArea > 0d ) { CoordinateArrays . reverse ( nextInteriorRing . getCoordinates ( ) ) ; } // Interior ring area must be < exterior ring area , or entire geometry is invalid
if ( Math . abs ( exteriorArea ) <= Math . abs ( interiorArea ) ) { valid = false ; break ; } nextPolyGeom . addAll ( linesToGeomCmds ( nextInteriorRing , mvtClosePath , cursor , 2 ) ) ; } if ( valid ) { mvtGeom . addAll ( nextPolyGeom ) ; } } } if ( mvtGeom . size ( ) < 1 ) { return null ; } featureBuilder . addAllGeometry ( mvtGeom ) ; // Feature Properties
userDataConverter . addTags ( geom . getUserData ( ) , layerProps , featureBuilder ) ; return featureBuilder . build ( ) ; |
public class Environment { /** * Performs POST to supplied url of result of applying template with model .
 * All namespaces registered in this environment will be registered with result .
 * @ param url url to post to .
 * @ param templateName name of template to use .
 * @ param model model for template .
 * @ param result result to populate with response . */
public void callService ( String url , String templateName , Object model , XmlHttpResponse result ) { } } | // Convenience overload : delegates to the five - argument variant , passing null for the final parameter ( its meaning is defined by that overload ) .
callService ( url , templateName , model , result , null ) ; |
public class ScoreTemplate { /** * Sets the font family of several fields
* @ param fields
* one of { @ link ScoreElements } constants
* @ param fontFamilies
* array of font names , e . g . { " Georgia " , " Verdana " } */
public void setTextFontFamilyName ( byte [ ] fields , String [ ] fontFamilies ) { } } | if ( fontFamilies == null ) fontFamilies = new String [ 0 ] ; for ( byte field : fields ) { getFieldInfos ( field ) . m_fontFamilyNames = fontFamilies ; } notifyListeners ( ) ; |
public class EclipseIndexWriter { /** * Logic for adding various start index entry elements for Eclipse help .
 * Some branches deliberately leave an element open ( no matching writeEndElement here ) ;
 * the close is presumably emitted by a companion end - element method — confirm .
 * @ param term The indexterm to be processed .
 * @ param serializer The XML stream writer used for writing content to disk .
 * @ param indexsee Boolean value for using the new markup for see references . */
private void outputIndexTermStartElement ( final IndexTerm term , final XMLStreamWriter serializer , final boolean indexsee ) throws XMLStreamException { } } | // RFE 2987769 Eclipse index - see
if ( indexsee ) { if ( term . getTermPrefix ( ) != null ) { inIndexsee = true ; serializer . writeStartElement ( "see" ) ; serializer . writeAttribute ( "keyword" , term . getTermName ( ) ) ; } else if ( inIndexsee ) { // subterm of an indexsee .
serializer . writeStartElement ( "subpath" ) ; serializer . writeAttribute ( "keyword" , term . getTermName ( ) ) ; serializer . writeEndElement ( ) ; // subpath
} else { serializer . writeStartElement ( "entry" ) ; serializer . writeAttribute ( "keyword" , term . getTermName ( ) ) ; outputIndexEntryEclipseIndexsee ( term , serializer ) ; } } else { serializer . writeStartElement ( "entry" ) ; serializer . writeAttribute ( "keyword" , term . getTermFullName ( ) ) ; outputIndexEntry ( term , serializer ) ; } |
public class CacheStatsModule { /** * Updates statistics using two supplied arguments - maxInMemoryCacheSize and currentInMemoryCacheSize .
 * @ param max
 * Maximum # of entries that can be stored in memory
 * @ param current
 * Current # of in memory cache entries */
public void updateCacheSizes ( long max , long current ) { } } | final String methodName = "updateCacheSizes()" ; if ( tc . isDebugEnabled ( ) && null != _maxInMemoryCacheEntryCount && null != _inMemoryCacheEntryCount ) { // NOTE ( review ) : the trace fires only when BOTH counters changed ( && ) ; if the intent is " log when either changed " this should be || — confirm .
if ( max != _maxInMemoryCacheEntryCount . getCount ( ) && _inMemoryCacheEntryCount . getCount ( ) != current ) Tr . debug ( tc , methodName + " cacheName=" + _sCacheName + " max=" + max + " current=" + current + " enable=" + this . _enable , this ) ; } // Counters are only written when statistics are enabled ; either counter may be absent .
if ( _enable ) { if ( _maxInMemoryCacheEntryCount != null ) _maxInMemoryCacheEntryCount . setCount ( max ) ; if ( _inMemoryCacheEntryCount != null ) _inMemoryCacheEntryCount . setCount ( current ) ; } |
public class UserDistributionTypeImpl { /** * Generated EMF reflective getter : maps a feature ID to the matching accessor ;
 * unrecognised IDs fall through to the superclass implementation .
 * < ! - - begin - user - doc - - >
 * < ! - - end - user - doc - - >
 * @ generated */
@ Override public Object eGet ( int featureID , boolean resolve , boolean coreType ) { } } | switch ( featureID ) { case BpsimPackage . USER_DISTRIBUTION_TYPE__GROUP : if ( coreType ) return getGroup ( ) ; return ( ( FeatureMap . Internal ) getGroup ( ) ) . getWrapper ( ) ; case BpsimPackage . USER_DISTRIBUTION_TYPE__USER_DISTRIBUTION_DATA_POINT : return getUserDistributionDataPoint ( ) ; case BpsimPackage . USER_DISTRIBUTION_TYPE__DISCRETE : return isDiscrete ( ) ; } return super . eGet ( featureID , resolve , coreType ) ; |
public class DescribeBundleTasksRequest { /** * The bundle task IDs .
 * Default : Describes all your bundle tasks .
 * Lazily initialises the backing list on first access , so this never returns null .
 * @ return The bundle task IDs . < / p >
 * Default : Describes all your bundle tasks . */
public java . util . List < String > getBundleIds ( ) { } } | if ( bundleIds != null ) { return bundleIds ; }
// First access: create the SDK-internal list so callers always see a non-null list.
bundleIds = new com . amazonaws . internal . SdkInternalList < String > ( ) ; return bundleIds ; |
public class ArgumentDescriptor { /** * Sets up this object to represent a value that is derived from a field
 * in the corresponding class - descriptor .
 * If the value of < code > fieldRefName < / code > is blank or refers to an
 * invalid field reference , then the value of the corresponding argument
 * will be set to null . In this case , { @ link # getIsReturnedByProcedure }
 * will be set to < code > false < / code > , regardless of the value of the
 * < code > returnedByProcedure < / code > argument .
 * @ param fieldRefName the name of the field reference that provides the
 * value of this argument .
 * @ param returnedByProcedure indicates that the value of the argument
 * is returned by the procedure that is invoked . */
public void setValue ( String fieldRefName , boolean returnedByProcedure ) { } } | this . fieldSource = SOURCE_FIELD ; this . fieldRefName = fieldRefName ; this . returnedByProcedure = returnedByProcedure ; this . constantValue = null ; // If the field reference is not valid , then disregard the value
// of the returnedByProcedure argument . ( Fix : this guard was previously
// duplicated verbatim ; the redundant second copy has been removed . )
if ( this . getFieldRef ( ) == null ) { this . returnedByProcedure = false ; } |
public class ST_Snap { /** * Snaps two geometries together with a given tolerance
* @ param geometryA a geometry to snap
* @ param geometryB a geometry to snap
* @ param distance the tolerance to use
* @ return the snapped geometries */
public static Geometry snap ( Geometry geometryA , Geometry geometryB , double distance ) { } } | if ( geometryA == null || geometryB == null ) { return null ; } Geometry [ ] snapped = GeometrySnapper . snap ( geometryA , geometryB , distance ) ; return snapped [ 0 ] ; |
public class ApiOvhCloud { /** * Get volume details .
 * REST : GET / cloud / project / { serviceName } / volume / { volumeId }
 * @ param serviceName [ required ] Project id
 * @ param volumeId [ required ] Volume id
 * @ return the volume description returned by the API . */
public OvhVolume project_serviceName_volume_volumeId_GET ( String serviceName , String volumeId ) throws IOException { } } | // Expand the path template, execute the GET, and map the JSON response onto OvhVolume.
String qPath = "/cloud/project/{serviceName}/volume/{volumeId}" ;
StringBuilder requestUrl = path ( qPath , serviceName , volumeId ) ;
String responseBody = exec ( qPath , "GET" , requestUrl . toString ( ) , null ) ;
return convertTo ( responseBody , OvhVolume . class ) ; |
public class GroupContactSet { /** * Tell whether the given pair is a contact in this GroupContactSet ,
* the comparison is done by matching residue numbers and chain identifiers
* @ param resNumber1
* @ param resNumber2
* @ return */
public boolean hasContact ( ResidueNumber resNumber1 , ResidueNumber resNumber2 ) { } } | return contacts . containsKey ( new Pair < ResidueNumber > ( resNumber1 , resNumber2 ) ) ; |
public class DropboxBrowse { /** * Reads in a single field of a multipart / form - data request . If the field is not present
 * or the field is longer than maxLength , we ' ll use the given HttpServletResponse to respond
 * with an error and then return null .
 * Otherwise , process it as UTF - 8 bytes and return the equivalent String .
 * Fix : the buffer is now filled with a read loop — a single read ( ) may legally return
 * fewer bytes than are available , which previously could truncate the field or make the
 * follow - up length check misfire on a partial read . */
private static String slurpUtf8Part ( HttpServletRequest request , HttpServletResponse response , String name , int maxLength ) throws IOException , ServletException { } } | Part part = request . getPart ( name ) ; if ( part == null ) { response . sendError ( 400 , "Form field " + jq ( name ) + " is missing" ) ; return null ; }
byte [ ] bytes = new byte [ maxLength ] ; InputStream in = part . getInputStream ( ) ;
// Fill the buffer completely (or until EOF); InputStream.read may return short counts.
int total = 0 ; while ( total < maxLength ) { int n = in . read ( bytes , total , maxLength - total ) ; if ( n == - 1 ) { break ; } total += n ; }
String s = StringUtil . utf8ToString ( bytes , 0 , total ) ;
// Anything left beyond maxLength means the field is too long.
if ( in . read ( ) != - 1 ) { response . sendError ( 400 , "Field " + jq ( name ) + " is too long (the limit is " + maxLength + " bytes): " + jq ( s ) ) ; return null ; }
// TODO : We ' re just assuming the content is UTF - 8 text . We should actually check it .
return s ; |
public class ZonedDateTimeRangeRandomizer { /** * Create a new { @ link ZonedDateTimeRangeRandomizer } .
 * @ param min min value
 * @ param max max value
 * @ param seed initial seed
 * @ return a new { @ link ZonedDateTimeRangeRandomizer } . */
public static ZonedDateTimeRangeRandomizer aNewZonedDateTimeRangeRandomizer ( final ZonedDateTime min , final ZonedDateTime max , final long seed ) { } } | // Static factory : simply forwards to the ( min , max , seed ) constructor .
return new ZonedDateTimeRangeRandomizer ( min , max , seed ) ; |
public class AbucoinsAccountServiceRaw { /** * Corresponds to < code > GET deposits / history < / code >
 * @ return the deposit history reported by the exchange
 * @ throws IOException on network / IO failure
 * @ throws ExchangeException if the first history entry carries an error message */
public AbucoinsDepositsHistory abucoinsDepositHistory ( ) throws IOException { } } | // The Abucoins API signals errors in - band : a non - null message on the first history entry indicates the call failed .
AbucoinsDepositsHistory history = abucoinsAuthenticated . depositsHistory ( exchange . getExchangeSpecification ( ) . getApiKey ( ) , signatureCreator , exchange . getExchangeSpecification ( ) . getPassword ( ) , timestamp ( ) ) ; if ( history . getHistory ( ) . length > 0 && history . getHistory ( ) [ 0 ] . getMessage ( ) != null ) throw new ExchangeException ( history . getHistory ( ) [ 0 ] . getMessage ( ) ) ; return history ; |
public class SimpleMMcifConsumer { /** * Build sites in a BioJava Structure using the original author chain id & residue numbers .
 * Sites are built from struct _ site _ gen records that have been parsed . */
private void addSites ( ) { } } | List < Site > sites = structure . getSites ( ) ; if ( sites == null ) sites = new ArrayList < Site > ( ) ; for ( StructSiteGen siteGen : structSiteGens ) { // For each StructSiteGen , find the residues involved , if they exist then
String site_id = siteGen . getSite_id ( ) ; // multiple could be in same site .
if ( site_id == null ) site_id = "" ; String comp_id = siteGen . getLabel_comp_id ( ) ; // PDBName
// Assumption : the author chain ID and residue number for the site is consistent with the original
// author chain id and residue numbers .
String asymId = siteGen . getLabel_asym_id ( ) ; // chain name
String authId = siteGen . getAuth_asym_id ( ) ; // chain Id
String auth_seq_id = siteGen . getAuth_seq_id ( ) ; // Res num
// "?" is the mmCIF placeholder for "no insertion code".
String insCode = siteGen . getPdbx_auth_ins_code ( ) ; if ( insCode != null && insCode . equals ( "?" ) ) insCode = null ; // Look for asymID = chainID and seqID = seq _ ID . Check that comp _ id matches the resname .
Group g = null ; try { Chain chain = structure . getChain ( asymId ) ; if ( null != chain ) { try { Character insChar = null ; if ( null != insCode && insCode . length ( ) > 0 ) insChar = insCode . charAt ( 0 ) ; g = chain . getGroupByPDB ( new ResidueNumber ( null , Integer . parseInt ( auth_seq_id ) , insChar ) ) ; } catch ( NumberFormatException e ) { logger . warn ( "Could not lookup residue : " + authId + auth_seq_id ) ; } } } catch ( StructureException e ) { // NOTE ( review ) : the second warn argument passes e . getMessage ( ) rather than the exception itself , so no stack trace is logged — confirm intent .
logger . warn ( "Problem finding residue in site entry " + siteGen . getSite_id ( ) + " - " + e . getMessage ( ) , e . getMessage ( ) ) ; } if ( g != null ) { // 2 . find the site _ id , if not existing , create anew .
Site site = null ; for ( Site asite : sites ) { if ( site_id . equals ( asite . getSiteID ( ) ) ) site = asite ; } boolean addSite = false ; // 3 . add this residue to the site .
if ( site == null ) { addSite = true ; site = new Site ( ) ; site . setSiteID ( site_id ) ; } List < Group > groups = site . getGroups ( ) ; if ( groups == null ) groups = new ArrayList < Group > ( ) ; // Check the self - consistency of the residue reference from auth _ seq _ id and chain _ id
if ( ! comp_id . equals ( g . getPDBName ( ) ) ) { logger . warn ( "comp_id doesn't match the residue at " + authId + " " + auth_seq_id + " - skipping" ) ; } else { groups . add ( g ) ; site . setGroups ( groups ) ; } if ( addSite ) sites . add ( site ) ; } } structure . setSites ( sites ) ; |
public class BatchItemErrorMarshaller { /** * Marshall the given parameter object .
 * Rejects a null input , marshals the three bound fields , and wraps any
 * marshalling failure in a SdkClientException . */
public void marshall ( BatchItemError batchItemError , ProtocolMarshaller protocolMarshaller ) { } } | if ( batchItemError == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( batchItemError . getIndex ( ) , INDEX_BINDING ) ; protocolMarshaller . marshall ( batchItemError . getErrorCode ( ) , ERRORCODE_BINDING ) ; protocolMarshaller . marshall ( batchItemError . getErrorMessage ( ) , ERRORMESSAGE_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class DescribeDBEngineVersionsResult { /** * A list of < code > DBEngineVersion < / code > elements .
 * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
 * { @ link # setDBEngineVersions ( java . util . Collection ) } or { @ link # withDBEngineVersions ( java . util . Collection ) } if you
 * want to override the existing values .
 * @ param dBEngineVersions
 * A list of < code > DBEngineVersion < / code > elements .
 * @ return Returns a reference to this object so that method calls can be chained together . */
public DescribeDBEngineVersionsResult withDBEngineVersions ( DBEngineVersion ... dBEngineVersions ) { } } | // Appends to any existing list . NOTE ( review ) : a null varargs array NPEs here — standard AWS SDK behaviour ; use the Collection setter to clear .
if ( this . dBEngineVersions == null ) { setDBEngineVersions ( new com . amazonaws . internal . SdkInternalList < DBEngineVersion > ( dBEngineVersions . length ) ) ; } for ( DBEngineVersion ele : dBEngineVersions ) { this . dBEngineVersions . add ( ele ) ; } return this ; |
public class PullToDismissPager { /** * Smoothly animate mDraggingPane to the target X position within its range .
* @ param slideOffset position to animate to
* @ param velocity initial velocity in case of fling , or 0. */
boolean smoothSlideTo ( float slideOffset , int velocity ) { } } | if ( ! isSlidingEnabled ( ) ) { // Nothing to do .
return false ; } int panelTop = computePanelTopPosition ( slideOffset ) ; if ( mDragHelper . smoothSlideViewTo ( mSlideableView , mSlideableView . getLeft ( ) , panelTop ) ) { setAllChildrenVisible ( ) ; ViewCompat . postInvalidateOnAnimation ( this ) ; return true ; } return false ; |
public class Stage { /** * Answers a newly created Actor instance from the internal ActorFactory . ( INTERNAL ONLY )
 * @ param definition the Definition of the Actor to create
 * @ param parent the Actor parent of the new Actor
 * @ param maybeAddress the possible Address of the Actor to create
 * @ param maybeMailbox the possible Mailbox of the Actor to create
 * @ param maybeSupervisor the possible Supervisor of the Actor to create
 * @ param logger the Logger of the Actor to create
 * @ param < T > the protocol type
 * @ return Actor
 * @ throws Exception thrown if there is a problem with Actor creation */
private < T > Actor createRawActor ( final Definition definition , final Actor parent , final Address maybeAddress , final Mailbox maybeMailbox , final Supervisor maybeSupervisor , final Logger logger ) throws Exception { } } | // Refuse creation once the stage has stopped .
if ( isStopped ( ) ) { throw new IllegalStateException ( "Actor stage has been stopped." ) ; } // Use the supplied address / mailbox when given , otherwise allocate fresh ones .
final Address address = maybeAddress != null ? maybeAddress : world . addressFactory ( ) . uniqueWith ( definition . actorName ( ) ) ; if ( directory . isRegistered ( address ) ) { throw new IllegalStateException ( "Address already exists: " + address ) ; } final Mailbox mailbox = maybeMailbox != null ? maybeMailbox : ActorFactory . actorMailbox ( this , address , definition ) ; final Actor actor ; try { actor = ActorFactory . actorFor ( this , parent , definition , address , mailbox , maybeSupervisor , logger ) ; } catch ( Exception e ) { logger . log ( "Actor instantiation failed because: " + e . getMessage ( ) , e ) ; throw new IllegalArgumentException ( "Actor instantiation failed because: " + e . getMessage ( ) , e ) ; } // Register in the directory first , then run the pre - start life - cycle hook .
directory . register ( actor . address ( ) , actor ) ; actor . lifeCycle . beforeStart ( actor ) ; return actor ; |
public class AWSGlobalAcceleratorClient { /** * Create an endpoint group for the specified listener . An endpoint group is a collection of endpoints in one AWS
 * Region . To see an AWS CLI example of creating an endpoint group , scroll down to < b > Example < / b > .
 * @ param createEndpointGroupRequest the creation request .
 * @ return Result of the CreateEndpointGroup operation returned by the service .
 * @ throws AcceleratorNotFoundException the accelerator that you specified doesn ' t exist .
 * @ throws EndpointGroupAlreadyExistsException the endpoint group that you specified already exists .
 * @ throws ListenerNotFoundException the listener that you specified doesn ' t exist .
 * @ throws InternalServiceErrorException there was an internal error for AWS Global Accelerator .
 * @ throws InvalidArgumentException an argument that you specified is invalid .
 * @ throws LimitExceededException processing your request would cause you to exceed an AWS Global Accelerator limit .
 * @ sample AWSGlobalAccelerator . CreateEndpointGroup
 * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / globalaccelerator - 2018-08-08 / CreateEndpointGroup "
 * target = " _ top " > AWS API Documentation < / a > */
@ Override public CreateEndpointGroupResult createEndpointGroup ( CreateEndpointGroupRequest request ) { } } | // Run the standard pre - execution hook , then dispatch to the generated executor .
return executeCreateEndpointGroup ( beforeClientExecution ( request ) ) ; |
public class IncomingPhoneNumberUpdater { /** * Add the requested post parameters to the Request .
 * Only fields that were explicitly set ( non - null ) are posted ; enum - and URI - valued
 * fields are serialised via toString ( ) .
 * @ param request Request to add post params to */
private void addPostParams ( final Request request ) { } } | if ( accountSid != null ) { request . addPostParam ( "AccountSid" , accountSid ) ; } if ( apiVersion != null ) { request . addPostParam ( "ApiVersion" , apiVersion ) ; } if ( friendlyName != null ) { request . addPostParam ( "FriendlyName" , friendlyName ) ; } if ( smsApplicationSid != null ) { request . addPostParam ( "SmsApplicationSid" , smsApplicationSid ) ; } if ( smsFallbackMethod != null ) { request . addPostParam ( "SmsFallbackMethod" , smsFallbackMethod . toString ( ) ) ; } if ( smsFallbackUrl != null ) { request . addPostParam ( "SmsFallbackUrl" , smsFallbackUrl . toString ( ) ) ; } if ( smsMethod != null ) { request . addPostParam ( "SmsMethod" , smsMethod . toString ( ) ) ; } if ( smsUrl != null ) { request . addPostParam ( "SmsUrl" , smsUrl . toString ( ) ) ; } if ( statusCallback != null ) { request . addPostParam ( "StatusCallback" , statusCallback . toString ( ) ) ; } if ( statusCallbackMethod != null ) { request . addPostParam ( "StatusCallbackMethod" , statusCallbackMethod . toString ( ) ) ; } if ( voiceApplicationSid != null ) { request . addPostParam ( "VoiceApplicationSid" , voiceApplicationSid ) ; } if ( voiceCallerIdLookup != null ) { request . addPostParam ( "VoiceCallerIdLookup" , voiceCallerIdLookup . toString ( ) ) ; } if ( voiceFallbackMethod != null ) { request . addPostParam ( "VoiceFallbackMethod" , voiceFallbackMethod . toString ( ) ) ; } if ( voiceFallbackUrl != null ) { request . addPostParam ( "VoiceFallbackUrl" , voiceFallbackUrl . toString ( ) ) ; } if ( voiceMethod != null ) { request . addPostParam ( "VoiceMethod" , voiceMethod . toString ( ) ) ; } if ( voiceUrl != null ) { request . addPostParam ( "VoiceUrl" , voiceUrl . toString ( ) ) ; } if ( emergencyStatus != null ) { request . addPostParam ( "EmergencyStatus" , emergencyStatus . toString ( ) ) ; } if ( emergencyAddressSid != null ) { request . 
addPostParam ( "EmergencyAddressSid" , emergencyAddressSid ) ; } if ( trunkSid != null ) { request . addPostParam ( "TrunkSid" , trunkSid ) ; } if ( voiceReceiveMode != null ) { request . addPostParam ( "VoiceReceiveMode" , voiceReceiveMode . toString ( ) ) ; } if ( identitySid != null ) { request . addPostParam ( "IdentitySid" , identitySid ) ; } if ( addressSid != null ) { request . addPostParam ( "AddressSid" , addressSid ) ; } |
public class ReloadTablesRequest { /** * The name and schema of the table to be reloaded .
 * A null collection clears the field ; otherwise a defensive copy is stored .
 * @ param tablesToReload
 * The name and schema of the table to be reloaded . */
public void setTablesToReload ( java . util . Collection < TableToReload > tablesToReload ) { } } | // Store null as-is, otherwise snapshot the caller's collection into a fresh ArrayList.
this . tablesToReload = ( tablesToReload == null ) ? null : new java . util . ArrayList < TableToReload > ( tablesToReload ) ; |
public class VirtualMachineScaleSetVMsInner { /** * Gets a virtual machine from a VM scale set .
 * @ param resourceGroupName The name of the resource group .
 * @ param vmScaleSetName The name of the VM scale set .
 * @ param instanceId The instance ID of the virtual machine .
 * @ throws IllegalArgumentException thrown if parameters fail the validation
 * @ throws CloudException thrown if the request is rejected by server
 * @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
 * @ return the VirtualMachineScaleSetVMInner object if successful . */
public VirtualMachineScaleSetVMInner get ( String resourceGroupName , String vmScaleSetName , String instanceId ) { } } | // Blocking wrapper over the async variant : waits for the single response and unwraps its body .
return getWithServiceResponseAsync ( resourceGroupName , vmScaleSetName , instanceId ) . toBlocking ( ) . single ( ) . body ( ) ; |
public class HttpRequestMessageImpl { /** * Deserialize the method information from the input stream .
* @ param stream
* @ throws IOException
* @ throws ClassNotFoundException */
private void deserializeMethod ( ObjectInput stream ) throws IOException , ClassNotFoundException { } } | MethodValues method = null ; if ( SERIALIZATION_V2 == getDeserializationVersion ( ) ) { method = MethodValues . find ( readByteArray ( stream ) ) ; } else { method = MethodValues . find ( ( String ) stream . readObject ( ) ) ; } if ( null == method ) { throw new IOException ( "Missing method" ) ; } setMethod ( method ) ; |
public class CassandraSchemaMgr { /** * Return true if a keyspace with the given name exists . This method can be used with
 * any DB connection .
 * @ param dbConn Database connection to use .
 * @ param keyspace Keyspace name .
 * @ return True if it exists . */
public boolean keyspaceExists ( DBConn dbConn , String keyspace ) { } } | // describe_keyspace throws (e.g. NotFoundException) when the keyspace is absent;
// any failure is therefore treated as "does not exist".
try { dbConn . getClientSession ( ) . describe_keyspace ( keyspace ) ; } catch ( Exception notFound ) { return false ; }
return true ; |
public class StringStartWithMatcher { /** * Normalize matching definitions according to requirements of { @ link StartWithMatcher } .
 * @ param headingDefinitions
 * Key is the heading string , Value is its associated attachment object .
 * When the heading string is matched , the attachment object will be returned
 * as identifier .
 * @ return Matching definitions for usage of { @ link StartWithMatcher } . */
static protected List < MatchingDefinition > normalizeMatchingDefinitions ( Map < String , ? extends Object > headingDefinitions ) { } } | // exactMatchExample is automatically set to the same value as regularExpression ( the original heading string ) .
List < MatchingDefinition > l = new ArrayList < MatchingDefinition > ( headingDefinitions . size ( ) ) ; for ( Map . Entry < String , ? extends Object > e : headingDefinitions . entrySet ( ) ) { MatchingDefinition c = new MatchingDefinition ( ) ; c . setRegularExpression ( escapeForRegExp ( e . getKey ( ) ) ) ; c . setAttachment ( e . getValue ( ) ) ; c . setExactMatchExample ( e . getKey ( ) ) ; l . add ( c ) ; } return l ; |
public class Ifc2x3tc1FactoryImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
public IfcAnalysisTheoryTypeEnum createIfcAnalysisTheoryTypeEnumFromString ( EDataType eDataType , String initialValue ) { } } | IfcAnalysisTheoryTypeEnum result = IfcAnalysisTheoryTypeEnum . get ( initialValue ) ; if ( result == null ) throw new IllegalArgumentException ( "The value '" + initialValue + "' is not a valid enumerator of '" + eDataType . getName ( ) + "'" ) ; return result ; |
public class ObjectFactory { /** * Create an instance of { @ link JAXBElement } { @ code < } { @ link IdentifierType } { @ code > }
 * @ param value
 * Java instance representing xml element ' s value .
 * @ return
 * the new instance of { @ link JAXBElement } { @ code < } { @ link IdentifierType } { @ code > } */
@ XmlElementDecl ( namespace = "http://www.opengis.net/gml" , name = "methodID" ) public JAXBElement < IdentifierType > createMethodID ( IdentifierType value ) { } } | // JAXB element factory ; the null scope argument presumably denotes global ( top - level ) scope — confirm against the JAXBElement constructor contract .
return new JAXBElement < IdentifierType > ( _MethodID_QNAME , IdentifierType . class , null , value ) ; |
public class AWSGlueClient { /** * Retrieves the names of all DevEndpoint resources in this AWS account , or the resources with the specified tag .
* This operation allows you to see which resources are available in your account , and their names .
* This operation takes the optional < code > Tags < / code > field which you can use as a filter on the response so that
* tagged resources can be retrieved as a group . If you choose to use tags filtering , only resources with the tag
* will be retrieved .
* @ param listDevEndpointsRequest
* @ return Result of the ListDevEndpoints operation returned by the service .
* @ throws InvalidInputException
* The input provided was not valid .
* @ throws EntityNotFoundException
* A specified entity does not exist
* @ throws InternalServiceException
* An internal service error occurred .
* @ throws OperationTimeoutException
* The operation timed out .
* @ sample AWSGlue . ListDevEndpoints
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / glue - 2017-03-31 / ListDevEndpoints " target = " _ top " > AWS API
* Documentation < / a > */
@ Override public ListDevEndpointsResult listDevEndpoints ( ListDevEndpointsRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeListDevEndpoints ( request ) ; |
public class Client { /** * Take an IOException and the address we were trying to connect to
* and return an IOException with the input exception as the cause .
* The new exception provides the stack trace of the place where
* the exception is thrown and some extra diagnostics information .
* If the exception is ConnectException or SocketTimeoutException ,
* return a new one of the same type ; Otherwise return an IOException .
* @ param addr
* target address
* @ param exception
* the relevant exception
* @ return an exception to throw */
private IOException wrapException ( InetSocketAddress addr , IOException exception ) { } } | if ( exception instanceof ConnectException ) { // connection refused ; include the host : port in the error
return ( ConnectException ) new ConnectException ( "Call to " + addr + " failed on connection exception: " + exception ) . initCause ( exception ) ; } else if ( exception instanceof SocketTimeoutException ) { return ( SocketTimeoutException ) new SocketTimeoutException ( "Call to " + addr + " failed on socket timeout exception: " + exception ) . initCause ( exception ) ; } else { return ( IOException ) new IOException ( "Call to " + addr + " failed on local exception: " + exception ) . initCause ( exception ) ; } |
public class Signature {
    /**
     * Returns a human-readable count of the arguments in the provided signature string:
     * "0" for an empty argument list, "1 or more" when a varargs suffix is present,
     * otherwise the number of comma-separated arguments.
     *
     * @param s signature string to inspect
     * @return the argument count as a {@link String}
     */
    private static String countArgs(final String s) {
        // NOTE(review): assumes ARGS_REGEX splits around the argument list so that
        // index 1 holds the arguments; if the regex does not match, args[1] would
        // throw ArrayIndexOutOfBoundsException — TODO confirm callers guarantee a match.
        String[] args = ARGS_REGEX.split(s);
        if ("".equals(args[1])) return "0";
        String[] argarray = args[1].split(",");
        // Any argument carrying the varargs suffix makes the count open-ended.
        for (final String arg : argarray)
            if (arg.contains(VARARGS_SUFFIX)) return "1 or more";
        return valueOf(argarray.length);
    }
}
public class RawPacket { /** * Read an unsigned integer as long at specified offset
* @ param off start offset of this unsigned integer
* @ return unsigned integer as long at offset */
public long readUnsignedIntAsLong ( int off ) { } } | buffer . position ( off ) ; return ( ( ( long ) ( buffer . get ( ) & 0xff ) << 24 ) | ( ( long ) ( buffer . get ( ) & 0xff ) << 16 ) | ( ( long ) ( buffer . get ( ) & 0xff ) << 8 ) | ( ( long ) ( buffer . get ( ) & 0xff ) ) ) & 0xFFFFFFFFL ; |
public class ConfigReference {
    /**
     * Re-expresses this reference relative to the given path prefix by prepending
     * {@code prefix} to the substitution expression's path and extending the
     * recorded prefix length accordingly.
     *
     * NOTE(review): the original comment flags this method as "broken" — the
     * nature of the defect is not visible from this code; confirm against the
     * upstream issue before relying on it.
     */
    @Override
    ConfigReference relativized(Path prefix) {
        SubstitutionExpression newExpr = expr.changePath(expr.path().prepend(prefix));
        return new ConfigReference(origin(), newExpr, prefixLength + prefix.length());
    }
}
public class DescribeFileSystemsResult { /** * An array of file system descriptions .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setFileSystems ( java . util . Collection ) } or { @ link # withFileSystems ( java . util . Collection ) } if you want to
* override the existing values .
* @ param fileSystems
* An array of file system descriptions .
* @ return Returns a reference to this object so that method calls can be chained together . */
public DescribeFileSystemsResult withFileSystems ( FileSystemDescription ... fileSystems ) { } } | if ( this . fileSystems == null ) { setFileSystems ( new com . amazonaws . internal . SdkInternalList < FileSystemDescription > ( fileSystems . length ) ) ; } for ( FileSystemDescription ele : fileSystems ) { this . fileSystems . add ( ele ) ; } return this ; |
public class HpelCBEFormatter {
    /**
     * Appends the CBE sourceComponentId XML element of a record to a string builder.
     * Host address/type fall back to the empty string when the corresponding header
     * properties are absent.
     *
     * @param sb the string builder the element will be added to
     * @param record the record that represents the common base event
     */
    private void createSourceElement(StringBuilder sb, RepositoryLogRecord record) {
        // Null-safe reads: missing header values are rendered as empty attributes.
        String hostAddr = headerProps.getProperty(ServerInstanceLogRecordList.HEADER_HOSTADDRESS) == null ? "" : headerProps.getProperty(ServerInstanceLogRecordList.HEADER_HOSTADDRESS);
        String hostType = headerProps.getProperty(ServerInstanceLogRecordList.HEADER_HOSTTYPE) == null ? "" : headerProps.getProperty(ServerInstanceLogRecordList.HEADER_HOSTTYPE);
        // 660484: uses the supplied StringBuilder directly instead of string concatenation.
        sb.append(lineSeparator).append(INDENT[0]).append("<sourceComponentId component=\"Logging\" componentIdType=\"Application\"");
        sb.append(" executionEnvironment=\"Java\" instanceId=\"").append(headerProps.getProperty(ServerInstanceLogRecordList.HEADER_SERVER_NAME)).append("\"");
        sb.append(" location=\"").append(hostAddr).append("\" locationType=\"").append(hostType).append("\"");
        sb.append(" processId=\"").append(headerProps.getProperty(ServerInstanceLogRecordList.HEADER_PROCESSID)).append("\"").append(" subComponent=\"Logger\"");
        sb.append(" threadId=\"").append(record.getExtension(RepositoryLogRecord.PTHREADID)).append("\"");
        sb.append(" componentType=\"Logging_Application\"/>");
    }
}
public class Step { /** * List of screenshot Urls for the execution step , if relevant .
* @ param screenshots
* List of screenshot Urls for the execution step , if relevant .
* @ return Returns a reference to this object so that method calls can be chained together . */
public Step withScreenshots ( java . util . Map < String , String > screenshots ) { } } | setScreenshots ( screenshots ) ; return this ; |
public class S3CryptoModuleBase {
    /**
     * Creates and returns a non-null content crypto material for the given request.
     * Resolution order: (1) per-request encryption materials, (2) per-request
     * material description, (3) the s3-client-level encryption materials.
     *
     * @throws SdkClientException if no encryption material can be found
     */
    protected final ContentCryptoMaterial createContentCryptoMaterial(AmazonWebServiceRequest req) {
        if (req instanceof EncryptionMaterialsFactory) {
            // per request level encryption materials
            EncryptionMaterialsFactory f = (EncryptionMaterialsFactory) req;
            final EncryptionMaterials materials = f.getEncryptionMaterials();
            if (materials != null) {
                return buildContentCryptoMaterial(materials, req);
            }
        }
        if (req instanceof MaterialsDescriptionProvider) {
            // per request level material description
            MaterialsDescriptionProvider mdp = (MaterialsDescriptionProvider) req;
            Map<String, String> matdesc_req = mdp.getMaterialsDescription();
            ContentCryptoMaterial ccm = newContentCryptoMaterial(kekMaterialsProvider, matdesc_req, cryptoConfig.getCryptoProvider(), req);
            if (ccm != null)
                return ccm;
            if (matdesc_req != null) {
                // check to see if KMS is in use and if so we should fall thru
                // to the s3 client level encryption material
                EncryptionMaterials material = kekMaterialsProvider.getEncryptionMaterials();
                if (!material.isKMSEnabled()) {
                    // Non-KMS material with an unmatched description is a hard failure.
                    throw new SdkClientException("No material available from the encryption material provider for description " + matdesc_req);
                }
            }
            // if there is no material description, fall thru to use
            // the per s3 client level encryption materials
        }
        // per s3 client level encryption materials
        return newContentCryptoMaterial(this.kekMaterialsProvider, cryptoConfig.getCryptoProvider(), req);
    }
}
public class AwsClientBuilder { /** * Sets the time offset used for IAM token refresh by the DefaultTokenManager .
* This should only be over written for a dev or staging environment
* @ param offset , percentage of token life before expiration that token should be refreshed .
* @ return This object for method chaining . */
public Subclass withIAMTokenRefresh ( double offset ) { } } | this . iamTokenRefreshOffset = offset ; if ( ( offset > 0 ) && ( this . credentials . getCredentials ( ) instanceof IBMOAuthCredentials ) && ( ( IBMOAuthCredentials ) this . credentials . getCredentials ( ) ) . getTokenManager ( ) instanceof DefaultTokenManager ) { ( ( DefaultTokenManager ) ( ( IBMOAuthCredentials ) this . credentials . getCredentials ( ) ) . getTokenManager ( ) ) . setIamRefreshOffset ( iamTokenRefreshOffset ) ; } return getSubclass ( ) ; |
public class DeploymentConfigMarshaller {
    /**
     * Marshalls the given DeploymentConfig into the protocol marshaller.
     *
     * @throws SdkClientException if the argument is null or marshalling fails
     */
    public void marshall(DeploymentConfig deploymentConfig, ProtocolMarshaller protocolMarshaller) {
        if (deploymentConfig == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Marshall each field against its static binding descriptor.
            protocolMarshaller.marshall(deploymentConfig.getConcurrentDeploymentPercentage(), CONCURRENTDEPLOYMENTPERCENTAGE_BINDING);
            protocolMarshaller.marshall(deploymentConfig.getFailureThresholdPercentage(), FAILURETHRESHOLDPERCENTAGE_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class CovariantTypes { /** * Returns < tt > true < / tt > if < tt > type2 < / tt > is a " sub - variable " of < tt > type1 < / tt > , i . e . if they are equal or if
* < tt > type2 < / tt > ( transitively ) extends < tt > type1 < / tt > . */
private static boolean isAssignableFrom ( TypeVariable < ? > type1 , TypeVariable < ? > type2 ) { } } | if ( type1 . equals ( type2 ) ) { return true ; } // if a type variable extends another type variable , it cannot declare other bounds
if ( type2 . getBounds ( ) [ 0 ] instanceof TypeVariable < ? > ) { return isAssignableFrom ( type1 , ( TypeVariable < ? > ) type2 . getBounds ( ) [ 0 ] ) ; } return false ; |
public class BasicEvaluationCtx { /** * This is quick helper function to provide a little structure for the
* subject attributes so we can search for them ( somewhat ) quickly . The
* basic idea is to have a map indexed by SubjectCategory that keeps
* Maps that in turn are indexed by id and keep the unique ctx . Attribute
* objects . */
private void setupSubjects ( List < Subject > subjects ) throws ParsingException { } } | // make sure that there is at least one Subject
if ( subjects . size ( ) == 0 ) throw new ParsingException ( "Request must a contain subject" ) ; // now go through the subject attributes
Iterator < Subject > it = subjects . iterator ( ) ; while ( it . hasNext ( ) ) { Subject subject = it . next ( ) ; URI category = subject . getCategory ( ) ; Map < String , List < Attribute > > categoryMap = null ; // see if we ' ve already got a map for the category
if ( subjectMap . containsKey ( category ) ) { categoryMap = subjectMap . get ( category ) ; } else { categoryMap = new HashMap < String , List < Attribute > > ( ) ; subjectMap . put ( category , categoryMap ) ; } // iterate over the set of attributes
Iterator attrIterator = subject . getAttributesAsList ( ) . iterator ( ) ; while ( attrIterator . hasNext ( ) ) { Attribute attr = ( Attribute ) ( attrIterator . next ( ) ) ; String id = attr . getId ( ) . toString ( ) ; if ( categoryMap . containsKey ( id ) ) { // add to the existing set of Attributes w / this id
List existingIds = ( List ) ( categoryMap . get ( id ) ) ; existingIds . add ( attr ) ; } else { // this is the first Attr w / this id
List newIds = new ArrayList ( ) ; newIds . add ( attr ) ; categoryMap . put ( id , newIds ) ; } } } |
public class DefaultDispatchChallengeHandler {
    /**
     * Tokenizes a given string assuming it is like a URL. Emits tokens in a fixed
     * order: reversed host labels, then port, then user-info, then path segments.
     *
     * @param s the string to be parsed as a wildcard-able URI
     * @return the array of tokens of URI parts
     * @throws IllegalArgumentException when the string cannot be parsed as a wildcard-able URI
     */
    List<Token<UriElement>> tokenize(String s) throws IllegalArgumentException {
        if (s == null || s.length() == 0) {
            return new ArrayList<Token<UriElement>>();
        }
        // Make sure if a scheme is not specified, we default one before we parse as a URI.
        if (!SCHEME_URI_PATTERN.matcher(s).matches()) {
            s = ("http://") + s;
        }
        // Parse as a URI
        URI uri = URI.create(s);
        // Detect what the scheme is, if any.
        List<Token<UriElement>> result = new ArrayList<Token<UriElement>>(10);
        String scheme = "http";
        if (uri.getScheme() != null) {
            scheme = uri.getScheme();
        }
        // A wildcard-ed hostname is parsed as an authority (URI.getHost() returns
        // null for hosts containing '*'), so fall back to hand-parsing the
        // authority into user-info, host and port.
        String host = uri.getHost();
        String parsedPortFromAuthority = null;
        String parsedUserInfoFromAuthority = null;
        String userFromAuthority = null;
        String passwordFromAuthority = null;
        if (host == null) {
            String authority = uri.getAuthority();
            if (authority != null) {
                host = authority;
                // Split "user:pass@host:port" on '@' first, then ':' within each part.
                int asteriskIdx = host.indexOf("@");
                if (asteriskIdx >= 0) {
                    parsedUserInfoFromAuthority = host.substring(0, asteriskIdx);
                    host = host.substring(asteriskIdx + 1);
                    int colonIdx = parsedUserInfoFromAuthority.indexOf(":");
                    if (colonIdx >= 0) {
                        userFromAuthority = parsedUserInfoFromAuthority.substring(0, colonIdx);
                        passwordFromAuthority = parsedUserInfoFromAuthority.substring(colonIdx + 1);
                    }
                }
                int colonIdx = host.indexOf(":");
                if (colonIdx >= 0) {
                    parsedPortFromAuthority = host.substring(colonIdx + 1);
                    host = host.substring(0, colonIdx);
                }
            } else {
                throw new IllegalArgumentException("Hostname is required.");
            }
        }
        // Split the host and reverse it for the tokenization
        // (so "a.b.com" tokenizes as com, b, a).
        List<String> hostParts = Arrays.asList(host.split("\\."));
        Collections.reverse(hostParts);
        for (String hostPart : hostParts) {
            result.add(new Token<UriElement>(hostPart, UriElement.HOST));
        }
        // Port precedence: explicit in authority > explicit in URI > scheme default.
        if (parsedPortFromAuthority != null) {
            result.add(new Token<UriElement>(parsedPortFromAuthority, UriElement.PORT));
        } else if (uri.getPort() > 0) {
            result.add(new Token<UriElement>(String.valueOf(uri.getPort()), UriElement.PORT));
        } else if (getDefaultPort(scheme) > 0) {
            result.add(new Token<UriElement>(String.valueOf(getDefaultPort(scheme)), UriElement.PORT));
        }
        // User-info: emit user and password as separate tokens when both present,
        // otherwise the raw user-info string as a single token.
        if (parsedUserInfoFromAuthority != null) {
            if (userFromAuthority != null) {
                result.add(new Token<UriElement>(userFromAuthority, UriElement.USERINFO));
            }
            if (passwordFromAuthority != null) {
                result.add(new Token<UriElement>(passwordFromAuthority, UriElement.USERINFO));
            }
            if (userFromAuthority == null && passwordFromAuthority == null) {
                result.add(new Token<UriElement>(parsedUserInfoFromAuthority, UriElement.USERINFO));
            }
        } else if (uri.getUserInfo() != null) {
            String userInfo = uri.getUserInfo();
            int colonIdx = userInfo.indexOf(":");
            if (colonIdx >= 0) {
                result.add(new Token<UriElement>(userInfo.substring(0, colonIdx), UriElement.USERINFO));
                result.add(new Token<UriElement>(userInfo.substring(colonIdx + 1), UriElement.USERINFO));
            } else {
                result.add(new Token<UriElement>(uri.getUserInfo(), UriElement.USERINFO));
            }
        }
        // Path: drop the leading '/' and emit one token per segment.
        if (isNotBlank(uri.getPath())) {
            String path = uri.getPath();
            if (path.startsWith("/")) {
                path = path.substring(1);
            }
            if (isNotBlank(path)) {
                for (String p : path.split("/")) {
                    result.add(new Token<UriElement>(p, UriElement.PATH));
                }
            }
        }
        return result;
    }
}
public class DelegatedLoopResources { /** * Creates new loop resources for server side .
* @ param workerGroup worker pool
* @ return loop resources */
public static DelegatedLoopResources newServerLoopResources ( EventLoopGroup workerGroup ) { } } | EventLoopGroup bossGroup = Epoll . isAvailable ( ) ? new EpollEventLoopGroup ( BOSS_THREADS_NUM , BOSS_THREAD_FACTORY ) : new NioEventLoopGroup ( BOSS_THREADS_NUM , BOSS_THREAD_FACTORY ) ; return new DelegatedLoopResources ( bossGroup , workerGroup ) ; |
public class AudioParametersMarshaller {
    /**
     * Marshalls the given AudioParameters into the protocol marshaller.
     *
     * @throws SdkClientException if the argument is null or marshalling fails
     */
    public void marshall(AudioParameters audioParameters, ProtocolMarshaller protocolMarshaller) {
        if (audioParameters == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Marshall each field against its static binding descriptor.
            protocolMarshaller.marshall(audioParameters.getCodec(), CODEC_BINDING);
            protocolMarshaller.marshall(audioParameters.getSampleRate(), SAMPLERATE_BINDING);
            protocolMarshaller.marshall(audioParameters.getBitRate(), BITRATE_BINDING);
            protocolMarshaller.marshall(audioParameters.getChannels(), CHANNELS_BINDING);
            protocolMarshaller.marshall(audioParameters.getAudioPackingMode(), AUDIOPACKINGMODE_BINDING);
            protocolMarshaller.marshall(audioParameters.getCodecOptions(), CODECOPTIONS_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class PatternStream {
    /**
     * Applies a flat select function to the detected pattern sequence. For each
     * pattern sequence the provided {@link PatternFlatSelectFunction} is called.
     * The pattern flat select function can produce an arbitrary number of
     * resulting elements.
     *
     * @param patternFlatSelectFunction the pattern flat select function which is
     *        called for each detected pattern sequence
     * @param <R> type of the resulting elements
     * @return {@link DataStream} which contains the resulting elements from the
     *         pattern flat select function
     */
    public <R> SingleOutputStreamOperator<R> flatSelect(final PatternFlatSelectFunction<T, R> patternFlatSelectFunction) {
        // we have to extract the output type from the provided pattern selection function manually
        // because the TypeExtractor cannot do that if the method is wrapped in a MapFunction
        final TypeInformation<R> outTypeInfo = TypeExtractor.getUnaryOperatorReturnType(
                patternFlatSelectFunction,
                PatternFlatSelectFunction.class,
                0,
                1,
                new int[]{1, 0},
                builder.getInputType(),
                null,
                false);
        // Delegate to the overload that takes an explicit output type.
        return flatSelect(patternFlatSelectFunction, outTypeInfo);
    }
}
public class QuantilesCallback {
    /**
     * Called when a split is stopped; if buckets have been initialized, the
     * value is added to the appropriate bucket (handled by onStopwatchSplit).
     */
    @Override
    public void onStopwatchStop(Split split, StopwatchSample sample) {
        // Stop and split are handled identically: record the split's value.
        onStopwatchSplit(split.getStopwatch(), split);
    }
}
public class AbstractTransactionalOperationBasedList { @ Override public Iterator < E > iterator ( DelegatingList < E > delegatingList ) { } } | return new IteratorWithRemoveHandler ( resolveOperation ( delegatingList ) . iterator ( ) ) ; |
public class UCharacter {
    /**
     * Same as {@link #getPropertyValueEnum(int, CharSequence)}, except doesn't
     * throw an exception. Instead, returns UProperty.UNDEFINED.
     *
     * @param property same as {@link #getPropertyValueEnum(int, CharSequence)}
     * @param valueAlias same as {@link #getPropertyValueEnum(int, CharSequence)}
     * @return UProperty.UNDEFINED if the value is not valid, otherwise the value
     * @deprecated This API is ICU internal only.
     */
    @Deprecated
    public static int getPropertyValueEnumNoThrow(int property, CharSequence valueAlias) {
        // Delegate to the singleton alias table's non-throwing lookup.
        return UPropertyAliases.INSTANCE.getPropertyValueEnumNoThrow(property, valueAlias);
    }
}
public class ElementsExceptionsFactory {
    /**
     * Constructs and initializes a new {@link ServiceInvocationException} with the
     * given {@link Throwable cause} and {@link String message} formatted with the
     * given {@link Object[] arguments}.
     *
     * @param cause {@link Throwable} identified as the reason this exception was thrown
     * @param message {@link String} describing the exception
     * @param args {@link Object[] arguments} used to replace format placeholders in the message
     * @return a new {@link ServiceInvocationException} with the given cause and message
     * @see org.cp.elements.service.ServiceInvocationException
     */
    public static ServiceInvocationException newServiceInvocationException(Throwable cause, String message, Object... args) {
        // Format the message first, then attach the cause.
        return new ServiceInvocationException(format(message, args), cause);
    }
}
public class VortexWorker {
    /**
     * Executes an aggregation request from the
     * {@link org.apache.reef.vortex.driver.VortexDriver}: schedules the tasklet,
     * runs the aggregate function asynchronously on the command executor, and
     * reports completion or failure back to the aggregate container.
     */
    private void executeAggregateTasklet(final ExecutorService commandExecutor, final MasterToWorkerRequest masterToWorkerRequest) {
        final TaskletAggregateExecutionRequest taskletAggregateExecutionRequest =
                (TaskletAggregateExecutionRequest) masterToWorkerRequest;
        // The aggregate function must have been registered before any execution request arrives.
        assert aggregates.containsKey(taskletAggregateExecutionRequest.getAggregateFunctionId());
        final AggregateContainer aggregateContainer = aggregates.get(taskletAggregateExecutionRequest.getAggregateFunctionId());
        final TaskletAggregationRequest aggregationRequest = aggregateContainer.getTaskletAggregationRequest();
        commandExecutor.submit(new Runnable() {
            @Override
            public void run() {
                try {
                    // Mark the tasklet as scheduled before running the function,
                    // then report its result to the container.
                    aggregateContainer.scheduleTasklet(taskletAggregateExecutionRequest.getTaskletId());
                    final Object result = aggregationRequest.executeFunction(taskletAggregateExecutionRequest.getInput());
                    aggregateContainer.taskletComplete(taskletAggregateExecutionRequest.getTaskletId(), result);
                } catch (final Exception e) {
                    // Failures are reported to the container rather than propagated.
                    aggregateContainer.taskletFailed(taskletAggregateExecutionRequest.getTaskletId(), e);
                }
            }
        });
    }
}
public class ArbitrateFactory {
    /**
     * Returns the arbitration resource of the given type for the given pipelineId.
     * Each instanceClass is required to expose a constructor taking pipelineId as
     * its only parameter.
     *
     * NOTE(review): a commented-out earlier implementation performed double-checked
     * lazy initialization here — creating the per-pipeline resource map and
     * reflectively constructing + autowiring the instance on first access. The
     * live code below assumes both cache levels are already populated; it will
     * throw NullPointerException if the pipelineId (or the instance) was never
     * registered — TODO confirm initialization happens elsewhere.
     */
    public static <T extends ArbitrateLifeCycle> T getInstance(Long pipelineId, Class<T> instanceClass) {
        return (T) cache.get(pipelineId).get(instanceClass);
    }
}
public class DirectoryLookupService { /** * Remove the NotificationHandler from the Service .
* @ param serviceName the service name .
* @ param handler the NotificationHandler for the service . */
public void removeNotificationHandler ( String serviceName , NotificationHandler handler ) { } } | ServiceInstanceUtils . validateServiceName ( serviceName ) ; if ( handler == null ) { throw new ServiceException ( ErrorCode . SERVICE_DIRECTORY_NULL_ARGUMENT_ERROR , ErrorCode . SERVICE_DIRECTORY_NULL_ARGUMENT_ERROR . getMessageTemplate ( ) , "NotificationHandler" ) ; } List < InstanceChangeListener < ModelServiceInstance > > list = changeListenerMap . get ( serviceName ) ; if ( list != null ) { boolean found = false ; for ( InstanceChangeListener < ModelServiceInstance > listener : list ) { if ( listener instanceof NotificationHandlerAdapter && ( ( NotificationHandlerAdapter ) listener ) . getAdapter ( ) == handler ) { list . remove ( listener ) ; found = true ; break ; } } if ( ! found ) { LOGGER . error ( ErrorCode . NOTIFICATION_HANDLER_DOES_NOT_EXIST . getMessageTemplate ( ) ) ; } } else { LOGGER . error ( String . format ( ErrorCode . SERVICE_DOES_NOT_EXIST . getMessageTemplate ( ) , serviceName ) ) ; } |
public class PAbstractObject { /** * Get a property as an int or default value .
* @ param key the property name
* @ param defaultValue the default value */
@ Override public final Integer optInt ( final String key , final Integer defaultValue ) { } } | Integer result = optInt ( key ) ; return result == null ? defaultValue : result ; |
public class PrimaveraReader {
    /**
     * Configures a new user defined field by reserving the next free custom field
     * of the appropriate class/data type and aliasing it to the supplied name.
     *
     * @param fieldType field type
     * @param dataType field data type
     * @param name field name
     */
    private void addUserDefinedField(FieldTypeClass fieldType, UserFieldDataType dataType, String name) {
        try {
            switch (fieldType) {
                case TASK:
                    // Skip fields already claimed by task or WBS mappings.
                    TaskField taskField;
                    do {
                        taskField = m_taskUdfCounters.nextField(TaskField.class, dataType);
                    } while (m_taskFields.containsKey(taskField) || m_wbsFields.containsKey(taskField));
                    m_project.getCustomFields().getCustomField(taskField).setAlias(name);
                    break;
                case RESOURCE:
                    ResourceField resourceField;
                    do {
                        resourceField = m_resourceUdfCounters.nextField(ResourceField.class, dataType);
                    } while (m_resourceFields.containsKey(resourceField));
                    m_project.getCustomFields().getCustomField(resourceField).setAlias(name);
                    break;
                case ASSIGNMENT:
                    AssignmentField assignmentField;
                    do {
                        assignmentField = m_assignmentUdfCounters.nextField(AssignmentField.class, dataType);
                    } while (m_assignmentFields.containsKey(assignmentField));
                    m_project.getCustomFields().getCustomField(assignmentField).setAlias(name);
                    break;
                default:
                    break;
            }
        } catch (Exception ex) {
            // SF #227: deliberately swallowed. If we get an exception thrown here
            // it's likely that we've run out of user defined fields (for example
            // there are only 30 TEXT fields). The user defined field won't be
            // mapped to an alias, so its values are ignored when read in.
        }
    }
}
public class GeneralValidator { /** * Sets teh transformers transforming the output of each data provider before they are mapped to the rules .
* @ param dataProviderOutputTransformers Data provider output transformers . */
public void setDataProviderOutputTransformers ( Collection < Transformer > dataProviderOutputTransformers ) { } } | if ( dataProviderOutputTransformers == null ) { this . dataProviderOutputTransformers = null ; } else { this . dataProviderOutputTransformers = new ArrayList < Transformer > ( ) ; this . dataProviderOutputTransformers . addAll ( dataProviderOutputTransformers ) ; } |
public class ArrayContainer { /** * the illegal container does not return it . */
private void increaseCapacity ( boolean allowIllegalSize ) { } } | int newCapacity = ( this . content . length == 0 ) ? DEFAULT_INIT_SIZE : this . content . length < 64 ? this . content . length * 2 : this . content . length < 1067 ? this . content . length * 3 / 2 : this . content . length * 5 / 4 ; // never allocate more than we will ever need
if ( newCapacity > ArrayContainer . DEFAULT_MAX_SIZE && ! allowIllegalSize ) { newCapacity = ArrayContainer . DEFAULT_MAX_SIZE ; } // if we are within 1/16th of the max , go to max
if ( newCapacity > ArrayContainer . DEFAULT_MAX_SIZE - ArrayContainer . DEFAULT_MAX_SIZE / 16 && ! allowIllegalSize ) { newCapacity = ArrayContainer . DEFAULT_MAX_SIZE ; } this . content = Arrays . copyOf ( this . content , newCapacity ) ; |
public class DivSufSort {
    /**
     * Special version of ss_compare for handling the
     * <code>ss_compare(T, &(PAi[0]), PA + *a, depth)</code> situation:
     * compares two suffixes byte-by-byte from the given depth.
     */
    private int ssCompare(int pa, int pb, int p2, int depth) {
        int U1, U2, U1n, U2n; // pointers to T
        // Advance both cursors while bytes match and neither has hit its end bound.
        for (U1 = depth + pa, U2 = depth + SA[p2], U1n = pb + 2, U2n = SA[p2 + 1] + 2;
                (U1 < U1n) && (U2 < U2n) && (T[start + U1] == T[start + U2]); ++U1, ++U2) {
        }
        // Ordering: exhausted side sorts lower; otherwise compare the mismatching bytes.
        return U1 < U1n ? (U2 < U2n ? T[start + U1] - T[start + U2] : 1) : (U2 < U2n ? -1 : 0);
    }
}
public class ObjectsApi {
    /**
     * Gets DNs (directory numbers) or agent groups from Configuration Server
     * with the specified DBIDs.
     *
     * @param DBIDs list of DBIDs of DNs to be fetched (required)
     * @return Results object which includes the list of DNs with the specified
     *         DBIDs and the total count
     * @throws ProvisioningApiException if the call is unsuccessful
     */
    public Results<Dn> getDnsByDBIDs(List<String> DBIDs) throws ProvisioningApiException {
        // Delegate to the full overload with all optional filter parameters unset.
        return getDnsByDBIDs(null, null, null, null, null, null, null, DBIDs);
    }
}
public class SlackActivity { /** * The source of Slack message can be either Json asset or process variable . */
public JSONObject getMessage ( ) throws ActivityException { } } | String message = null ; String slackMessageName = getAttributeValueSmart ( SLACK_MESSAGE ) ; if ( slackMessageName == null ) throw new ActivityException ( "slack message attribute is not set" ) ; Asset template = AssetCache . getAsset ( slackMessageName ) ; if ( template == null ) { message = slackMessageName ; } else { message = context . evaluateToString ( template . getStringContent ( ) ) ; } JSONObject json = new JSONObject ( ) ; String env = ApplicationContext . getRuntimeEnvironment ( ) . toUpperCase ( ) ; json . put ( "text" , env + " - " + getSlackPrefix ( ) + " - " + message ) ; String altText = null ; if ( json . has ( "text" ) ) { String text = json . getString ( "text" ) ; if ( text . length ( ) > 200 ) altText = text . substring ( 0 , 197 ) + "..." ; } if ( altText != null ) json . put ( "text" , altText ) ; return json ; |
public class Promises {
    /**
     * Picks the first {@code Promise} that was completed without exception.
     *
     * @see Promises#first(BiPredicate, Iterator)
     */
    @NotNull
    @SafeVarargs
    public static <T> Promise<T> firstSuccessful(AsyncSupplier<? extends T>... promises) {
        // Delegate to first() with the "completed with a result" predicate.
        return first(isResult(), promises);
    }
}
public class WebXmlScannerPlugin {
    /**
     * Creates a filter mapping descriptor, linking it to its filter, its URL
     * patterns or target servlet, and its dispatcher types.
     *
     * @param filterMappingType the XML filter mapping type
     * @param filters the map of known filters
     * @param servlets the map of known servlets
     * @param store the store
     * @return the filter mapping descriptor
     */
    private FilterMappingDescriptor createFilterMapping(FilterMappingType filterMappingType, Map<String, FilterDescriptor> filters, Map<String, ServletDescriptor> servlets, Store store) {
        FilterMappingDescriptor filterMappingDescriptor = store.create(FilterMappingDescriptor.class);
        // Resolve (or lazily create) the filter this mapping belongs to and attach the mapping.
        FilterNameType filterName = filterMappingType.getFilterName();
        FilterDescriptor filterDescriptor = getOrCreateNamedDescriptor(FilterDescriptor.class, filterName.getValue(), filters, store);
        filterDescriptor.getMappings().add(filterMappingDescriptor);
        // A mapping entry may reference URL patterns and/or a servlet name.
        for (Object urlPatternOrServletName : filterMappingType.getUrlPatternOrServletName()) {
            if (urlPatternOrServletName instanceof UrlPatternType) {
                UrlPatternType urlPatternType = (UrlPatternType) urlPatternOrServletName;
                UrlPatternDescriptor urlPatternDescriptor = createUrlPattern(urlPatternType, store);
                filterMappingDescriptor.getUrlPatterns().add(urlPatternDescriptor);
            } else if (urlPatternOrServletName instanceof ServletNameType) {
                ServletNameType servletNameType = (ServletNameType) urlPatternOrServletName;
                ServletDescriptor servletDescriptor = getOrCreateNamedDescriptor(ServletDescriptor.class, servletNameType.getValue(), servlets, store);
                filterMappingDescriptor.setServlet(servletDescriptor);
            }
        }
        // Record each dispatcher type (REQUEST, FORWARD, ...) declared on the mapping.
        for (DispatcherType dispatcherType : filterMappingType.getDispatcher()) {
            DispatcherDescriptor dispatcherDescriptor = store.create(DispatcherDescriptor.class);
            dispatcherDescriptor.setValue(dispatcherType.getValue());
            filterMappingDescriptor.getDispatchers().add(dispatcherDescriptor);
        }
        return filterMappingDescriptor;
    }
}
public class CssScanner {
    /**
     * With the incoming builder containing a valid NUMBER, and the next char being
     * a valid QNTSTART, scans the unit suffix, refines the token type (defaulting
     * to DIMEN, overridden by a specific quantity literal match) and appends the
     * suffix to the builder.
     */
    private void _quantity() throws IOException, CssException {
        if (debug) {
            // Sanity checks (debug builds only): next char starts a quantity and
            // the builder currently ends with a number.
            int ch = reader.peek();
            checkState(QNTSTART.matches((char) ch) || isNextEscape());
            checkState(builder.getLength() > 0 && NUM.matches(builder.getLast()));
        }
        /* Assume we have a {num}{ident} instance (DIMEN), and then override
         * that if a specific quantity literal is found. */
        builder.type = Type.QNTY_DIMEN;
        TokenBuilder suffix = new TokenBuilder(reader, errHandler, locale);
        append(QNTSTART, suffix);
        if (suffix.getLast() != '%') {
            // QNTSTART = NMSTART | '%'
            append(NMCHAR, suffix);
        }
        if (suffix.getLength() > QNT_TOKEN_MAXLENGTH) {
            // longer than max length in quantities map — cannot be a known literal
            builder.append(suffix.toString());
            return;
        }
        // shorter or equal to max length in quantities map:
        // we might have a more specific match among the known quantity literals
        final int[] ident = suffix.toArray();
        int[] match = null;
        for (int[] test : quantities.keySet()) {
            if (equals(ident, test, true)) {
                builder.type = quantities.get(test);
                match = test;
                break;
            }
        }
        if (builder.type == Type.QNTY_DIMEN) {
            // No specific literal matched: keep the scanned suffix verbatim.
            builder.append(ident);
        } else {
            if (debug) {
                checkState(match != null);
            }
            builder.append(match);
        }
    }
}
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.