signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class SQLWhere { /** * Append SQL . * @ param _ tablePrefix the table prefix * @ param _ cmd the cmd */ protected void appendSQL ( final String _tablePrefix , final StringBuilder _cmd ) { } }
if ( sections . size ( ) > 0 ) { if ( isStarted ( ) ) { new SQLSelectPart ( SQLPart . AND ) . appendSQL ( _cmd ) ; new SQLSelectPart ( SQLPart . SPACE ) . appendSQL ( _cmd ) ; } else { new SQLSelectPart ( SQLPart . WHERE ) . appendSQL ( _cmd ) ; new SQLSelectPart ( SQLPart . SPACE ) . appendSQL ( _cmd ) ; } addSectionsSQL ( _tablePrefix , _cmd , sections ) ; }
public class AfplibFactoryImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public ImageEncodingRECID createImageEncodingRECIDFromString ( EDataType eDataType , String initialValue ) { } }
ImageEncodingRECID result = ImageEncodingRECID . get ( initialValue ) ; if ( result == null ) throw new IllegalArgumentException ( "The value '" + initialValue + "' is not a valid enumerator of '" + eDataType . getName ( ) + "'" ) ; return result ;
public class NormalisedOntology {
    /**
     * Processes the axioms in normal form 1 (NF1: A1 + ... + An [ B) from a set
     * of axioms added incrementally:
     * <ol>
     * <li>Collects the new NF1a/NF1b queue entries into a delta map keyed by
     * the left-hand-side concept.</li>
     * <li>Joins the delta map against the existing subsumptions S to find the
     * contexts affected by the new axioms.</li>
     * <li>Adds the derived queue entries to the corresponding contexts and
     * activates them so they are re-processed.</li>
     * </ol>
     *
     * @param as the set of axioms added incrementally
     * @param subsumptions the current subsumption relation S (a -> S(a))
     */
    private void rePrimeNF1(AxiomSet as, IConceptMap<IConceptSet> subsumptions) {
        // Nothing to do when the increment contains no NF1 axioms.
        int size = as.getNf1aAxioms().size() + as.getNf1bAxioms().size();
        if (size == 0)
            return;

        // deltaNF1: lhs concept -> new queue entries introduced by this increment.
        IConceptMap<MonotonicCollection<IConjunctionQueueEntry>> deltaNF1 =
                new SparseConceptMap<MonotonicCollection<IConjunctionQueueEntry>>(size);

        // NF1a axioms contribute a single queue entry keyed by their lhs A.
        for (NF1a nf1a : as.getNf1aAxioms()) {
            IConjunctionQueueEntry qe = nf1a.getQueueEntry();
            addTerms(deltaNF1, nf1a.lhsA(), qe);
        }
        // NF1b axioms (two conjuncts) contribute one entry per conjunct.
        for (NF1b nf1b : as.getNf1bAxioms()) {
            final int a1 = nf1b.lhsA1();
            final int a2 = nf1b.lhsA2();
            addTerms(deltaNF1, a1, nf1b.getQueueEntry1());
            addTerms(deltaNF1, a2, nf1b.getQueueEntry2());
        }

        // Join S and deltaNF1: we want pairs <x, a> with <x, a> in S such that
        // deltaNF1 has entries for x — i.e. join on S.col2 and deltaNF1.key.
        for (final IntIterator aItr = subsumptions.keyIterator(); aItr.hasNext(); ) {
            final int a = aItr.next();
            final IConceptSet Sa = subsumptions.get(a);
            for (final IntIterator xItr = Sa.iterator(); xItr.hasNext(); ) {
                final int x = xItr.next();
                // If any new axiom has lhs x, queue its entries on a's context.
                if (deltaNF1.containsKey(x)) {
                    final IMonotonicCollection<IConjunctionQueueEntry> set = deltaNF1.get(x);
                    for (final IConjunctionQueueEntry entry : set) {
                        // Add to the corresponding context and activate it so the
                        // saturation loop picks it up again.
                        Context ctx = contextIndex.get(a);
                        ctx.addConceptQueueEntry(entry);
                        affectedContexts.add(ctx);
                        ctx.startTracking();
                        if (ctx.activate()) {
                            todo.add(ctx);
                        }
                    }
                }
            }
        }
    }
}
public class BitMarketAdapters { /** * Adapts BitMarket ticker to Ticker . * @ param bitMarketTicker * @ param currencyPair * @ return */ public static Ticker adaptTicker ( BitMarketTicker bitMarketTicker , CurrencyPair currencyPair ) { } }
BigDecimal bid = bitMarketTicker . getBid ( ) ; BigDecimal ask = bitMarketTicker . getAsk ( ) ; BigDecimal high = bitMarketTicker . getHigh ( ) ; BigDecimal low = bitMarketTicker . getLow ( ) ; BigDecimal volume = bitMarketTicker . getVolume ( ) ; BigDecimal vwap = bitMarketTicker . getVwap ( ) ; BigDecimal last = bitMarketTicker . getLast ( ) ; return new Ticker . Builder ( ) . currencyPair ( currencyPair ) . last ( last ) . bid ( bid ) . ask ( ask ) . high ( high ) . low ( low ) . volume ( volume ) . vwap ( vwap ) . build ( ) ;
public class Logger {
    /**
     * Opens the specified Database object's database files and starts up the
     * logging process.
     * <p>
     * If the specified Database object is a new database, its database files
     * are first created.
     *
     * @param db the Database
     * @throws HsqlException if there is a problem, such as the case when the
     *         specified files are in use by another process
     */
    public void openLog(Database db) {
        needsCheckpoint = false;

        String path = db.getPath();
        // Application-log level is read from database properties (0 = none).
        int loglevel = db.getProperties().getIntegerProperty(
                HsqlDatabaseProperties.hsqldb_applog, 0);

        this.database = db;

        if (loglevel != SimpleLog.LOG_NONE) {
            // Log file lives next to the database files; writable unless the
            // database files themselves are read-only.
            appLog = new SimpleLog(path + ".app.log", loglevel,
                                   !db.isFilesReadOnly());
        }

        // NOTE(review): appLog is used unconditionally here — if loglevel is
        // LOG_NONE this relies on appLog having been assigned elsewhere;
        // confirm the field has a non-null default.
        appLog.sendLine(SimpleLog.LOG_ERROR, "Database (re)opened");

        logStatements = false;

        boolean useLock = db.getProperties().isPropertyTrue(
                HsqlDatabaseProperties.hsqldb_lock_file);

        // Acquire the .lck file unless the database is opened read-only.
        if (useLock && !db.isFilesReadOnly()) {
            acquireLock(path);
        }

        log = new Log(db);

        log.open();

        // NOTE(review): double assignment keeps two flags in sync
        // (logsStatements and logStatements) — presumably intentional, but
        // verify both fields are really needed.
        logsStatements = logStatements = !db.isFilesReadOnly();
    }
}
public class RegistrationsApi { /** * Confirm User * This call updates the registration request issued earlier by associating it with an authenticated user and captures all additional information required to add a new device . * @ param registrationInfo Device Registration information . ( required ) * @ return DeviceRegConfirmUserResponseEnvelope * @ throws ApiException If fail to call the API , e . g . server error or cannot deserialize the response body */ public DeviceRegConfirmUserResponseEnvelope confirmUser ( DeviceRegConfirmUserRequest registrationInfo ) throws ApiException { } }
ApiResponse < DeviceRegConfirmUserResponseEnvelope > resp = confirmUserWithHttpInfo ( registrationInfo ) ; return resp . getData ( ) ;
public class AbstractResourceRepository {
    /**
     * Selects the resource with the most appropriate version, based on the
     * standard service rules: exact-file locations win outright, otherwise
     * candidates from the given base location(s) are sorted highest-version
     * first and iFix resources are only returned when a matching base
     * resource exists (unless none can be found at all).
     *
     * @param baseLocation a single location, a comma-separated list of
     *        locations, or null/empty for the default location
     * @param symbolicName the symbolic name of the wanted resource
     * @param versionRange the version range the resource must fall in
     * @param performURICheck whether to first try baseLocation as a file URI
     * @param selectBaseBundle when an iFix and its base both match, return the
     *        base bundle instead of the iFix
     * @return the selected file, or null when nothing matches
     */
    protected File selectResource(String baseLocation, final String symbolicName, final VersionRange versionRange, final boolean performURICheck, final boolean selectBaseBundle) {
        // If the baseLocation is null or the empty string use the default location.
        if (baseLocation == null || "".equals(baseLocation)) {
            baseLocation = getDefaultBaseLocation();
        }

        Set<String> baseLocationParts;
        // No comma: treat as a single, possibly exact, location.
        if (!!!baseLocation.contains(",")) {
            File candidateFile;
            if (performURICheck) {
                // Try to interpret the location as a file URI; if it points at an
                // existing absolute file, validate and return it immediately.
                try {
                    candidateFile = new File(URI.create(baseLocation));
                    if (candidateFile.exists() && candidateFile.isFile() && candidateFile.isAbsolute()) {
                        return validateR4Bundle(candidateFile);
                    }
                } catch (IllegalArgumentException iae) {
                    // Not a URI — ignore and fall through to path handling.
                }
            }

            // Not a URI: resolve relative to the repository root. If it is an
            // existing file, validate and return it; otherwise assume the
            // location is a directory and continue with directory processing.
            candidateFile = new File(getRootDirectory(), baseLocation);
            if (candidateFile.exists() && candidateFile.isFile()) {
                return validateR4Bundle(candidateFile);
            }

            // Prime baseLocationParts with this single baseLocation.
            baseLocationParts = new HashSet<String>();
            baseLocationParts.add(baseLocation);
        } else {
            // Comma-separated list of directories: split them up.
            baseLocationParts = new HashSet<String>(Arrays.asList(baseLocation.split(",")));
        }

        // Make sure every requested location has been indexed into this repository.
        for (String baseLocationPart : baseLocationParts) {
            // Trim whitespace in case someone padded the locations in the source.
            baseLocationPart = baseLocationPart.trim();
            if (!!!isBaseLocationIncluded(baseLocationPart)) {
                // Include the directory, loading all missed resources into this object.
                includeBaseLocation(baseLocationPart);
            }
        }

        // Grab the candidates with the right symbolic name; null means no match.
        List<Resource> candidateResources = getResourcesBySymbolicName(symbolicName);
        if (candidateResources != null) {
            // Sort so the highest version comes first, e.g.
            // a.b/1.0.2.v2, a.b/1.0.1.v1, a.b/1.0.1
            Collections.sort(candidateResources);

            Resource bestMatch = null;

            for (Resource candidateResource : candidateResources) {
                // Skip candidates outside the requested locations or version range.
                if (!!!baseLocationParts.contains(candidateResource.getBaseLocation()) || !!!versionRange.includes(candidateResource.getVersion())) {
                    continue;
                }

                if (bestMatch == null) {
                    // First acceptable candidate.
                    if (!!!candidateResource.isFix()) {
                        // Highest-versioned non-iFix resource wins outright;
                        // resource IS the base bundle, so selectBaseBundle is moot.
                        return candidateResource.getFile();
                    } else {
                        // Remember the iFix, but keep looking for its base resource.
                        bestMatch = candidateResource;
                        // kernel.boot keeps the same jar name when iFixed (tooling
                        // references it by name on manifest classpaths), so there
                        // is only ever a single match — return immediately.
                        if ("com.ibm.ws.kernel.boot".equals(symbolicName)) {
                            return bestMatch.getFile();
                        }
                    }
                } else {
                    // Fetch once; needed again in the warning path below.
                    int bestMatchMajor = bestMatch.getVersion().getMajor();
                    int bestMatchMinor = bestMatch.getVersion().getMinor();
                    int bestMatchMicro = bestMatch.getVersion().getMicro();

                    // Same major.minor.micro => same base level (an older iFix or
                    // the base resource itself).
                    if (bestMatchMajor == candidateResource.getVersion().getMajor() && bestMatchMinor == candidateResource.getVersion().getMinor() && bestMatchMicro == candidateResource.getVersion().getMicro()) {
                        if (!!!candidateResource.isFix()) {
                            // Found the base bundle for the remembered iFix:
                            // honour selectBaseBundle.
                            return selectBaseBundle ? candidateResource.getFile() : bestMatch.getFile();
                        }
                    } else {
                        // The previous best iFix has no base resource installed —
                        // warn (the message assumes the standard naming scheme)
                        // and move on.
                        warnThatAnIFixWasIgnored(bestMatch.getFile().getName(), bestMatch.getSymbolicName(), bestMatchMajor, bestMatchMinor, bestMatchMicro);

                        if (!!!candidateResource.isFix()) {
                            // Fall back to a non-iFix version of the resource.
                            return candidateResource.getFile();
                        } else {
                            bestMatch = candidateResource;
                        }
                    }
                }
            }

            // Defensive: bestMatch should only ever hold iFixes, so this should
            // not trigger, but return it if it is somehow a non-iFix.
            if (bestMatch != null && !!!bestMatch.isFix()) {
                return bestMatch.getFile();
            }
        }

        // No match found.
        return null;
    }
}
public class BookKeeperCommand { /** * Outputs a summary for the given Log . * @ param logId The Log Id . * @ param m The Log Metadata for the given Log Id . */ protected void outputLogSummary ( int logId , ReadOnlyLogMetadata m ) { } }
if ( m == null ) { output ( "Log %d: No metadata." , logId ) ; } else { output ( "Log %d: Epoch=%d, Version=%d, Enabled=%s, Ledgers=%d, Truncation={%s}" , logId , m . getEpoch ( ) , m . getUpdateVersion ( ) , m . isEnabled ( ) , m . getLedgers ( ) . size ( ) , m . getTruncationAddress ( ) ) ; }
public class CmsPropertiesEntryPoint { /** * Returns to the close link after a short delay . < p > */ void closeDelayed ( ) { } }
Scheduler . RepeatingCommand command = new Scheduler . RepeatingCommand ( ) { public boolean execute ( ) { if ( CmsErrorDialog . isShowingErrorDialogs ( ) ) { return true ; } else { Window . Location . assign ( m_closeLink ) ; return false ; } } } ; Scheduler . get ( ) . scheduleFixedDelay ( command , 300 ) ;
public class Model { /** * Get attribute of mysql type : int , integer , tinyint ( n ) n > 1 , smallint , mediumint */ public Integer getInt ( String attr ) { } }
Number n = ( Number ) attrs . get ( attr ) ; return n != null ? n . intValue ( ) : null ;
public class SuiteDeployer { /** * Undeploy event . * @ param event event to observe */ public void undeploy ( @ Observes final BeforeStop event ) { } }
if ( extensionEnabled ( ) ) { debug ( "Catching BeforeStop event {0}" , event . toString ( ) ) ; undeployDeployments = true ; undeployEvent . fire ( new UnDeployManagedDeployments ( ) ) ; }
public class RedBlackTreeLong {
    /**
     * Iterative BST search in the subtree rooted at x.
     * Returns the node whose {@code value} field equals the given key, or
     * null if no such node exists.
     * <p>
     * NOTE(review): despite the original comment saying "value associated with
     * the given key", this method returns the {@code Node} itself, and it
     * compares the search key against {@code x.value} — the field named
     * "value" appears to hold the node's ordering key here; confirm against
     * the Node declaration.
     */
    private Node<T> get(Node<T> x, long key) {
        while (x != null) {
            if (x.value == key)
                return x;
            // Standard BST descent: smaller keys left, larger keys right.
            if (key < x.value)
                x = x.left;
            else
                x = x.right;
        }
        return null;
    }
}
public class AccountsInner { /** * Gets the first page of Azure Storage accounts , if any , linked to the specified Data Lake Analytics account . The response includes a link to the next page , if any . * @ param nextPageLink The NextLink from the previous successful call to List operation . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the PagedList & lt ; StorageAccountInfoInner & gt ; object */ public Observable < Page < StorageAccountInfoInner > > listStorageAccountsNextAsync ( final String nextPageLink ) { } }
return listStorageAccountsNextWithServiceResponseAsync ( nextPageLink ) . map ( new Func1 < ServiceResponse < Page < StorageAccountInfoInner > > , Page < StorageAccountInfoInner > > ( ) { @ Override public Page < StorageAccountInfoInner > call ( ServiceResponse < Page < StorageAccountInfoInner > > response ) { return response . body ( ) ; } } ) ;
public class CondVar { /** * Blocks until condition is true . * @ param condition The condition . Must be non - null */ public void waitFor ( Condition condition ) { } }
boolean intr = false ; lock . lock ( ) ; try { while ( ! condition . isMet ( ) ) { try { cond . await ( ) ; } catch ( InterruptedException e ) { intr = true ; } } } finally { lock . unlock ( ) ; if ( intr ) Thread . currentThread ( ) . interrupt ( ) ; }
public class CrossValidationRecSysEvaluator {
    /**
     * Evaluates predictions using an evaluation model against the test set,
     * averaging RMSE/MAE and the ranking metrics (NDCG, precision, recall at
     * each configured cutoff) over all folds.
     *
     * @param splitPath path where splits have been stored
     * @param strategyModelPath path where strategy model files have been stored
     * @return evaluation metrics averaged over the folds
     * @throws IOException if a split or strategy-model file cannot be parsed
     */
    public EvaluationMetrics evaluate(final String splitPath, final String strategyModelPath) throws IOException {
        EvaluationMetrics results = new EvaluationMetrics();
        double rmseResult = 0.0;
        double maeResult = 0.0;
        for (int cutoff : this.cutoffs) {
            double ndcgRes = 0.0;
            double precisionRes = 0.0;
            double recallRes = 0.0;
            for (int i = 0; i < this.numFolds; i++) {
                File testFile = new File(Paths.get(splitPath, "test_" + i + FILE_EXT).toString());
                File strategyFile = new File(Paths.get(strategyModelPath, "strategymodel_" + i + FILE_EXT).toString());
                DataModelIF<Long, Long> testModel = new SimpleParser().parseData(testFile);
                DataModelIF<Long, Long> strategyModel = new SimpleParser().parseData(strategyFile);
                // Error metrics are cutoff-independent, so accumulate them only
                // while processing the first cutoff to avoid double counting.
                if (cutoff == this.cutoffs[0]) {
                    RMSE<Long, Long> rmse = new RMSE<>(strategyModel, testModel);
                    rmse.compute();
                    rmseResult += rmse.getValue();
                    MAE<Long, Long> mae = new MAE<>(strategyModel, testModel);
                    mae.compute();
                    maeResult += mae.getValue();
                }
                // Ranking metrics at this cutoff, accumulated per fold.
                NDCG<Long, Long> ndcg = new NDCG<>(strategyModel, testModel, new int[] { cutoff });
                ndcg.compute();
                ndcgRes += ndcg.getValueAt(cutoff);
                Precision<Long, Long> precision = new Precision<>(strategyModel, testModel, this.relevanceThreshold, new int[] { cutoff });
                precision.compute();
                precisionRes += precision.getValueAt(cutoff);
                Recall<Long, Long> recall = new Recall<>(strategyModel, testModel, this.relevanceThreshold, new int[] { cutoff });
                recall.compute();
                recallRes += recall.getValueAt(cutoff);
            }
            // Store fold-averaged ranking metrics for this cutoff.
            results.setPrecisionAtK(cutoff, precisionRes / this.numFolds);
            results.setRecallAtK(cutoff, recallRes / this.numFolds);
            results.setNDCGAtK(cutoff, ndcgRes / this.numFolds);
            log.info("Ranking metrics at {} computed", cutoff);
            log.info("NDCG@" + cutoff + ": " + ndcgRes / this.numFolds + " / P@" + cutoff + ": " + precisionRes / this.numFolds + " / R@" + cutoff + ": " + recallRes / this.numFolds);
        }
        // Fold-averaged error metrics.
        results.setMAE(maeResult / this.numFolds);
        results.setRMSE(rmseResult / this.numFolds);
        log.info("Error metrics computed");
        log.info("RMSE: " + rmseResult / this.numFolds + " / MAE: " + maeResult / this.numFolds);
        return results;
    }
}
public class HttpClient { /** * Used by Entity - Enclosing HTTP Requests to send Name - Value pairs in the body of the request * @ param param Name of Parameter * @ param value Value of Parameter * @ return */ public final HttpClient addNameValuePair ( final String param , final String value ) { } }
nameValuePairs . add ( new BasicNameValuePair ( param , value ) ) ; return this ;
public class GVRScriptManager { /** * Binds a script bundle to a scene . * @ param scriptBundle * The { @ code GVRScriptBundle } object containing script binding information . * @ param scene * The scene to bind to . * @ throws IOException if script bundle file cannot be read . * @ throws GVRScriptException if script processing error occurs . */ public void bindScriptBundleToScene ( GVRScriptBundle scriptBundle , GVRScene scene ) throws IOException , GVRScriptException { } }
for ( GVRSceneObject sceneObject : scene . getSceneObjects ( ) ) { bindBundleToSceneObject ( scriptBundle , sceneObject ) ; }
public class HeidelTime {
    /**
     * Identifies the part of speech (POS) of a match result by locating the
     * token of sentence {@code s} that starts at {@code tokBegin}.
     *
     * @param tokBegin begin offset of the matched token
     * @param tokEnd end offset of the match (currently unused)
     * @param s the sentence containing the match
     * @param jcas the CAS holding the annotations
     * @return the POS tag of the token starting at tokBegin, or "" if no such
     *         token exists
     */
    public String getPosFromMatchResult(int tokBegin, int tokEnd, Sentence s, JCas jcas) {
        // Index every token of the sentence by its begin offset.
        HashMap<Integer, Token> tokensByBegin = new HashMap<Integer, Token>();
        FSIterator sentenceTokens = jcas.getAnnotationIndex(Token.type).subiterator(s);
        while (sentenceTokens.hasNext()) {
            Token t = (Token) sentenceTokens.next();
            tokensByBegin.put(t.getBegin(), t);
        }
        // Look up the token that starts exactly at the match begin.
        Token match = tokensByBegin.get(tokBegin);
        return match != null ? match.getPos() : "";
    }
}
public class AmazonDynamoDBAsyncClient { /** * Creates a new item , or replaces an old item with a new item ( including * all the attributes ) . * If an item already exists in the specified table with the same primary * key , the new item completely replaces the existing item . You can * perform a conditional put ( insert a new item if one with the specified * primary key doesn ' t exist ) , or replace an existing item if it has * certain attribute values . * @ param putItemRequest Container for the necessary parameters to * execute the PutItem operation on AmazonDynamoDB . * @ param asyncHandler Asynchronous callback handler for events in the * life - cycle of the request . Users could provide the implementation of * the four callback methods in this interface to process the operation * result or handle the exception . * @ return A Java Future object containing the response from the PutItem * service method , as returned by AmazonDynamoDB . * @ throws AmazonClientException * If any internal errors are encountered inside the client while * attempting to make the request or handle the response . For example * if a network connection is not available . * @ throws AmazonServiceException * If an error response is returned by AmazonDynamoDB indicating * either a problem with the data in the request , or a server side issue . */ public Future < PutItemResult > putItemAsync ( final PutItemRequest putItemRequest , final AsyncHandler < PutItemRequest , PutItemResult > asyncHandler ) throws AmazonServiceException , AmazonClientException { } }
return executorService . submit ( new Callable < PutItemResult > ( ) { public PutItemResult call ( ) throws Exception { PutItemResult result ; try { result = putItem ( putItemRequest ) ; } catch ( Exception ex ) { asyncHandler . onError ( ex ) ; throw ex ; } asyncHandler . onSuccess ( putItemRequest , result ) ; return result ; } } ) ;
public class AppsImpl { /** * Gets the query logs of the past month for the application . * @ param appId The application ID . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the InputStream object */ public Observable < InputStream > downloadQueryLogsAsync ( UUID appId ) { } }
return downloadQueryLogsWithServiceResponseAsync ( appId ) . map ( new Func1 < ServiceResponse < InputStream > , InputStream > ( ) { @ Override public InputStream call ( ServiceResponse < InputStream > response ) { return response . body ( ) ; } } ) ;
public class BeanDefinitionWriter { /** * Visits the constructor used to create the bean definition . * @ param annotationMetadata The annotation metadata for the constructor * @ param requiresReflection Whether invoking the constructor requires reflection * @ param argumentTypes The argument type names for each parameter * @ param argumentAnnotationMetadata The qualifier type names for each parameter * @ param genericTypes The generic types for each parameter */ @ Override public void visitBeanDefinitionConstructor ( AnnotationMetadata annotationMetadata , boolean requiresReflection , Map < String , Object > argumentTypes , Map < String , AnnotationMetadata > argumentAnnotationMetadata , Map < String , Map < String , Object > > genericTypes ) { } }
if ( constructorVisitor == null ) { // first build the constructor visitBeanDefinitionConstructorInternal ( annotationMetadata , requiresReflection , argumentTypes , argumentAnnotationMetadata , genericTypes ) ; // now prepare the implementation of the build method . See BeanFactory interface visitBuildMethodDefinition ( annotationMetadata , argumentTypes , argumentAnnotationMetadata ) ; // now override the injectBean method visitInjectMethodDefinition ( ) ; }
public class LocalConnection { /** * ( non - Javadoc ) * @ see javax . jms . Connection # start ( ) */ @ Override public final void start ( ) throws JMSException { } }
externalAccessLock . readLock ( ) . lock ( ) ; try { checkNotClosed ( ) ; if ( started ) return ; started = true ; // Wake up waiting consumers wakeUpLocalConsumers ( ) ; } finally { externalAccessLock . readLock ( ) . unlock ( ) ; }
public class ApolloCallTracker { /** * < p > Removes provided { @ link ApolloQueryCall } that finished his execution , if it is found , else throws an * { @ link AssertionError } . < / p > * If the removal operation is successful and no active running calls are found , then the registered * { @ link ApolloCallTracker # idleResourceCallback } is invoked . * < p > < b > Note < / b > : This method needs to be called right after an apolloCall is completed ( whether successful or * failed ) . < / p > */ void unregisterQueryCall ( @ NotNull ApolloQueryCall apolloQueryCall ) { } }
checkNotNull ( apolloQueryCall , "apolloQueryCall == null" ) ; OperationName operationName = apolloQueryCall . operation ( ) . name ( ) ; unregisterCall ( activeQueryCalls , operationName , apolloQueryCall ) ;
public class IsDateWithTime { /** * Creates a matcher that matches when the examined { @ linkplain Date } has the given values < code > hour < / code > and * < code > ClockPeriod < / code > ( e . g . < em > AM < / em > ) . */ public static Matcher < Date > hasHour ( final int hour , final ClockPeriod clockPeriod ) { } }
return new IsDateWithTime ( hour , clockPeriod , null , null , null ) ;
public class BaseProducerContext { /** * Calls { @ code onIsIntermediateResultExpected } on each element of the list . Does nothing if * list = = null */ public static void callOnIsIntermediateResultExpectedChanged ( @ Nullable List < ProducerContextCallbacks > callbacks ) { } }
if ( callbacks == null ) { return ; } for ( ProducerContextCallbacks callback : callbacks ) { callback . onIsIntermediateResultExpectedChanged ( ) ; }
public class URBridgeGroup {
    /**
     * Populates this group's members from the underlying user registry and
     * adds the specified attributes to each member.
     * <p>
     * NOTE(review): the original javadoc says "Get the groups for the user",
     * but the code fetches users for this group via
     * {@code urBridge.getUsersForGroup} — confirm which description is right.
     *
     * @param grpMbrAttrs the attributes to be added to each member entity
     * @param countLimit restricts the number of members returned
     * @throws WIMException if an error occurs in the underlying registry or
     *         while attempting to add the attributes
     */
    @Override
    public void getUsersForGroup(List<String> grpMbrAttrs, int countLimit) throws WIMException {
        String securityName = null;
        try {
            securityName = getSecurityName(false);
            // Ask the underlying registry for this group's member names.
            List<String> returnNames = urBridge.getUsersForGroup(securityName, countLimit).getList();
            for (int j = 0; j < returnNames.size(); j++) {
                // Each member gets a standalone entity tree rooted in a
                // throwaway Root (only memberDO is actually attached below).
                Root fakeRoot = new Root();
                PersonAccount memberDO = new PersonAccount();
                fakeRoot.getEntities().add(memberDO);
                IdentifierType identifier = new IdentifierType();
                memberDO.setIdentifier(identifier);
                // Bridge entity fills in the requested attributes from the registry.
                URBridgeEntityFactory osFactory = new URBridgeEntityFactory();
                URBridgeEntity osEntity = osFactory.createObject(memberDO, urBridge, attrMap, baseEntryName, entityConfigMap);
                osEntity.setSecurityNameProp(returnNames.get(j));
                osEntity.populateEntity(grpMbrAttrs);
                osEntity.setRDNPropValue(returnNames.get(j));
                // Attach the populated member to this group.
                ((Group) entity).getMembers().add(memberDO);
            }
        } catch (Exception e) {
            // Wrap any failure in a WIM exception with the formatted message.
            throw new WIMApplicationException(WIMMessageKey.ENTITY_GET_FAILED, Tr.formatMessage(
                    tc,
                    WIMMessageKey.ENTITY_GET_FAILED,
                    WIMMessageHelper.generateMsgParms(securityName, e.toString())));
        }
    }
}
public class ProfilerServiceImpl { /** * Ends the dedicated profile , restarting the background . */ public ProfileReport stop ( ) { } }
if ( ! isValid ( ) ) { return null ; } if ( _state != StateProfile . ACTIVE ) { return null ; } _profileTask . stop ( ) ; ProfileReport report = _profileTask . getReport ( ) ; if ( _backgroundPeriod > 0 ) { _profileTask . setPeriod ( _backgroundPeriod ) ; _profileTask . start ( ) ; _state = StateProfile . BACKGROUND ; } else { _state = StateProfile . IDLE ; } return report ;
public class SofaConfigs { /** * 解析数字型配置 * @ param appName 应用名 * @ param key 配置项 * @ param defaultValue 默认值 * @ return 配置 */ public static int getIntegerValue ( String appName , String key , int defaultValue ) { } }
String ret = getStringValue0 ( appName , key ) ; return StringUtils . isEmpty ( ret ) ? defaultValue : CommonUtils . parseInt ( ret , defaultValue ) ;
public class ApiOvhTelephony { /** * Alter this object properties * REST : PUT / telephony / { billingAccount } / portability / { id } / document / { documentId } * @ param body [ required ] New object properties * @ param billingAccount [ required ] The name of your billingAccount * @ param id [ required ] The ID of the portability * @ param documentId [ required ] Identifier of the document */ public void billingAccount_portability_id_document_documentId_PUT ( String billingAccount , Long id , Long documentId , OvhPortabilityDocument body ) throws IOException { } }
String qPath = "/telephony/{billingAccount}/portability/{id}/document/{documentId}" ; StringBuilder sb = path ( qPath , billingAccount , id , documentId ) ; exec ( qPath , "PUT" , sb . toString ( ) , body ) ;
public class FillManager { /** * Create a list of fills on the map . * Fills are going to be created only for features with a matching geometry . * All supported properties are : < br > * FillOptions . PROPERTY _ FILL _ OPACITY - Float < br > * FillOptions . PROPERTY _ FILL _ COLOR - String < br > * FillOptions . PROPERTY _ FILL _ OUTLINE _ COLOR - String < br > * FillOptions . PROPERTY _ FILL _ PATTERN - String < br > * Learn more about above properties in the < a href = " https : / / www . mapbox . com / mapbox - gl - js / style - spec / " > Style specification < / a > . * Out of spec properties : < br > * " is - draggable " - Boolean , true if the fill should be draggable , false otherwise * @ param featureCollection the featureCollection defining the list of fills to build * @ return the list of built fills */ @ UiThread public List < Fill > create ( @ NonNull FeatureCollection featureCollection ) { } }
List < Feature > features = featureCollection . features ( ) ; List < FillOptions > options = new ArrayList < > ( ) ; if ( features != null ) { for ( Feature feature : features ) { FillOptions option = FillOptions . fromFeature ( feature ) ; if ( option != null ) { options . add ( option ) ; } } } return create ( options ) ;
public class Balancer {
    /**
     * Given elapsedTime in milliseconds, returns a printable string scaled to
     * the largest fitting unit (milliseconds, seconds, minutes or hours).
     */
    private static String time2Str(long elapsedTime) {
        double scaled = elapsedTime;
        String unit;
        if (elapsedTime < 1000) {
            unit = "milliseconds";
        } else if (elapsedTime < 60 * 1000) {
            scaled = scaled / 1000;
            unit = "seconds";
        } else if (elapsedTime < 3600 * 1000) {
            scaled = scaled / (60 * 1000);
            unit = "minutes";
        } else {
            scaled = scaled / (3600 * 1000);
            unit = "hours";
        }
        return scaled + " " + unit;
    }
}
public class IotHubResourcesInner { /** * Get the statistics from an IoT hub . * Get the statistics from an IoT hub . * @ param resourceGroupName The name of the resource group that contains the IoT hub . * @ param resourceName The name of the IoT hub . * @ param serviceCallback the async ServiceCallback to handle successful and failed responses . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the { @ link ServiceFuture } object */ public ServiceFuture < RegistryStatisticsInner > getStatsAsync ( String resourceGroupName , String resourceName , final ServiceCallback < RegistryStatisticsInner > serviceCallback ) { } }
return ServiceFuture . fromResponse ( getStatsWithServiceResponseAsync ( resourceGroupName , resourceName ) , serviceCallback ) ;
public class FacesConfigTypeImpl { /** * Returns all < code > validator < / code > elements * @ return list of < code > validator < / code > */ public List < FacesConfigValidatorType < FacesConfigType < T > > > getAllValidator ( ) { } }
List < FacesConfigValidatorType < FacesConfigType < T > > > list = new ArrayList < FacesConfigValidatorType < FacesConfigType < T > > > ( ) ; List < Node > nodeList = childNode . get ( "validator" ) ; for ( Node node : nodeList ) { FacesConfigValidatorType < FacesConfigType < T > > type = new FacesConfigValidatorTypeImpl < FacesConfigType < T > > ( this , "validator" , childNode , node ) ; list . add ( type ) ; } return list ;
public class Bytes {
    /**
     * Creates a Bytes object by copying the data of the CharSequence and
     * encoding it using UTF-8.
     */
    public static final Bytes of(CharSequence cs) {
        // Delegate to the String overload, which may have a more direct route.
        if (cs instanceof String) {
            return of((String) cs);
        }
        // Note: a null cs reaches this check (instanceof is false for null).
        Objects.requireNonNull(cs);
        if (cs.length() == 0) {
            return EMPTY; // shared empty instance
        }
        ByteBuffer bb = StandardCharsets.UTF_8.encode(CharBuffer.wrap(cs));
        if (bb.hasArray()) {
            // this byte buffer has never escaped so can use its byte array directly
            // NOTE(review): the third argument comes from limit(); that is only a
            // correct byte count if position() is 0 after encode() — confirm against
            // the (byte[], offset, length) constructor's contract.
            return new Bytes(bb.array(), bb.position() + bb.arrayOffset(), bb.limit());
        } else {
            // Direct buffer: copy the remaining bytes out.
            byte[] data = new byte[bb.remaining()];
            bb.get(data);
            return new Bytes(data);
        }
    }
}
public class AbsFilesScanner { /** * / * map */ public Map < String , T > map ( File [ ] files , int start , int count ) { } }
final Map < String , T > results = new LinkedHashMap < > ( ) ; scan ( files , start , count , new Consumer < File > ( ) { @ Override public void accept ( File file ) { @ Nullable Tuple2 < String , T > mapping = file2map ( file ) ; if ( mapping != null ) { String key = mapping . get0 ( ) ; results . put ( key , results . containsKey ( key ) ? duplicate2map ( results . get ( key ) , mapping . get1 ( ) ) : mapping . get1 ( ) ) ; } } } ) ; return results ;
public class HandlerSocketHandler { /** * Auto reconect request to hs4j server * @ param session */ protected void reconnect ( Session session ) { } }
if ( this . hsClient . isStarted ( ) ) { if ( log . isDebugEnabled ( ) ) { log . debug ( "Add reconnectRequest to connector " + session . getRemoteSocketAddress ( ) ) ; } HandlerSocketSession hSession = ( HandlerSocketSession ) session ; InetSocketAddress addr = hSession . getRemoteSocketAddress ( ) ; this . hsClient . getConnector ( ) . addToWatingQueue ( new ReconnectRequest ( addr , 0 , this . hsClient . getHealConnectionInterval ( ) ) ) ; }
public class VersionID { /** * Compares if ' this ' is greater than vid * @ param vid TODO * @ param allowEqual TODO * @ return TODO */ private boolean isGreaterThanOrEqualHelper ( VersionID vid , boolean allowEqual ) { } }
if ( _isCompound ) { if ( ! _rest . isGreaterThanOrEqualHelper ( vid , allowEqual ) ) { return false ; } } // Normalize the two strings String [ ] t1 = normalize ( _tuple , vid . _tuple . length ) ; String [ ] t2 = normalize ( vid . _tuple , _tuple . length ) ; for ( int i = 0 ; i < t1 . length ; i ++ ) { // Compare current element Object e1 = getValueAsObject ( t1 [ i ] ) ; Object e2 = getValueAsObject ( t2 [ i ] ) ; if ( e1 . equals ( e2 ) ) { // So far so good } else { if ( e1 instanceof Integer && e2 instanceof Integer ) { return ( Integer ) e1 > ( Integer ) e2 ; } else { String s1 = t1 [ i ] ; String s2 = t2 [ i ] ; return s1 . compareTo ( s2 ) > 0 ; } } } // If we get here , they are equal return allowEqual ;
public class AccessIdUtil { /** * Given an accessId , extract the realm . * @ param accessId * @ return The realm for the accessId , or { @ code null } if the accessId is invalid */ public static String getRealm ( String accessId ) { } }
Matcher m = matcher ( accessId ) ; if ( m != null ) { return m . group ( 2 ) ; } return null ;
public class RGraph { /** * Parsing of the RGraph . This is the main method * to perform a query . Given the constrains c1 and c2 * defining mandatory elements in G1 and G2 and given * the search options , this method builds an initial set * of starting nodes ( B ) and parses recursively the * RGraph to find a list of solution according to * these parameters . * @ param c1 constrain on the graph G1 * @ param c2 constrain on the graph G2 * @ param findAllStructure true if we want all results to be generated * @ param findAllMap true is we want all possible ' mappings ' */ public void parse ( BitSet c1 , BitSet c2 , boolean findAllStructure , boolean findAllMap ) { } }
// initialize the list of solution solutionList . clear ( ) ; // builds the set of starting nodes // according to the constrains BitSet b = buildB ( c1 , c2 ) ; // setup options setAllStructure ( findAllStructure ) ; setAllMap ( findAllMap ) ; // parse recursively the RGraph parseRec ( new BitSet ( b . size ( ) ) , b , new BitSet ( b . size ( ) ) ) ;
public class Fn {
    /**
     * Returns a {@code Supplier} that lazily creates and caches a single
     * instance on the first call to {@code get()}; subsequent calls return the
     * cached instance without invoking {@code supplier} again.
     *
     * <p>Bug fixed: the original double-checked locking never set
     * {@code initialized} to {@code true}, so every call re-entered the
     * synchronized block and re-invoked {@code supplier.get()}, defeating the
     * memoization entirely. The volatile write to {@code initialized} now
     * happens after the {@code instance} write, safely publishing the value.
     *
     * @param supplier the factory invoked at most once
     * @return a thread-safe memoizing supplier
     */
    public static <T> Supplier<T> memoize(final Supplier<T> supplier) {
        return new Supplier<T>() {
            private volatile boolean initialized = false;
            private T instance = null;

            @Override
            public T get() {
                if (initialized == false) {
                    synchronized (this) {
                        if (initialized == false) {
                            instance = supplier.get();
                            // Publish: volatile write ordered after the
                            // instance write makes it visible to all threads.
                            initialized = true;
                        }
                    }
                }
                return instance;
            }
        };
    }
}
public class NetUtils { /** * DELETE请求 * @ param url 请求的URL * @ param body 内容正文 * @ return 响应内容 * @ since 1.1.0 */ public static String delete ( String url , Map < String , String > body ) { } }
return delete ( url , JSONObject . toJSONString ( body ) ) ;
public class BaseAssociateLocation2DFilter { /** * Finds the best match for an index in destination and sees if it matches the source index * @ param indexSrc The index in source being examined * @ param bestIndex Index in dst with the best fit to source * @ return true if a match was found and false if not */ private boolean backwardsValidation ( int indexSrc , int bestIndex ) { } }
double bestScoreV = maxError ; int bestIndexV = - 1 ; D d_forward = descDst . get ( bestIndex ) ; setActiveSource ( locationDst . get ( bestIndex ) ) ; for ( int j = 0 ; j < locationSrc . size ( ) ; j ++ ) { // compute distance between the two features double distance = computeDistanceToSource ( locationSrc . get ( j ) ) ; if ( distance > maxDistance ) continue ; D d_v = descSrc . get ( j ) ; double score = scoreAssociation . score ( d_forward , d_v ) ; if ( score < bestScoreV ) { bestScoreV = score ; bestIndexV = j ; } } return bestIndexV == indexSrc ;
public class RetouchedBloomFilter { /** * Performs the selective clearing for a given key . * @ param k The false positive key to remove from < i > this < / i > retouched Bloom filter . * @ param scheme The selective clearing scheme to apply . */ public void selectiveClearing ( Key k , short scheme ) { } }
if ( k == null ) { throw new NullPointerException ( "Key can not be null" ) ; } if ( ! membershipTest ( k ) ) { throw new IllegalArgumentException ( "Key is not a member" ) ; } int index = 0 ; int [ ] h = hash . hash ( k ) ; switch ( scheme ) { case RANDOM : index = randomRemove ( ) ; break ; case MINIMUM_FN : index = minimumFnRemove ( h ) ; break ; case MAXIMUM_FP : index = maximumFpRemove ( h ) ; break ; case RATIO : index = ratioRemove ( h ) ; break ; default : throw new AssertionError ( "Undefined selective clearing scheme" ) ; } clearBit ( index ) ;
public class IpUtils { /** * get the local host ip address . * @ return the local host ip address . */ public static InetAddress getLocalHost ( ) { } }
if ( localHost == null ) { try { localHost = java . net . InetAddress . getLocalHost ( ) ; } catch ( UnknownHostException e ) { LOGGER . error ( "Cannot get local host address. exception is: " + e ) ; localHost = null ; hostName = "UNKNOWN" ; ipAddress = "UNKNOWN" ; } } return localHost ;
public class Humanize { /** * Matches a pace ( value and interval ) with a logical time frame . Very * useful for slow paces . * Examples : * < pre > * < code > * / / 3 occurrences within a 3000ms interval * pace ( 3 , 3000 ) ; / / = > ~ 1 / sec . * / / 200 occurrences within a 70000ms interval * pace ( 200 , 70000 ) ; / / = > ~ 3 / sec . * / / 10 occurrences within a 70000ms interval * pace ( 10 , 70000 ) ; / / = > ~ 9 / min . * / / 14 occurrences within a 31557600000ms interval ( a year ) * pace ( 14 , 31557600000L ) ; / / = > ~ 1 / month * / / 25 occurrences within a 31557600000ms interval * pace ( 25 , 31557600000L ) ; / / = > ~ 2 / month * / / 9 occurrences within a 31557600000ms interval * pace ( 9 , 31557600000L ) ; / / = > > 1 / month ( less than one per month ) * < / code > * < / pre > * @ param value * The number of occurrences within the specified interval * @ param interval * The interval in milliseconds * @ return a Pace instance with data for a given value and interval */ public static Pace pace ( final Number value , final long interval ) { } }
double dval = Math . round ( value . doubleValue ( ) ) ; if ( dval == 0 || interval == 0 ) return Pace . EMPTY ; Pace args = null ; double rate = Math . abs ( dval / interval ) ; TimeMillis [ ] intvls = TimeMillis . values ( ) ; for ( TimeMillis p : intvls ) { double relativePace = rate * p . millis ( ) ; if ( relativePace >= 1 ) { args = new Pace ( Math . round ( relativePace ) , Accuracy . APROX , p ) ; break ; } } if ( args == null ) { args = new Pace ( 1 , Accuracy . LESS_THAN , intvls [ intvls . length - 1 ] ) ; } return args ;
public class CaseForEqBuilder {
    /**
     * Sets the String result for the current case branch.
     *
     * @param then the literal value to yield when this case matches
     * @return the cases builder, for chaining further when/then clauses
     */
    public Cases<String, StringExpression> then(String then) {
        // Wrap the literal in a constant expression before delegating.
        return thenString(ConstantImpl.create(then));
    }
}
public class Stream { /** * Adds items from another stream to the end of the current stream . * @ param with an { @ link Iterable } that should be used to emit items after items in the current stream ran out . * @ return a new stream that contains items from both streams . */ public Stream < T > merge ( final Iterable < ? extends T > with ) { } }
return new Stream < T > ( ) { @ Override public Iterator < T > iterator ( ) { return new ReadOnlyIterator < T > ( ) { Iterator < T > iterator = Stream . this . iterator ( ) ; Iterator < ? extends T > withIterator = with . iterator ( ) ; @ Override public boolean hasNext ( ) { return iterator . hasNext ( ) || withIterator . hasNext ( ) ; } @ Override public T next ( ) { return iterator . hasNext ( ) ? iterator . next ( ) : withIterator . next ( ) ; } } ; } } ;
public class DivSufSort {
    /**
     * {@inheritDoc}
     *
     * Additional constraints enforced by the DivSufSort algorithm:
     * <ul>
     * <li>non-negative (&ge; 0) symbols in the input</li>
     * <li>symbols limited by the alphabet size passed in the constructor</li>
     * <li>length &gt;= 2</li>
     * </ul>
     */
    @Override
    public final int[] buildSuffixArray(int[] input, int start, int length) {
        // Validate the algorithm's preconditions before touching any state.
        Tools.assertAlways(input != null, "input must not be null");
        Tools.assertAlways(length >= 2, "input length must be >= 2");
        MinMax mm = Tools.minmax(input, start, length);
        Tools.assertAlways(mm.min >= 0, "input must not be negative");
        Tools.assertAlways(mm.max < ALPHABET_SIZE, "max alphabet size is " + ALPHABET_SIZE);
        final int[] ret = new int[length];
        // Stash working references in fields read by the sorting subroutines.
        this.SA = ret;
        this.T = input;
        int[] bucket_A = new int[BUCKET_A_SIZE];
        int[] bucket_B = new int[BUCKET_B_SIZE];
        this.start = start;
        /* Suffixsort. */
        // Stage 1: sort the type-B* suffixes; m is their count.
        int m = sortTypeBstar(bucket_A, bucket_B, length);
        // Stage 2: derive the full suffix array from the sorted B* suffixes.
        constructSuffixArray(bucket_A, bucket_B, length, m);
        return ret;
    }
}
public class CarePlan { /** * syntactic sugar */ public CarePlanActivityComponent addActivity ( ) { } }
CarePlanActivityComponent t = new CarePlanActivityComponent ( ) ; if ( this . activity == null ) this . activity = new ArrayList < CarePlanActivityComponent > ( ) ; this . activity . add ( t ) ; return t ;
public class IOUtils {
    /**
     * Ensures {@code destination} is a writable directory, creating it (and
     * any missing parents) if it does not exist.
     *
     * @param destination the directory whose existence to ensure
     * @throws IOException if the directory does not exist and cannot be created
     * @throws IllegalArgumentException if {@code destination} is an existing
     *         file, or the directory is not writable
     */
    public static void requireDirectory(File destination) throws IOException, IllegalArgumentException {
        if (destination.isFile()) {
            throw new IllegalArgumentException(destination + " exists and is a file, directory or path expected.");
        } else if (!destination.exists()) {
            // Fix: the original ignored mkdirs()' return value, so a failed
            // creation surfaced later as a misleading "can not write" error.
            // Report creation failure as the documented IOException instead.
            // (Re-check isDirectory() to tolerate a concurrent mkdirs race.)
            if (!destination.mkdirs() && !destination.isDirectory()) {
                throw new IOException("Could not create destination directory " + destination);
            }
        }
        if (!destination.canWrite()) {
            throw new IllegalArgumentException("Can not write to destination " + destination);
        }
    }
}
public class ParameterData { /** * / * Sets the parameter values , the type of calibration , and the calibration flag . * Also , the mean of the parameter values is calculated , and the max and min value * of the parameter values are determined . */ public void setStat ( double [ ] dataValue , int calibrationType , boolean [ ] calibrate ) { } }
this . data = dataValue ; this . calibrationType = calibrationType ; this . calibrationFlag = calibrate ; calibrationDataSize = 0 ; for ( int i = 0 ; i < this . calibrationFlag . length ; i ++ ) { if ( calibrate [ i ] ) { calibrationDataSize ++ ; } } calculateMean ( ) ; findMin ( ) ; findMax ( ) ; // setDeviation ( ) ;
public class FileUtils {
    /**
     * Tests that the given file exists and is readable.
     *
     * @param f the file to test
     * @return the same file, for call chaining
     * @exception FileNotFoundException if the file does not exist or is not readable
     */
    public static File assertReadable(final File f) throws FileNotFoundException {
        final String problem;
        if (!f.exists()) {
            problem = " does not exist.";
        } else if (!f.canRead()) {
            problem = " is not readable.";
        } else {
            return f; // both checks passed
        }
        throw new FileNotFoundException(f.getAbsolutePath() + problem);
    }
}
public class PotentialDeclaration { /** * Remove values from enums */ private void simplifyEnumValues ( AbstractCompiler compiler ) { } }
if ( getRhs ( ) . isObjectLit ( ) && getRhs ( ) . hasChildren ( ) ) { for ( Node key : getRhs ( ) . children ( ) ) { removeStringKeyValue ( key ) ; } compiler . reportChangeToEnclosingScope ( getRhs ( ) ) ; }
public class AppEventsLogger {
    /**
     * Notifies the events system that the app has launched and logs an
     * activatedApp event. Should be called whenever your app becomes active,
     * typically in the onResume() method of each long-running Activity of your
     * app. Use this method if your application ID is stored in application
     * metadata, otherwise see
     * {@link AppEventsLogger#activateApp(android.content.Context, String)}.
     *
     * @param context used to access the applicationId and the attributionId
     *        for non-authenticated users
     */
    public static void activateApp(Context context) {
        // Ensure the SDK is initialized before any event is logged.
        Settings.sdkInitialize(context);
        // Delegate to the overload, reading the app id from manifest metadata.
        activateApp(context, Utility.getMetadataApplicationId(context));
    }
}
public class Transfer {
    /**
     * Execute the provided function as a transaction asynchronously. This is
     * intended for one-off fund transfers. For multiple, create an instance.
     *
     * @param toAddress destination address
     * @param value amount to send
     * @param unit denomination of the specified amount
     * @return {@link RemoteCall} containing the executing transaction
     */
    public RemoteCall<TransactionReceipt> sendFunds(String toAddress, BigDecimal value, Convert.Unit unit) {
        // The send is deferred: it runs only when the RemoteCall is executed.
        return new RemoteCall<>(() -> send(toAddress, value, unit));
    }
}
public class GenPyCallExprVisitor { /** * Returns the python name for the template . Suitable for calling within the same module . */ static String getLocalTemplateName ( TemplateNode node ) { } }
String templateName = node . getPartialTemplateName ( ) . substring ( 1 ) ; if ( node . getVisibility ( ) == Visibility . PRIVATE ) { return "__" + templateName ; } return templateName ;
public class RawData2DB { /** * Get a Reuters21578 object from a story String * @ param storyText * @ return */ private Reuters21578 getReuters21578StoryFromText ( String storyText ) { } }
// System . out . println ( storyText ) ; // ATTRIBUTES / / / / / String firstLine = storyText . split ( System . getProperty ( "line.separator" ) ) [ 0 ] ; // System . out . println ( firstLine ) ; String newIdString = extractAttribute ( firstLine , "NEWID" ) ; int newId = Integer . parseInt ( newIdString ) ; String oldIdString = extractAttribute ( firstLine , "OLDID" ) ; int oldId = Integer . parseInt ( oldIdString ) ; String topicsString = extractAttribute ( firstLine , "TOPICS" ) ; boolean topicsBool = topicsString . equalsIgnoreCase ( "YES" ) ; String lewissplitString = extractAttribute ( firstLine , "LEWISSPLIT" ) ; String cgisplitString = extractAttribute ( firstLine , "CGISPLIT" ) ; // DATE / / / / / String dateString = extractTextBetweenTags ( storyText , "DATE" ) ; // System . out . println ( " date = " + dateString ) ; Date date = null ; try { DateFormat sdf = new SimpleDateFormat ( "dd-MMM-yyyy HH:mm:ss.SS" ) ; date = sdf . parse ( dateString ) ; } catch ( ParseException e ) { e . printStackTrace ( ) ; } // TOPICS / / / / / String topicsText = extractTextBetweenTags ( storyText , "TOPICS" ) ; List < String > topicsArray = extractElementAsLines ( topicsText , "D" ) ; // System . out . println ( topicsArray ) ; String topicsCSV = Joiner . join ( "," , topicsArray ) ; // System . out . println ( " topicsCSV : " + topicsCSV ) ; // PLACES / / / / / String placesText = extractTextBetweenTags ( storyText , "PLACES" ) ; List < String > placesArray = extractElementAsLines ( placesText , "D" ) ; String placesCSV = Joiner . join ( "," , placesArray ) ; // PEOPLE / / / / / String peopleText = extractTextBetweenTags ( storyText , "PEOPLE" ) ; List < String > peopleArray = extractElementAsLines ( peopleText , "D" ) ; String peopleCSV = Joiner . join ( "," , peopleArray ) ; // ORGS / / / / / String orgsText = extractTextBetweenTags ( storyText , "ORGS" ) ; List < String > orgsArray = extractElementAsLines ( orgsText , "D" ) ; String orgsCSV = Joiner . 
join ( "," , orgsArray ) ; // EXCHANGES / / / / / String exchangesText = extractTextBetweenTags ( storyText , "EXCHANGES" ) ; List < String > exchangesArray = extractElementAsLines ( exchangesText , "D" ) ; String exchangesCSV = Joiner . join ( "," , exchangesArray ) ; // COMPANIES / / / / / String companiesText = extractTextBetweenTags ( storyText , "COMPANIES" ) ; List < String > companiesArray = extractElementAsLines ( companiesText , "D" ) ; String companiesCSV = Joiner . join ( "," , companiesArray ) ; // TITLE / / / / / String titleText = extractTextBetweenTags ( storyText , "TITLE" ) ; // DATELINE / / / / / String datelineText = extractTextBetweenTags ( storyText , "DATELINE" ) ; // BODY / / / / / String body = extractTextBetweenTags ( storyText , "BODY" ) . replaceAll ( "\\s+" , " " ) ; // System . out . println ( body ) ; if ( body . length ( ) > maxBodyLength ) { maxBodyLength = body . length ( ) ; } Reuters21578 reuters21578 = new Reuters21578 ( ) ; reuters21578 . setNewid ( newId ) ; reuters21578 . setOldid ( oldId ) ; reuters21578 . setTopicsbool ( topicsBool ) ; reuters21578 . setLewissplit ( lewissplitString ) ; reuters21578 . setCgisplit ( cgisplitString ) ; reuters21578 . setDate ( date ) ; reuters21578 . setTopics ( topicsCSV ) ; reuters21578 . setPlaces ( placesCSV ) ; reuters21578 . setPeople ( peopleCSV ) ; reuters21578 . setOrgs ( orgsCSV ) ; reuters21578 . setExchanges ( exchangesCSV ) ; reuters21578 . setCompanies ( companiesCSV ) ; reuters21578 . setTitle ( titleText ) ; reuters21578 . setDateline ( datelineText ) ; reuters21578 . setBody ( body ) ; // System . out . println ( reuters21578 . toString ( ) ) ; return reuters21578 ;
public class AddMessages {
    /**
     * Add messages to the dom4j tree: short/long description elements for each
     * BugInstance, a pre-formatted "Message" element for each bug annotation,
     * and reference elements for every bug category, pattern and code seen.
     */
    public void execute() {
        Iterator<?> elementIter = XMLUtil.selectNodes(document, "/BugCollection/BugInstance").iterator();
        Iterator<BugInstance> bugInstanceIter = bugCollection.iterator();
        Set<String> bugTypeSet = new HashSet<>();
        Set<String> bugCategorySet = new HashSet<>();
        Set<String> bugCodeSet = new HashSet<>();
        // Add short and long descriptions to BugInstance elements.
        // We rely on the Document and the BugCollection storing
        // the bug instances in the same order.
        while (elementIter.hasNext() && bugInstanceIter.hasNext()) {
            Element element = (Element) elementIter.next();
            BugInstance bugInstance = bugInstanceIter.next();
            String bugType = bugInstance.getType();
            bugTypeSet.add(bugType);
            BugPattern bugPattern = bugInstance.getBugPattern();
            bugCategorySet.add(bugPattern.getCategory());
            bugCodeSet.add(bugPattern.getAbbrev());
            element.addElement("ShortMessage").addText(bugPattern.getShortDescription());
            element.addElement("LongMessage").addText(bugInstance.getMessage());
            // Add pre-formatted display strings in "Message"
            // elements for all bug annotations.
            // NOTE(review): this assumes the element's children appear in the
            // same order as the instance's annotations — confirm against the
            // XML writer that produced the document.
            Iterator<?> annElementIter = element.elements().iterator();
            Iterator<BugAnnotation> annIter = bugInstance.annotationIterator();
            while (annElementIter.hasNext() && annIter.hasNext()) {
                Element annElement = (Element) annElementIter.next();
                BugAnnotation ann = annIter.next();
                annElement.addElement("Message").addText(ann.toString());
            }
        }
        // Add BugPattern elements for each referenced bug types.
        addBugCategories(bugCategorySet);
        addBugPatterns(bugTypeSet);
        addBugCodes(bugCodeSet);
    }
}
public class DOMNodeSelector { /** * { @ inheritDoc } */ public Node querySelector ( String selectors ) throws NodeSelectorException { } }
Set < Node > result = querySelectorAll ( selectors ) ; if ( result . isEmpty ( ) ) { return null ; } return result . iterator ( ) . next ( ) ;
public class CalibrationIO { /** * Saves stereo camera model to disk * @ param parameters Camera parameters * @ param outputWriter Stream to save the parameters to */ public static void save ( StereoParameters parameters , Writer outputWriter ) { } }
Map < String , Object > map = new HashMap < > ( ) ; map . put ( "model" , MODEL_STEREO ) ; map . put ( VERSION , 0 ) ; map . put ( "left" , putModelRadial ( parameters . left , null ) ) ; map . put ( "right" , putModelRadial ( parameters . right , null ) ) ; map . put ( "rightToLeft" , putSe3 ( parameters . rightToLeft ) ) ; PrintWriter out = new PrintWriter ( outputWriter ) ; out . println ( "# Intrinsic and extrinsic parameters for a stereo camera pair" ) ; Yaml yaml = createYmlObject ( ) ; yaml . dump ( map , out ) ; out . close ( ) ;
public class MiniJPEWriterHandler {
    /**
     * {@inheritDoc}
     *
     * Stores the given point under the "geometry" key of the feature map and
     * returns the same feature object.
     */
    public Object addPointToFeature(Object feature, Object point) {
        @SuppressWarnings("unchecked")
        final Map<String, Object> featureMap = (Map<String, Object>) feature;
        featureMap.put("geometry", point);
        return feature;
    }
}
public class Mapper { /** * Transform an array of JSON Objects into a { @ link List } of DTOs . * @ param < T > The object type contained with in the array * @ param jsonArray The { @ link JSONArray } to convert * @ param objectMapper The { @ link Mapper } to use to process the JSON objects * @ return List of DTOs */ protected < T extends Object > List < T > parseArray ( final JSONArray jsonArray , final Mapper < JSONObject , T > objectMapper ) { } }
final List < T > arrayContents = new ArrayList < > ( ) ; if ( jsonArray != null ) { for ( int i = 0 ; i < jsonArray . length ( ) ; i ++ ) { final JSONObject element = jsonArray . getJSONObject ( i ) ; if ( element != null ) { arrayContents . add ( objectMapper . map ( element ) ) ; } } } return arrayContents ;
public class JDBC4ResultSet {
    /**
     * Always fails: retrieving a Timestamp with an explicit Calendar is not
     * supported by this driver.
     *
     * @param columnIndex the 1-based column index
     * @param cal the calendar that would be used to construct the timestamp
     * @throws SQLException always; an out-of-range column reports the more
     *         specific bounds error first
     */
    @Override
    public Timestamp getTimestamp(int columnIndex, Calendar cal) throws SQLException {
        checkColumnBounds(columnIndex);
        throw SQLError.noSupport();
    }
}
public class SeleniumHelper { /** * Executes Javascript in browser and then waits for ' callback ' to be invoked . * If statementPattern should reference the magic ( function ) variable ' callback ' which should be * called to provide this method ' s result . * If the statementPattern contains the magic variable ' arguments ' * the parameters will also be passed to the statement . In the latter case the parameters * must be a number , a boolean , a String , WebElement , or a List of any combination of the above . * @ link http : / / selenium . googlecode . com / git / docs / api / java / org / openqa / selenium / JavascriptExecutor . html # executeAsyncScript ( java . lang . String , % 20java . lang . Object . . . ) * @ param statementPattern javascript to run , possibly with placeholders to be replaced . * @ param parameters placeholder values that should be replaced before executing the script . * @ return return value from statement . */ public Object waitForJavascriptCallback ( String statementPattern , Object ... parameters ) { } }
JavascriptExecutor jse = ( JavascriptExecutor ) driver ( ) ; Object result = JavascriptHelper . waitForJavascriptCallback ( jse , statementPattern , parameters ) ; return result ;
public class ClassContext { /** * Store a method analysis object . Note that the cached analysis object * could be a special value ( indicating null or an exception ) . * @ param analysisClass * class the method analysis object belongs to * @ param methodDescriptor * method descriptor identifying the analyzed method * @ param object * the analysis object to cache */ public void putMethodAnalysis ( Class < ? > analysisClass , MethodDescriptor methodDescriptor , Object object ) { } }
if ( object == null ) { throw new IllegalArgumentException ( ) ; } Map < MethodDescriptor , Object > objectMap = getObjectMap ( analysisClass ) ; objectMap . put ( methodDescriptor , object ) ;
public class Connector { /** * < p > createDefault . < / p > * @ param properties a { @ link java . util . Map } object . * @ return a { @ link ameba . container . server . Connector } object . */ public static Connector createDefault ( Map < String , String > properties ) { } }
Connector . Builder builder = Connector . Builder . create ( ) . rawProperties ( properties ) . secureEnabled ( Boolean . parseBoolean ( properties . get ( "ssl.enabled" ) ) ) . sslProtocol ( properties . get ( "ssl.protocol" ) ) . sslClientMode ( Boolean . parseBoolean ( properties . get ( "ssl.clientMode" ) ) ) . sslNeedClientAuth ( Boolean . parseBoolean ( properties . get ( "ssl.needClientAuth" ) ) ) . sslWantClientAuth ( Boolean . parseBoolean ( properties . get ( "ssl.wantClientAuth" ) ) ) . sslKeyManagerFactoryAlgorithm ( properties . get ( "ssl.key.manager.factory.algorithm" ) ) . sslKeyPassword ( properties . get ( "ssl.key.password" ) ) . sslKeyStoreProvider ( properties . get ( "ssl.key.store.provider" ) ) . sslKeyStoreType ( properties . get ( "ssl.key.store.type" ) ) . sslKeyStorePassword ( properties . get ( "ssl.key.store.password" ) ) . sslTrustManagerFactoryAlgorithm ( properties . get ( "ssl.trust.manager.factory.algorithm" ) ) . sslTrustPassword ( properties . get ( "ssl.trust.password" ) ) . sslTrustStoreProvider ( properties . get ( "ssl.trust.store.provider" ) ) . sslTrustStoreType ( properties . get ( "ssl.trust.store.type" ) ) . sslTrustStorePassword ( properties . get ( "ssl.trust.store.password" ) ) . ajpEnabled ( Boolean . parseBoolean ( properties . get ( "ajp.enabled" ) ) ) . host ( StringUtils . defaultIfBlank ( properties . get ( "host" ) , "0.0.0.0" ) ) . port ( Integer . valueOf ( StringUtils . defaultIfBlank ( properties . get ( "port" ) , "80" ) ) ) . name ( properties . get ( "name" ) ) ; String keyStoreFile = properties . get ( "ssl.key.store.file" ) ; if ( StringUtils . isNotBlank ( keyStoreFile ) ) try { builder . sslKeyStoreFile ( readByteArrayFromResource ( keyStoreFile ) ) ; } catch ( IOException e ) { logger . error ( "读取sslKeyStoreFile出错" , e ) ; } String trustStoreFile = properties . get ( "ssl.trust.store.file" ) ; if ( StringUtils . isNotBlank ( trustStoreFile ) ) try { builder . 
sslTrustStoreFile ( readByteArrayFromResource ( trustStoreFile ) ) ; } catch ( IOException e ) { logger . error ( "读取sslTrustStoreFile出错" , e ) ; } return builder . build ( ) ;
public class PathSequence {
    /**
     * Initializes the sequence with the pathable to animate.
     * (documentation inherited from interface Path)
     *
     * @param pable the pathable driven by this sequence
     * @param tickStamp timestamp at which the sequence starts
     */
    public void init(Pathable pable, long tickStamp) {
        _pable = pable;
        // Wrap the pathable so completion of each segment chains into
        // starting the next one.
        _pableRep = new DelegatingPathable(_pable) {
            @Override
            public void pathCompleted(long timeStamp) {
                long initStamp;
                // if we just finished a timed path, we can figure out how long ago it really
                // finished and init the next path at that time in the past.
                if (_curPath instanceof TimedPath) {
                    initStamp = _lastInit + ((TimedPath) _curPath)._duration;
                } else {
                    // we don't know
                    initStamp = timeStamp;
                }
                initNextPath(initStamp, timeStamp);
            }
        };
        // Kick off the first path in the sequence.
        initNextPath(tickStamp, tickStamp);
    }
}
public class SSLSessionStrategyFactory { /** * for compatibility purposes . */ private static SSLContextBuilder loadKeyMaterial ( SSLContextBuilder builder , File file , char [ ] ksp , char [ ] kp , PrivateKeyStrategy privateKeyStrategy ) throws NoSuchAlgorithmException , KeyStoreException , UnrecoverableKeyException , CertificateException , IOException { } }
Args . notNull ( file , "Keystore file" ) ; // $ NON - NLS - 1 $ final KeyStore identityStore = KeyStore . getInstance ( KeyStore . getDefaultType ( ) ) ; final FileInputStream instream = new FileInputStream ( file ) ; try { identityStore . load ( instream , ksp ) ; } finally { instream . close ( ) ; } return builder . loadKeyMaterial ( identityStore , kp , privateKeyStrategy ) ;
public class SerializerIntrinsics {
    /**
     * Packed Shuffle Word (SSE): emits a PSHUFW instruction shuffling the
     * words of {@code src} into {@code dst} per the selector {@code imm8}.
     */
    public final void pshufw(MMRegister dst, MMRegister src, Immediate imm8) {
        emitX86(INST_PSHUFW, dst, src, imm8);
    }
}
public class SCM {
    /**
     * Adds environmental variables for the builds to the given map. This can
     * be used to propagate information from SCM to builds (for example, SVN
     * revision number). Invoked whenever someone does
     * {@link AbstractBuild#getEnvironment(TaskListener)}, via
     * {@link #buildEnvVars(AbstractBuild, Map)}, which can be before/after
     * your checkout method is invoked — so be prepared for the possibility
     * that the checkout hasn't happened yet.
     *
     * @since 2.60
     */
    public void buildEnvironment(@Nonnull Run<?, ?> build, @Nonnull Map<String, String> env) {
        // Backwards compatibility: delegate to the legacy AbstractBuild-based
        // hook when possible; other Run types contribute nothing by default.
        if (build instanceof AbstractBuild) {
            buildEnvVars((AbstractBuild) build, env);
        }
    }
}
public class Photo { /** * the value of the photo as URI . Check first with { @ link Photo # getValueType ( ) } if the type is * { @ link PhotoValueType # URI } * @ return returns the value of the photo as URI */ public URI getValueAsURI ( ) { } }
URI uri ; try { uri = new URI ( super . getValue ( ) ) ; } catch ( URISyntaxException e ) { throw new SCIMDataValidationException ( e . getMessage ( ) , e ) ; } return uri ;
public class FamiliarRefreshRecyclerView {
    /**
     * Programmatically triggers a pull-to-refresh: shows the refreshing
     * indicator immediately and fires the pull-refresh callback after a fixed
     * 1-second delay. No-op when pull refresh is disabled.
     */
    public void autoRefresh() {
        if (!isPullRefreshEnabled) return;
        setRefreshing(true);
        // Delay the callback so the refresh animation is visible first.
        // NOTE(review): posts on a Handler bound to the current thread's
        // looper — presumably called from the main thread; confirm.
        new android.os.Handler().postDelayed(new Runnable() {
            @Override
            public void run() {
                callOnPullRefresh();
            }
        }, 1000);
    }
}
public class Ifc4FactoryImpl {
    /**
     * Converts the given literal into an
     * {@code IfcConstructionEquipmentResourceTypeEnum} value.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     * @throws IllegalArgumentException if the literal is not a valid enumerator
     */
    public IfcConstructionEquipmentResourceTypeEnum createIfcConstructionEquipmentResourceTypeEnumFromString(EDataType eDataType, String initialValue) {
        IfcConstructionEquipmentResourceTypeEnum result = IfcConstructionEquipmentResourceTypeEnum.get(initialValue);
        if (result == null) throw new IllegalArgumentException("The value '" + initialValue + "' is not a valid enumerator of '" + eDataType.getName() + "'");
        return result;
    }
}
public class PlaceController {
    /**
     * Handles basic place controller action events. Derived classes should be sure to call
     * <code>super.handleAction</code> for events they don't specifically handle.
     *
     * @param action the action event being dispatched
     * @return true if a delegate (or the superclass) handled the action
     */
    @Override
    public boolean handleAction(final ActionEvent action) {
        // single-element array so the anonymous op below can mutate the flag
        final boolean[] handled = new boolean[1];
        // let our delegates have a crack at the action
        applyToDelegates(new DelegateOp(PlaceControllerDelegate.class) {
            @Override
            public void apply(PlaceControllerDelegate delegate) {
                // we take advantage of short-circuiting here: once one delegate has
                // handled the action, later delegates are no longer invoked
                handled[0] = handled[0] || delegate.handleAction(action);
            }
        });
        // if they didn't handle it, pass it off to the super class
        return handled[0] || super.handleAction(action);
    }
}
public class Clock { /** * Defines if the control should use the given customFont * to render all text elements * @ param ENABLED */ public void setCustomFontEnabled ( final boolean ENABLED ) { } }
if ( null == customFontEnabled ) { _customFontEnabled = ENABLED ; fireUpdateEvent ( RESIZE_EVENT ) ; } else { customFontEnabled . set ( ENABLED ) ; }
public class RetrySimulator { /** * Execute the simulator for a give # of iterations . * @ param numSimulations Number of simulations to run * @ return the outcome of all simulations */ public RetrySimulation executeSimulation ( int numSimulations ) { } }
RetrySimulation simulation = new RetrySimulation ( ) ; for ( int i = 0 ; i < numSimulations ; i ++ ) { simulation . addSequence ( executeSingleSimulation ( ) ) ; } return simulation ;
public class ADictionary { /** * get the key ' s type index located in ILexicon interface * @ param key * @ return int */ public static int getIndex ( String key ) { } }
if ( key == null ) { return - 1 ; } key = key . toUpperCase ( ) ; if ( key . startsWith ( "CJK_WORD" ) ) { return ILexicon . CJK_WORD ; } else if ( key . startsWith ( "CJK_CHAR" ) ) { return ILexicon . CJK_CHAR ; } else if ( key . startsWith ( "CJK_UNIT" ) ) { return ILexicon . CJK_UNIT ; } else if ( key . startsWith ( "CN_LNAME_ADORN" ) ) { return ILexicon . CN_LNAME_ADORN ; } else if ( key . startsWith ( "CN_LNAME" ) ) { return ILexicon . CN_LNAME ; } else if ( key . startsWith ( "CN_SNAME" ) ) { return ILexicon . CN_SNAME ; } else if ( key . startsWith ( "CN_DNAME_1" ) ) { return ILexicon . CN_DNAME_1 ; } else if ( key . startsWith ( "CN_DNAME_2" ) ) { return ILexicon . CN_DNAME_2 ; } else if ( key . startsWith ( "STOP_WORD" ) ) { return ILexicon . STOP_WORD ; } else if ( key . startsWith ( "DOMAIN_SUFFIX" ) ) { return ILexicon . DOMAIN_SUFFIX ; } else if ( key . startsWith ( "NUMBER_UNIT" ) ) { return ILexicon . NUMBER_UNIT ; } else if ( key . startsWith ( "CJK_SYN" ) ) { return ILexicon . CJK_SYN ; } return ILexicon . CJK_WORD ;
public class CmsEditor {
    /**
     * Shows the selected error page in case of an exception.<p>
     *
     * @param editor initialized instance of the editor class
     * @param exception the current exception (may be null)
     * @throws JspException if inclusion of the error page fails
     */
    protected void showErrorPage(Object editor, Exception exception) throws JspException {
        // save initialized instance of the editor class in request attribute for included sub-elements
        getJsp().getRequest().setAttribute(SESSION_WORKPLACE_CLASS, editor);
        // reading of file contents failed, show error dialog
        setAction(ACTION_SHOW_ERRORMESSAGE);
        setParamTitle(key(Messages.GUI_TITLE_EDIT_1, new Object[] {CmsResource.getName(getParamResource())}));
        if (exception != null) {
            // expose the throwable to the error page and log it for diagnostics
            getJsp().getRequest().setAttribute(ATTRIBUTE_THROWABLE, exception);
            if (CmsLog.getLog(editor).isWarnEnabled()) {
                CmsLog.getLog(editor).warn(exception.getLocalizedMessage(), exception);
            }
        }
        // include the common error dialog
        getJsp().include(FILE_DIALOG_SCREEN_ERRORPAGE);
    }
}
public class NIOServerCnxn { /** * Close resources associated with the sock of this cnxn . */ private void closeSock ( ) { } }
if ( sock == null ) { return ; } LOG . debug ( "Closed socket connection for client " + sock . socket ( ) . getRemoteSocketAddress ( ) + ( sessionId != 0 ? " which had sessionid 0x" + Long . toHexString ( sessionId ) : " (no session established for client)" ) ) ; try { /* * The following sequence of code is stupid ! You would think that * only sock . close ( ) is needed , but alas , it doesn ' t work that way . * If you just do sock . close ( ) there are cases where the socket * doesn ' t actually close . . . */ sock . socket ( ) . shutdownOutput ( ) ; } catch ( IOException e ) { // This is a relatively common exception that we can ' t avoid if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( "ignoring exception during output shutdown" , e ) ; } } try { sock . socket ( ) . shutdownInput ( ) ; } catch ( IOException e ) { // This is a relatively common exception that we can ' t avoid if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( "ignoring exception during input shutdown" , e ) ; } } try { sock . socket ( ) . close ( ) ; } catch ( IOException e ) { if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( "ignoring exception during socket close" , e ) ; } } try { sock . close ( ) ; // XXX The next line doesn ' t seem to be needed , but some posts // to forums suggest that it is needed . Keep in mind if errors in // this section arise . // factory . selector . wakeup ( ) ; } catch ( IOException e ) { if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( "ignoring exception during socketchannel close" , e ) ; } } sock = null ;
public class SimpleCSVStream {
    /**
     * Restarts the stream: reopens the CSV file, skips blank/comment/header
     * lines, rebuilds the dataset header from the first data row, and primes
     * the first instance.
     *
     * @see moa.streams.InstanceStream#restart()
     */
    @Override
    public void restart() {
        try {
            if (fileReader != null) {
                fileReader.close();
            }
            InputStream fileStream = new FileInputStream(this.csvFileOption.getFile());
            this.fileProgressMonitor = new InputStreamProgressMonitor(fileStream);
            this.fileReader = new BufferedReader(new InputStreamReader(fileProgressMonitor));
            String line;
            // skip empty lines and lines starting with '%' or '@' (ARFF-style comments/headers)
            do {
                line = this.fileReader.readLine();
                if (line == null) {
                    break;
                }
                line = line.trim();
            } while (line.isEmpty() || line.charAt(0) == '%' || line.charAt(0) == '@');
            if (line != null) {
                StringTokenizer token = new StringTokenizer(line, splitCharOption.getValue());
                this.numTokens = token.countTokens();
                // one token is consumed by the class column when a class index is
                // set; a synthetic "class" attribute is always appended
                this.numAttributes = this.numTokens - (classIndexOption.isSet() ? 1 : 0) + 1;
                ArrayList<Attribute> attributes = new ArrayList<Attribute>(this.numAttributes);
                for (int i = 1; i < this.numAttributes; i++) {
                    attributes.add(new Attribute("Dim " + i));
                }
                // single-label nominal class attribute placed last
                ArrayList<String> classLabels = new ArrayList<String>();
                classLabels.add("0");
                attributes.add(new Attribute("class", classLabels));
                this.dataset = new Instances(csvFileOption.getFile().getName(), attributes, 0);
                this.dataset.setClassIndex(this.numAttributes - 1);
                numAttsOption = new IntOption("numAtts", 'a', "", this.numAttributes);
                // parse the first data row into the first instance
                double[] value = new double[this.numAttributes];
                for (int i = 0; i < this.numTokens && token.hasMoreTokens(); i++) {
                    value[i] = Double.valueOf(token.nextToken());
                }
                this.lastInstanceRead = new InstanceExample(new DenseInstance(1, value));
                this.lastInstanceRead.getData().setDataset(this.dataset);
                this.numInstancesRead = 0;
                this.hitEndOfFile = false;
            } else {
                // no data rows at all: the stream is immediately exhausted
                this.lastInstanceRead = null;
                this.numInstancesRead = 0;
                this.hitEndOfFile = true;
            }
        } catch (IOException ioe) {
            throw new RuntimeException("SimpleCSVStream restart failed.", ioe);
        }
    }
}
public class HttpStream { /** * Initializes the stream for the next request . */ private void init ( PathImpl path ) { } }
_contentLength = - 1 ; _isChunked = false ; _isRequestDone = false ; _didGet = false ; _isPost = false ; _isHead = false ; _method = null ; _attributes . clear ( ) ; // setPath ( path ) ; if ( path instanceof HttpPath ) _virtualHost = ( ( HttpPath ) path ) . getVirtualHost ( ) ;
public class RxUtil { /** * Returns the concatenation of two { @ link Observable } s but the first * sequence will be emitted in its entirety and ignored before o2 starts * emitting . * @ param < T > * the generic type of the second observable * @ param o1 * the sequence to ignore * @ param o2 * the sequence to emit after o1 ignored * @ return observable result of concatenating two observables , ignoring the * first */ @ SuppressWarnings ( "unchecked" ) public static < T > Observable < T > concatButIgnoreFirstSequence ( Observable < ? > o1 , Observable < T > o2 ) { } }
return Observable . concat ( ( Observable < T > ) o1 . ignoreElements ( ) , o2 ) ;
public class AbstractOperatingSystemWrapper { /** * Run a shell command . * @ param command is the shell command to run . * @ return the standard output */ protected static String runCommand ( String ... command ) { } }
try { final Process p = Runtime . getRuntime ( ) . exec ( command ) ; if ( p == null ) { return null ; } final StringBuilder bStr = new StringBuilder ( ) ; try ( InputStream standardOutput = p . getInputStream ( ) ) { final byte [ ] buffer = new byte [ BUFFER_SIZE ] ; int len ; while ( ( len = standardOutput . read ( buffer ) ) > 0 ) { bStr . append ( new String ( buffer , 0 , len ) ) ; } p . waitFor ( ) ; return bStr . toString ( ) ; } } catch ( Exception e ) { return null ; }
public class GetDocumentationVersionsResult { /** * The current page of elements from this collection . * @ param items * The current page of elements from this collection . */ public void setItems ( java . util . Collection < DocumentationVersion > items ) { } }
if ( items == null ) { this . items = null ; return ; } this . items = new java . util . ArrayList < DocumentationVersion > ( items ) ;
public class PoolBase { /** * This will create a string for debug logging . Given a set of " reset bits " , this * method will return a concatenated string , for example : * Input : 0b00110 * Output : " autoCommit , isolation " * @ param bits a set of " reset bits " * @ return a string of which states were reset */ private String stringFromResetBits ( final int bits ) { } }
final StringBuilder sb = new StringBuilder ( ) ; for ( int ndx = 0 ; ndx < RESET_STATES . length ; ndx ++ ) { if ( ( bits & ( 0b1 << ndx ) ) != 0 ) { sb . append ( RESET_STATES [ ndx ] ) . append ( ", " ) ; } } sb . setLength ( sb . length ( ) - 2 ) ; // trim trailing comma return sb . toString ( ) ;
public class AbstractReplicator {
    /**
     * Decides whether a replication error is recoverable and, if so, schedules
     * a retry. Mirrors the Objective-C method {@code -(bool) handleError:(C4Error)c4err}.
     *
     * @param c4err the native replicator error
     * @return true if the error was treated as recoverable (a retry was
     *         scheduled or will occur on a network change), false if permanent
     */
    private boolean handleError(C4Error c4err) {
        // If this is a transient error, or if I'm continuous and the error might go away with a change
        // in network (i.e. network down, hostname unknown), then go offline and retry later.
        final boolean isTransient = C4Replicator.mayBeTransient(c4err)
            || ((c4err.getDomain() == C4Constants.ErrorDomain.WEB_SOCKET)
                && (c4err.getCode() == C4WebSocketCloseCode.kWebSocketCloseUserTransient));
        final boolean isNetworkDependent = C4Replicator.mayBeNetworkDependent(c4err);
        if (!isTransient && !(config.isContinuous() && isNetworkDependent)) {
            return false; // nope, this is permanent
        }
        if (!config.isContinuous() && retryCount >= MAX_ONE_SHOT_RETRY_COUNT) {
            return false; // too many retries
        }
        // recoverable: tear down the current native replicator before retrying
        clearRepl();
        if (!isTransient) {
            // network-dependent error: wait for the reachability observer (below)
            // to signal a network change rather than retrying on a timer
            Log.i(DOMAIN, "%s: Network error (%s); will retry when network changes...", this, c4err);
        } else {
            // On transient error, retry periodically, with exponential backoff:
            final int delay = retryDelay(++retryCount);
            Log.i(DOMAIN, "%s: Transient error (%s); will retry in %d sec...", this, c4err, delay);
            handler.schedule(new Runnable() {
                @Override
                public void run() {
                    retry();
                }
            }, delay, TimeUnit.SECONDS);
        }
        // Also retry when the network changes:
        startReachabilityObserver();
        return true;
    }
}
public class MessageHeader { /** * Get the header with the given name . * The lookup is case insensitive . * @ param name The name of the header . * @ return The header value . */ public Optional < String > getHeader ( String name ) { } }
PSequence < String > values = lowercaseHeaders . get ( name . toLowerCase ( Locale . ENGLISH ) ) ; if ( values == null || values . isEmpty ( ) ) { return Optional . empty ( ) ; } else { return Optional . of ( values . get ( 0 ) ) ; }
public class ESClient { /** * Adds the source . * @ param entity * the entity * @ param values * the values * @ param entityType * the entity type */ private void addSource ( Object entity , Map < String , Object > values , EntityType entityType ) { } }
Set < Attribute > attributes = entityType . getAttributes ( ) ; for ( Attribute attrib : attributes ) { if ( ! attrib . isAssociation ( ) ) { Object value = PropertyAccessorHelper . getObject ( entity , ( Field ) attrib . getJavaMember ( ) ) ; values . put ( ( ( AbstractAttribute ) attrib ) . getJPAColumnName ( ) , value ) ; } }
public class Chunk { /** * Sets a goto for a remote destination for this < CODE > Chunk < / CODE > . * @ param filename * the file name of the destination document * @ param page * the page of the destination to go to . First page is 1 * @ return this < CODE > Chunk < / CODE > */ public Chunk setRemoteGoto ( String filename , int page ) { } }
return setAttribute ( REMOTEGOTO , new Object [ ] { filename , Integer . valueOf ( page ) } ) ;
public class NumberFormat {
    /**
     * Parses text from the given string as a CurrencyAmount. Unlike the
     * parse() method, this method will attempt to parse a generic currency
     * name, searching for a match of this object's locale's currency display
     * names, or for a 3-letter ISO currency code. This method will fail if
     * this format is not a currency format, that is, if it does not contain
     * the currency pattern symbol (U+00A4) in its prefix or suffix.
     *
     * @param text the text to parse
     * @param pos input-output position; on input, the position within text to
     *        match; must have 0 &lt;= pos.getIndex() &lt; text.length(); on
     *        output, the position after the last matched character. If the
     *        parse fails, the position is unchanged upon output.
     * @return a CurrencyAmount, or null upon failure
     */
    public CurrencyAmount parseCurrency(CharSequence text, ParsePosition pos) {
        ///CLOVER:OFF
        // Default implementation only -- subclasses should override
        Number n = parse(text.toString(), pos);
        return n == null ? null : new CurrencyAmount(n, getEffectiveCurrency());
        ///CLOVER:ON
    }
}
public class Serialized {
    /**
     * Returns the deserialized objects from the given {@link File} as an
     * {@link Observable} stream, reading through a buffer of
     * <code>bufferSize</code> bytes.
     *
     * @param file the input file
     * @param bufferSize the buffer size for reading bytes from the file
     * @param <T> the generic type of the deserialized objects returned in the
     *        stream
     * @return the stream of deserialized objects from the file as an
     *         {@link Observable}
     */
    public static <T extends Serializable> Observable<T> read(final File file, final int bufferSize) {
        // opens the stream lazily, once per subscription
        Func0<ObjectInputStream> resourceFactory = new Func0<ObjectInputStream>() {
            @Override
            public ObjectInputStream call() {
                try {
                    return new ObjectInputStream(new BufferedInputStream(new FileInputStream(file), bufferSize));
                } catch (FileNotFoundException e) {
                    throw new RuntimeException(e);
                } catch (IOException e) {
                    throw new RuntimeException(e);
                }
            }
        };
        // adapts the open stream into the Observable of deserialized objects
        Func1<ObjectInputStream, Observable<? extends T>> observableFactory = new Func1<ObjectInputStream, Observable<? extends T>>() {
            @Override
            public Observable<? extends T> call(ObjectInputStream is) {
                return read(is);
            }
        };
        // closes the stream when the Observable terminates or is unsubscribed
        Action1<ObjectInputStream> disposeAction = new Action1<ObjectInputStream>() {
            @Override
            public void call(ObjectInputStream ois) {
                try {
                    ois.close();
                } catch (IOException e) {
                    throw new RuntimeException(e);
                }
            }
        };
        // final argument true = dispose eagerly (close before terminal event)
        return Observable.using(resourceFactory, observableFactory, disposeAction, true);
    }
}
public class EmbeddedWikipediaExample { /** * Set bootstrap lookback , i . e . oldest revision to pull . */ @ CliObjectOption ( description = "Sets the period for which articles should be pulled in ISO time format (e.g. P2D, PT1H)" ) public EmbeddedWikipediaExample lookback ( String isoLookback ) { } }
this . setConfiguration ( WikipediaExtractor . BOOTSTRAP_PERIOD , isoLookback ) ; return this ;
public class SimpleDeploymentDescription { /** * Creates a simple deployment description . * @ param name the name for the deployment * @ param serverGroups the server groups * @ return the deployment description */ public static SimpleDeploymentDescription of ( final String name , @ SuppressWarnings ( "TypeMayBeWeakened" ) final Set < String > serverGroups ) { } }
final SimpleDeploymentDescription result = of ( name ) ; if ( serverGroups != null ) { result . addServerGroups ( serverGroups ) ; } return result ;
public class SubCommandMetaSet {
    /**
     * Sets the &lt;cluster.xml, stores.xml&gt; metadata pair atomically.
     *
     * @param adminClient an instance of AdminClient pointing to the given cluster
     * @param nodeIds node ids on which to set metadata
     * @param clusterValue cluster value to set
     * @param storesValue stores value to set
     */
    public static void doMetaSetPair(AdminClient adminClient, List<Integer> nodeIds, Object clusterValue, Object storesValue) {
        VectorClock updatedClusterVersion = null;
        VectorClock updatedStoresVersion = null;
        for (Integer nodeId : nodeIds) {
            if (updatedClusterVersion == null && updatedStoresVersion == null) {
                // first node: seed both clocks from its current metadata versions
                updatedClusterVersion = (VectorClock) adminClient.metadataMgmtOps.getRemoteMetadata(nodeId, MetadataStore.CLUSTER_KEY).getVersion();
                updatedStoresVersion = (VectorClock) adminClient.metadataMgmtOps.getRemoteMetadata(nodeId, MetadataStore.STORES_KEY).getVersion();
            } else {
                // subsequent nodes: merge their clocks so the final version dominates all
                updatedClusterVersion = updatedClusterVersion.merge((VectorClock) adminClient.metadataMgmtOps.getRemoteMetadata(nodeId, MetadataStore.CLUSTER_KEY).getVersion());
                updatedStoresVersion = updatedStoresVersion.merge((VectorClock) adminClient.metadataMgmtOps.getRemoteMetadata(nodeId, MetadataStore.STORES_KEY).getVersion());
            }
            // TODO: This will work for now but we should take a step back and
            // think about a uniform clock for the metadata values.
            // NOTE(review): these increments run once per loop iteration, always
            // against the FIRST node id (nodeIds.iterator().next()), not the
            // current nodeId — looks like they were meant to run once after the
            // loop; confirm intent before changing.
            updatedClusterVersion = updatedClusterVersion.incremented(nodeIds.iterator().next(), System.currentTimeMillis());
            updatedStoresVersion = updatedStoresVersion.incremented(nodeIds.iterator().next(), System.currentTimeMillis());
        }
        // push both keys in a single atomic pair update with the merged clocks
        adminClient.metadataMgmtOps.updateRemoteMetadataPair(nodeIds, MetadataStore.CLUSTER_KEY, Versioned.value(clusterValue.toString(), updatedClusterVersion), MetadataStore.STORES_KEY, Versioned.value(storesValue.toString(), updatedStoresVersion));
    }
}
public class PageWrapper { /** * Updates the offset of the wrapped BTreePage . * @ param value The offset to assign . */ void setOffset ( long value ) { } }
if ( this . pointer != null && this . offset . get ( ) != this . pointer . getOffset ( ) ) { // We have already assigned an offset to this . throw new IllegalStateException ( "Cannot assign offset more than once." ) ; } this . offset . set ( value ) ;
public class RequestMessage { /** * @ see javax . servlet . ServletRequest # getAttributeNames ( ) */ @ Trivial @ Override public Enumeration < String > getAttributeNames ( ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "Getting attribute names" ) ; } return Collections . enumeration ( this . attributes . keySet ( ) ) ;
public class NodeSequence { /** * Create a copy of a batch that always has a { @ link Batch # rowCount ( ) } . * @ param batch the original batch ; may be null * @ return the batch that has a true { @ link Batch # rowCount ( ) } , or the original batch if null or empty or if the original has a * non - negative row count */ public static Batch copy ( Batch batch ) { } }
if ( batch == null ) return batch ; if ( batch . isEmpty ( ) || batch . width ( ) < 1 ) return emptyBatch ( batch . getWorkspaceName ( ) , 1 ) ; // Otherwise , create a copy . . . return batch . width ( ) == 1 ? new SingleWidthBatch ( batch ) : new MultiWidthBatch ( batch ) ;
public class ButtonSerializer { /** * ( non - Javadoc ) * @ see * com . google . gson . JsonDeserializer # deserialize ( com . google . gson . JsonElement , * java . lang . reflect . Type , com . google . gson . JsonDeserializationContext ) */ public Button deserialize ( JsonElement json , Type typeOfT , JsonDeserializationContext context ) throws JsonParseException { } }
String buttonTypeString = json . getAsJsonObject ( ) . get ( "type" ) . getAsString ( ) ; ButtonType buttonType = ButtonType . valueOf ( buttonTypeString . toUpperCase ( ) ) ; Class < ? > buttonClass = getButtonClass ( buttonType ) ; return context . deserialize ( json , buttonClass ) ;
public class Style { /** * Default material indigo transparent style for SuperToasts . * @ return A new Style */ public static Style indigo ( ) { } }
final Style style = new Style ( ) ; style . color = PaletteUtils . getSolidColor ( PaletteUtils . MATERIAL_INDIGO ) ; return style ;
public class PowerAdapter { /** * Concatenates the views represented by the specified layout resources with this adapter , and returns the result . * @ param layoutResources The layout resources that will be inflated and prepended to this adapter . * @ return The resulting composite adapter . * @ see ViewFactories # asViewFactory ( int ) * @ see # prepend ( PowerAdapter . . . ) * @ see # concat ( PowerAdapter . . . ) */ @ CheckResult @ NonNull public final PowerAdapter prepend ( @ NonNull @ LayoutRes int ... layoutResources ) { } }
checkNotNull ( layoutResources , "layoutResources" ) ; if ( layoutResources . length == 0 ) { return this ; } return prepend ( asAdapter ( layoutResources ) ) ;