signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class RowDataTransformer { /** * 设置对应的目标库schema . name , 需要考虑mutl配置情况 * < pre > * case : * 1 . 源 : offer , 目 : offer * 2 . 源 : offer [ 1-128 ] , 目 : offer * 3 . 源 : offer [ 1-128 ] , 目 : offer [ 1-128] * 4 . 源 : offer , 目 : offer [ 1-128 ] 不支持 , 会报错 */ private void buildName ( EventData data , EventData result , DataMediaPair pair ) { } }
DataMedia targetDataMedia = pair . getTarget ( ) ; DataMedia sourceDataMedia = pair . getSource ( ) ; String schemaName = buildName ( data . getSchemaName ( ) , sourceDataMedia . getNamespaceMode ( ) , targetDataMedia . getNamespaceMode ( ) ) ; String tableName = buildName ( data . getTableName ( ) , sourceDataMedia . getNameMode ( ) , targetDataMedia . getNameMode ( ) ) ; result . setSchemaName ( schemaName ) ; result . setTableName ( tableName ) ;
public class CachedResourceBundlesHandler { /** * ( non - Javadoc ) * @ see net . jawr . web . resource . bundle . handler . ResourceBundlesHandler # * rebuildModifiedBundles ( ) */ @ Override public void rebuildModifiedBundles ( ) { } }
List < JoinableResourceBundle > bundlesToRebuild = rsHandler . getBundlesToRebuild ( ) ; for ( JoinableResourceBundle bundle : bundlesToRebuild ) { ResourceBundlePathsIterator bundlePaths = this . getBundlePaths ( bundle . getId ( ) , new NoCommentCallbackHandler ( ) , Collections . EMPTY_MAP ) ; while ( bundlePaths . hasNext ( ) ) { BundlePath bundlePath = bundlePaths . next ( ) ; cacheMgr . remove ( TEXT_CACHE_PREFIX + bundlePath . getPath ( ) ) ; cacheMgr . remove ( ZIP_CACHE_PREFIX + bundlePath . getPath ( ) ) ; } } rsHandler . rebuildModifiedBundles ( ) ;
public class JmsSyncProducer { /** * Create new JMS session . * @ param connection to use for session creation . * @ return session . * @ throws JMSException */ protected void createSession ( Connection connection ) throws JMSException { } }
if ( session == null ) { if ( ! endpointConfiguration . isPubSubDomain ( ) && connection instanceof QueueConnection ) { session = ( ( QueueConnection ) connection ) . createQueueSession ( false , Session . AUTO_ACKNOWLEDGE ) ; } else if ( endpointConfiguration . isPubSubDomain ( ) && endpointConfiguration . getConnectionFactory ( ) instanceof TopicConnectionFactory ) { session = ( ( TopicConnection ) connection ) . createTopicSession ( false , Session . AUTO_ACKNOWLEDGE ) ; } else { log . warn ( "Not able to create a session with connection factory '" + endpointConfiguration . getConnectionFactory ( ) + "'" + " when using setting 'publish-subscribe-domain' (=" + endpointConfiguration . isPubSubDomain ( ) + ")" ) ; session = connection . createSession ( false , Session . AUTO_ACKNOWLEDGE ) ; } }
public class DatabaseEventManager { /** * Fires all collected insertion , deletion or update events as one * DataStoreEvent , i . e . notifies all registered DataStoreListener how the * content of the database has been changed since * { @ link # accumulateDataStoreEvents ( ) } was called . * @ see # accumulateDataStoreEvents * @ see DataStoreListener * @ see DataStoreEvent */ public void flushDataStoreEvents ( ) { } }
DataStoreEvent e ; switch ( currentDataStoreEventType ) { case INSERT : e = DataStoreEvent . insertionEvent ( dataStoreObjects ) ; break ; case REMOVE : e = DataStoreEvent . removalEvent ( dataStoreObjects ) ; break ; case UPDATE : e = DataStoreEvent . updateEvent ( dataStoreObjects ) ; break ; default : return ; } for ( int i = dataListenerList . size ( ) ; -- i >= 0 ; ) { dataListenerList . get ( i ) . contentChanged ( e ) ; } // reset accumulateDataStoreEvents = false ; currentDataStoreEventType = null ; dataStoreObjects = null ;
public class CredHubTemplateFactory { /** * Create a { @ link ReactiveCredHubTemplate } for interaction with a CredHub server * using OAuth2 for authentication . * @ param credHubProperties connection properties * @ param clientOptions connection options * @ param clientRegistrationRepository a repository of OAuth2 client registrations * @ param authorizedClientRepository a repository of OAuth2 client authorizations * @ return a { @ code ReactiveCredHubTemplate } */ public ReactiveCredHubOperations reactiveCredHubTemplate ( CredHubProperties credHubProperties , ClientOptions clientOptions , ReactiveClientRegistrationRepository clientRegistrationRepository , ServerOAuth2AuthorizedClientRepository authorizedClientRepository ) { } }
return new ReactiveCredHubTemplate ( credHubProperties , clientHttpConnector ( clientOptions ) , clientRegistrationRepository , authorizedClientRepository ) ;
public class FactionWarfareApi { /** * List of the top corporations in faction warfare Top 10 leaderboard of * corporations for kills and victory points separated by total , last week * and yesterday - - - This route expires daily at 11:05 * @ param datasource * The server name you would like data from ( optional , default to * tranquility ) * @ param ifNoneMatch * ETag from a previous request . A 304 will be returned if this * matches the current ETag ( optional ) * @ return ApiResponse & lt ; FactionWarfareLeaderboardCorporationsResponse & gt ; * @ throws ApiException * If fail to call the API , e . g . server error or cannot * deserialize the response body */ public ApiResponse < FactionWarfareLeaderboardCorporationsResponse > getFwLeaderboardsCorporationsWithHttpInfo ( String datasource , String ifNoneMatch ) throws ApiException { } }
com . squareup . okhttp . Call call = getFwLeaderboardsCorporationsValidateBeforeCall ( datasource , ifNoneMatch , null ) ; Type localVarReturnType = new TypeToken < FactionWarfareLeaderboardCorporationsResponse > ( ) { } . getType ( ) ; return apiClient . execute ( call , localVarReturnType ) ;
public class snmpcommunity { /** * Use this API to add snmpcommunity resources . */ public static base_responses add ( nitro_service client , snmpcommunity resources [ ] ) throws Exception { } }
base_responses result = null ; if ( resources != null && resources . length > 0 ) { snmpcommunity addresources [ ] = new snmpcommunity [ resources . length ] ; for ( int i = 0 ; i < resources . length ; i ++ ) { addresources [ i ] = new snmpcommunity ( ) ; addresources [ i ] . communityname = resources [ i ] . communityname ; addresources [ i ] . permissions = resources [ i ] . permissions ; } result = add_bulk_request ( client , addresources ) ; } return result ;
public class AnnotationTargetsImpl_Targets { /** * When reading from serialization , don ' t set the referenced classes . */ protected void i_setSuperclassName ( String i_subclassName , String i_superclassName ) { } }
i_superclassNameMap . put ( i_subclassName , i_superclassName ) ; if ( tc . isDebugEnabled ( ) ) { Tr . debug ( tc , MessageFormat . format ( "[ {0} ] Subclass [ {1} ] has superclass [ {2} ]" , new Object [ ] { getHashText ( ) , i_subclassName , i_superclassName } ) ) ; }
public class TracerFactory {

    /**
     * Reads the configuration from the given InputStream.
     *
     * Parses the XML document, optionally validates it against the configured
     * schema, then (under the pool write lock) instantiates the configured
     * tracers by reflection, records thread-to-tracer mappings, and resolves
     * the default tracer. Finally (under the queue write lock) the queue
     * configuration is read.
     *
     * @param inputStream the input stream providing the configuration.
     * @throws TracerFactory.Exception indicates a configuration problem
     * @see TracerFactory#readConfiguration(java.io.File)
     */
    public void readConfiguration(InputStream inputStream) throws TracerFactory.Exception {
        // Without a schema, parsing still proceeds but cannot be validated.
        if (this.traceConfigSchema == null)
            System.err.println("CAUTION: Unable to validate the given configuration against a schema.");
        DocumentBuilderFactory builderFactory = javax.xml.parsers.DocumentBuilderFactory.newInstance();
        builderFactory.setNamespaceAware(true);
        builderFactory.setXIncludeAware(false);
        try {
            DocumentBuilder parser = builderFactory.newDocumentBuilder();
            Document tracerConfigDoc = parser.parse(inputStream);
            if (this.traceConfigSchema != null) {
                // Validate the parsed DOM against the schema, collecting errors.
                DOMSource domSource = new DOMSource(tracerConfigDoc);
                Validator traceConfigValidator = this.traceConfigSchema.newValidator();
                TracerFactory.ErrorHandler errorHandler = new TracerFactory.ErrorHandler();
                traceConfigValidator.setErrorHandler(errorHandler);
                traceConfigValidator.validate(domSource);
            }
            this.poolWriteLock.lock();
            try {
                // Instantiate every configured pool tracer by its "class" attribute.
                NodeList tracerNodes = (NodeList) this.xpath.evaluate(
                        "/dns:TraceConfig/dns:Pool/dns:TraceLogger",
                        tracerConfigDoc.getDocumentElement(), XPathConstants.NODESET);
                System.out.println();
                System.out.println("Configured Pool Tracers = " + tracerNodes.getLength());
                for (int i = 0; i < tracerNodes.getLength(); i++) {
                    System.out.println();
                    System.out.println("(+) " + (i + 1) + ". TraceLogger");
                    Element tracerElement = (Element) tracerNodes.item(i);
                    if (!tracerElement.hasAttribute("name"))
                        throw new TracerFactory.Exception("Missing 'name' attribut.");
                    String name = tracerElement.getAttribute("name");
                    String className = tracerElement.getAttribute("class");
                    System.out.println("name = " + name);
                    System.out.println("className = " + className);
                    Class<?> tracerClass = Class.forName(className);
                    // Only AbstractTracer subclasses are legal pool tracers.
                    if (!AbstractTracer.class.isAssignableFrom(tracerClass))
                        throw new TracerFactory.Exception("Illegal tracer class!");
                    AbstractTracer tracer =
                            createTracer((Class<? extends AbstractTracer>) tracerClass, name);
                    tracer.readConfiguration(this.xpath, tracerElement);
                    // synchronized ( this . mapGuardObj ) {
                    //     this . tracerPool . put ( name , tracer ) ;
                    this.tracerPool.put(name, tracer);
                }
                // Record the declared thread -> tracer mappings for later lookup.
                NodeList threadNodes = (NodeList) this.xpath.evaluate(
                        "/dns:TraceConfig/dns:Map/dns:Threads/dns:Thread",
                        tracerConfigDoc.getDocumentElement(), XPathConstants.NODESET);
                System.out.println();
                System.out.println("Configured Tracermappings = " + threadNodes.getLength());
                for (int i = 0; i < threadNodes.getLength(); i++) {
                    System.out.println();
                    System.out.println("(+) " + (i + 1) + ". Mapping");
                    Element threadElement = (Element) threadNodes.item(i);
                    String threadName = threadElement.getAttribute("name");
                    String referencedTracerName = (String) this.xpath.evaluate(
                            "./dns:TraceLogger/@ref", threadElement, XPathConstants.STRING);
                    System.out.println(threadName + " => " + referencedTracerName);
                    this.threadName2Element.put(threadName, threadElement);
                }
                // The default tracer must be a NullTracer; fall back to the shared one.
                Node defaultTracerNode = (Node) this.xpath.evaluate(
                        "/dns:TraceConfig/dns:DefaultTracer",
                        tracerConfigDoc.getDocumentElement(), XPathConstants.NODE);
                if (defaultTracerNode != null) {
                    String className = ((Element) defaultTracerNode).getAttribute("class");
                    Class<?> tracerClass = Class.forName(className);
                    if (!NullTracer.class.isAssignableFrom(tracerClass))
                        throw new TracerFactory.Exception("Requiring a NullTracer as default tracer!");
                    this.defaultTracer = createTracer((Class<? extends NullTracer>) tracerClass);
                } else {
                    this.defaultTracer = TracerFactory.NULLTRACER;
                }
            } finally {
                this.poolWriteLock.unlock();
            }
            this.queueWriteLock.lock();
            try {
                // Optional queue configuration; default-construct when absent.
                Node queueNode = (Node) this.xpath.evaluate("/dns:TraceConfig/dns:Queue",
                        tracerConfigDoc.getDocumentElement(), XPathConstants.NODE);
                if (queueNode != null) {
                    this.queueConfig = new Queue(queueNode);
                } else {
                    this.queueConfig = new Queue();
                }
            } finally {
                this.queueWriteLock.unlock();
            }
        } catch (IOException | ParserConfigurationException | SAXException
                | XPathExpressionException | ClassNotFoundException | AbstractTracer.Exception ex) {
            // Wrap every parse/validation/reflection failure uniformly.
            throw new TracerFactory.Exception(ex);
        }
    }
}
public class CPDefinitionOptionRelPersistenceImpl { /** * Caches the cp definition option rels in the entity cache if it is enabled . * @ param cpDefinitionOptionRels the cp definition option rels */ @ Override public void cacheResult ( List < CPDefinitionOptionRel > cpDefinitionOptionRels ) { } }
for ( CPDefinitionOptionRel cpDefinitionOptionRel : cpDefinitionOptionRels ) { if ( entityCache . getResult ( CPDefinitionOptionRelModelImpl . ENTITY_CACHE_ENABLED , CPDefinitionOptionRelImpl . class , cpDefinitionOptionRel . getPrimaryKey ( ) ) == null ) { cacheResult ( cpDefinitionOptionRel ) ; } else { cpDefinitionOptionRel . resetOriginalValues ( ) ; } }
public class ExcludingRuleImpl { /** * { @ inheritDoc } */ public boolean suiteFor ( NodeData state ) { } }
boolean suiteForPath = excludePath == null ? true : validateByPath ( state ) ; boolean suiteForNodeType = excludeNodeType == null ? true : validateByNodeType ( state ) ; return suiteForPath && suiteForNodeType ;
public class SQLiteDelegate { /** * Convenience method for inserting a row into the database . * @ param dto any object * @ return T object with the * @ throws android . database . sqlite . SQLiteException Error inserting */ @ Override public synchronized T create ( T dto ) throws Exception { } }
long rowid = db . insert ( transformer . getTableName ( ) , null , transformer . transform ( dto ) ) ; Log . i ( this . getClass ( ) . getName ( ) , "ROW ID: " + rowid ) ; if ( rowid == - 1 ) throw new SQLiteException ( "Error inserting " + dto . getClass ( ) . toString ( ) ) ; return transformer . setId ( dto , ( int ) rowid ) ;
public class CachedDirectoryLookupService { /** * Stop the CachedDirectoryLookupService . * It is thread safe . */ @ Override public void stop ( ) { } }
if ( isStarted . compareAndSet ( true , false ) ) { // if you shutdown it , it can not be use anymore super . stop ( ) ; ScheduledExecutorService service = this . syncService . getAndSet ( newSyncService ( ) ) ; service . shutdown ( ) ; LOGGER . info ( "Cache sync Service is shutdown" ) ; for ( Entry < String , ModelServiceClientCache > entry : cache . entrySet ( ) ) { removeInstanceChangeListener ( entry . getKey ( ) , entry . getValue ( ) ) ; } getCache ( ) . clear ( ) ; }
public class Distill { /** * Split using delimiter and convert to slash notation . */ static String [ ] parsePackages ( String packages ) { } }
if ( packages == null || packages . trim ( ) . length ( ) == 0 ) { return new String [ 0 ] ; } String [ ] commaSplit = packages . split ( "," ) ; String [ ] processPackages = new String [ commaSplit . length ] ; for ( int i = 0 ; i < commaSplit . length ; i ++ ) { processPackages [ i ] = convert ( commaSplit [ i ] ) ; } return processPackages ;
public class TaskState { /** * Convert this { @ link TaskState } to a json document . * @ param jsonWriter a { @ link com . google . gson . stream . JsonWriter } used to write the json document * @ throws IOException */ public void toJson ( JsonWriter jsonWriter , boolean keepConfig ) throws IOException { } }
jsonWriter . beginObject ( ) ; jsonWriter . name ( "task id" ) . value ( this . getTaskId ( ) ) . name ( "task state" ) . value ( this . getWorkingState ( ) . name ( ) ) . name ( "start time" ) . value ( this . getStartTime ( ) ) . name ( "end time" ) . value ( this . getEndTime ( ) ) . name ( "duration" ) . value ( this . getTaskDuration ( ) ) . name ( "retry count" ) . value ( this . getPropAsInt ( ConfigurationKeys . TASK_RETRIES_KEY , 0 ) ) ; // Also add failure exception information if it exists . This information is useful even in the // case that the task finally succeeds so we know what happened in the course of task execution . if ( getTaskFailureException ( ) . isPresent ( ) ) { jsonWriter . name ( "exception" ) . value ( getTaskFailureException ( ) . get ( ) ) ; } if ( keepConfig ) { jsonWriter . name ( "properties" ) ; jsonWriter . beginObject ( ) ; for ( String key : this . getPropertyNames ( ) ) { jsonWriter . name ( key ) . value ( this . getProp ( key ) ) ; } jsonWriter . endObject ( ) ; } jsonWriter . endObject ( ) ;
public class MtasSolrCollectionResult {

    /**
     * Merges a shard result into this result, dispatching on the shared action.
     * CREATE merges value sets; LIST attaches the new item's entries as
     * per-shard children of matching list entries; the status-based actions
     * (CHECK/POST/IMPORT/CREATE/GET) attach the new item's status as a shard
     * entry under this result's status.
     *
     * @param newItem the new item
     * @throws IOException if the action does not support merging
     */
    public void merge(MtasSolrCollectionResult newItem) throws IOException {
        // Only merge when both sides carry an action; otherwise silently skip.
        if (action != null && newItem.action != null) {
            if (action.equals(ComponentCollection.ACTION_CREATE)
                    && newItem.action.equals(ComponentCollection.ACTION_CREATE)) {
                values.addAll(newItem.values);
                // Ids must agree across shards; clear on mismatch.
                if (id != null && (newItem.id == null || !newItem.id.equals(id))) {
                    id = null;
                }
            } else if (action.equals(ComponentCollection.ACTION_LIST)) {
                if (list != null) {
                    // Index existing entries by id, ensuring each has a "shards" list.
                    HashMap<String, SimpleOrderedMap<Object>> index = new HashMap<>();
                    for (SimpleOrderedMap<Object> item : list) {
                        if (item.get("id") != null && item.get("id") instanceof String) {
                            index.put((String) item.get("id"), item);
                            if (item.get("shards") == null || !(item.get("shards") instanceof List)) {
                                item.add("shards", new ArrayList<>());
                            }
                        }
                    }
                    // Attach each incoming item as a shard entry of its matching entry.
                    for (SimpleOrderedMap<Object> item : newItem.list) {
                        if (item.get("id") != null && item.get("id") instanceof String) {
                            String id = (String) item.get("id");  // NOTE: shadows the field
                            if (index.containsKey(id)) {
                                SimpleOrderedMap<Object> indexItem = index.get(id);
                                List<SimpleOrderedMap<Object>> shards;
                                if (indexItem.get("shards") != null
                                        && indexItem.get("shards") instanceof List) {
                                    shards = (List<SimpleOrderedMap<Object>>) indexItem.get("shards");
                                } else {
                                    shards = new ArrayList<>();
                                    indexItem.add("shards", shards);
                                }
                                shards.add(item);
                            }
                        }
                    }
                }
            } else if (action.equals(ComponentCollection.ACTION_CHECK)
                    || action.equals(ComponentCollection.ACTION_POST)
                    || action.equals(ComponentCollection.ACTION_IMPORT)
                    || action.equals(ComponentCollection.ACTION_CREATE)
                    || action.equals(ComponentCollection.ACTION_GET)) {
                if (status != null && status.get("id") != null && status.get("id") instanceof String) {
                    String id = (String) status.get("id");  // NOTE: shadows the field
                    // Only merge statuses that refer to the same id.
                    if (id.equals(newItem.id)) {
                        List<SimpleOrderedMap<Object>> shards;
                        if (status.get("shards") != null && status.get("shards") instanceof List) {
                            shards = (List<SimpleOrderedMap<Object>>) status.get("shards");
                        } else {
                            shards = new ArrayList<>();
                            status.add("shards", shards);
                        }
                        if (newItem.status != null) {
                            // GET additionally carries the collected values.
                            if (action.equals(ComponentCollection.ACTION_GET)) {
                                newItem.status.add("values", newItem.values);
                            }
                            shards.add(newItem.status);
                        }
                    }
                }
            } else {
                throw new IOException("not allowed for action '" + action + "'");
            }
        }
    }
}
public class SecurityInitializer { /** * Called during ORB initialization . If a service must resolve initial * references as part of its initialization , it can assume that all * initial references will be available at this point . * Calling the < code > post _ init < / code > operations is not the final * task of ORB initialization . The final task , following the * < code > post _ init < / code > calls , is attaching the lists of registered * interceptors to the ORB . Therefore , the ORB does not contain the * interceptors during calls to < code > post _ init < / code > . If an * ORB - mediated call is made from within < code > post _ init < / code > , no * request interceptors will be invoked on that call . * Likewise , if an operation is performed which causes an IOR to be * created , no IOR interceptors will be invoked . * @ param info provides initialization attributes and * operations by which Interceptors can be registered . */ @ Override public void post_init ( ORBInitInfo info ) { } }
try { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEventEnabled ( ) ) Tr . debug ( tc , "Registering interceptors and policy factories" ) ; TSSConfig config = Util . getRegisteredTSSConfig ( info . orb_id ( ) ) ; try { Codec codec ; try { codec = info . codec_factory ( ) . create_codec ( CDR_1_2_ENCODING ) ; } catch ( UnknownEncoding e ) { INITIALIZE err = new org . omg . CORBA . INITIALIZE ( "Could not create CDR 1.2 codec" ) ; err . initCause ( e ) ; throw err ; } info . add_client_request_interceptor ( new ClientSecurityInterceptor ( codec ) ) ; info . add_server_request_interceptor ( new ServerSecurityInterceptor ( codec ) ) ; info . add_ior_interceptor ( new IORSecurityInterceptor ( config , codec ) ) ; } catch ( DuplicateName dn ) { Tr . error ( tc , "Error registering interceptor" , dn ) ; } info . register_policy_factory ( ClientPolicyFactory . POLICY_TYPE , new ClientPolicyFactory ( ) ) ; info . register_policy_factory ( ServerPolicyFactory . POLICY_TYPE , new ServerPolicyFactory ( ) ) ; } catch ( RuntimeException re ) { Tr . error ( tc , "Error registering interceptor" , re ) ; throw re ; }
public class UintMap {

    /**
     * Set int value of the key.
     * If key does not exist, also set its object value to null.
     *
     * @param key non-negative key (negative keys are a caller bug)
     * @param value the int value to associate with the key
     */
    public void put(int key, int value) {
        if (key < 0) Kit.codeBug();
        // ensureIndex(..., true) creates the slot if needed and returns its index.
        int index = ensureIndex(key, true);
        if (ivaluesShift == 0) {
            // Int values are stored lazily in the second half of the keys array;
            // allocate that half on the first int put.
            int N = 1 << power;
            // keys.length can be N * 2 after clear which set ivaluesShift to 0
            if (keys.length != N * 2) {
                int[] tmp = new int[N * 2];
                System.arraycopy(keys, 0, tmp, 0, N);
                keys = tmp;
            }
            ivaluesShift = N;
        }
        keys[ivaluesShift + index] = value;
    }
}
public class PagerRenderer { /** * Build the anchor * @ param appender * @ param queryParams * @ param labelKey */ protected final void buildAnchor ( AbstractRenderAppender appender , Map queryParams , String labelKey ) { } }
assert appender != null ; assert queryParams != null ; assert labelKey != null && labelKey . length ( ) > 0 ; _anchorState . href = buildPageUri ( queryParams ) ; _anchorTag . doStartTag ( appender , _anchorState ) ; appender . append ( _gridModel . getMessage ( labelKey ) ) ; _anchorTag . doEndTag ( appender ) ; _anchorState . clear ( ) ;
public class SequenceNumberAuditor { /** * Check the public and the private sequence * @ param jsonArray */ private void checkPublicAndPrivateSequence ( final JSONArray jsonArray ) { } }
final long nextPublicSequnceNumber = jsonArray . getLong ( jsonArray . length ( ) - 2 ) ; final long nextPrivateSequnceNumber = jsonArray . getLong ( jsonArray . length ( ) - 1 ) ; auditPublicSequence ( nextPublicSequnceNumber ) ; auditPrivateSequence ( nextPrivateSequnceNumber ) ;
public class PaigeTarjan { /** * Creates a new block . The { @ link Block # low } and { @ link Block # high } fields will be initialized to { @ code - 1 } . * @ return a newly created block . */ public Block createBlock ( ) { } }
Block b = new Block ( - 1 , - 1 , numBlocks ++ , blocklistHead ) ; blocklistHead = b ; return b ;
public class AWSBackupClient { /** * Returns two sets of metadata key - value pairs . The first set lists the metadata that the recovery point was * created with . The second set lists the metadata key - value pairs that are required to restore the recovery point . * These sets can be the same , or the restore metadata set can contain different values if the target service to be * restored has changed since the recovery point was created and now requires additional or different information in * order to be restored . * @ param getRecoveryPointRestoreMetadataRequest * @ return Result of the GetRecoveryPointRestoreMetadata operation returned by the service . * @ throws ResourceNotFoundException * A resource that is required for the action doesn ' t exist . * @ throws InvalidParameterValueException * Indicates that something is wrong with a parameter ' s value . For example , the value is out of range . * @ throws MissingParameterValueException * Indicates that a required parameter is missing . * @ throws ServiceUnavailableException * The request failed due to a temporary failure of the server . * @ sample AWSBackup . GetRecoveryPointRestoreMetadata * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / backup - 2018-11-15 / GetRecoveryPointRestoreMetadata " * target = " _ top " > AWS API Documentation < / a > */ @ Override public GetRecoveryPointRestoreMetadataResult getRecoveryPointRestoreMetadata ( GetRecoveryPointRestoreMetadataRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeGetRecoveryPointRestoreMetadata ( request ) ;
public class AmazonDirectConnectClient { /** * Deprecated . Use < a > DescribeLoa < / a > instead . * Gets the LOA - CFA for the specified interconnect . * The Letter of Authorization - Connecting Facility Assignment ( LOA - CFA ) is a document that is used when * establishing your cross connect to AWS at the colocation facility . For more information , see < a * href = " https : / / docs . aws . amazon . com / directconnect / latest / UserGuide / Colocation . html " > Requesting Cross Connects at * AWS Direct Connect Locations < / a > in the < i > AWS Direct Connect User Guide < / i > . * @ param describeInterconnectLoaRequest * @ return Result of the DescribeInterconnectLoa operation returned by the service . * @ throws DirectConnectServerException * A server - side error occurred . * @ throws DirectConnectClientException * One or more parameters are not valid . * @ sample AmazonDirectConnect . DescribeInterconnectLoa * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / directconnect - 2012-10-25 / DescribeInterconnectLoa " * target = " _ top " > AWS API Documentation < / a > */ @ Override @ Deprecated public DescribeInterconnectLoaResult describeInterconnectLoa ( DescribeInterconnectLoaRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeDescribeInterconnectLoa ( request ) ;
public class SarlFieldBuilderImpl { /** * Initialize the Ecore element . * @ param container the container of the SarlField . * @ param name the name of the SarlField . */ public void eInit ( XtendTypeDeclaration container , String name , String modifier , IJvmTypeProvider context ) { } }
setTypeResolutionContext ( context ) ; if ( this . sarlField == null ) { this . container = container ; this . sarlField = SarlFactory . eINSTANCE . createSarlField ( ) ; this . sarlField . setAnnotationInfo ( XtendFactory . eINSTANCE . createXtendMember ( ) ) ; this . sarlField . setName ( name ) ; if ( Strings . equal ( modifier , "var" ) || Strings . equal ( modifier , "val" ) ) { this . sarlField . getModifiers ( ) . add ( modifier ) ; } else { throw new IllegalStateException ( "Invalid modifier" ) ; } container . getMembers ( ) . add ( this . sarlField ) ; }
public class MessageBuilder { /** * Creates a ACK message . * @ param zxid the zxid of the transaction ACK . * @ return a protobuf message . */ public static Message buildAck ( Zxid zxid ) { } }
ZabMessage . Zxid zzxid = toProtoZxid ( zxid ) ; Ack ack = Ack . newBuilder ( ) . setZxid ( zzxid ) . build ( ) ; return Message . newBuilder ( ) . setType ( MessageType . ACK ) . setAck ( ack ) . build ( ) ;
public class Menu { /** * { @ inheritDoc } */ @ Override protected void add ( final long _sortId , final long _id ) throws CacheReloadException { } }
final Command command = Command . get ( _id ) ; if ( command == null ) { final Menu subMenu = Menu . get ( _id ) ; add ( _sortId , subMenu ) ; } else { add ( _sortId , command ) ; }
public class JMElasticsearchClient { /** * Gets filtered index list . * @ param containedString the contained string * @ return the filtered index list */ public List < String > getFilteredIndexList ( String containedString ) { } }
return getAllIndices ( ) . stream ( ) . filter ( index -> index . contains ( containedString ) ) . collect ( toList ( ) ) ;
public class D6Crud {

    /**
     * Execute a select statement over joined multiple tables.
     *
     * Accepts a prepared SQL statement (with '?' wildcards bound from
     * {@code searchKeys}) or a raw SQL statement (then {@code searchKeys}
     * must be null or empty). For each row, every result-set column value is
     * offered to every given model class; a model class picks up the values
     * whose column names match its annotated columns, so classes sharing a
     * column name each receive that value.
     *
     * NOTE(review): all exceptions during execution are logged and swallowed,
     * so a failed query returns a partial (possibly empty) map rather than
     * throwing — confirm callers rely on this best-effort behavior.
     *
     * @param preparedSql the SQL to execute
     * @param searchKeys values substituted for the '?' wildcards, in order of
     *        appearance; null/empty for raw SQL
     * @param modelClazz one or more model classes used to map the results
     * @return map from model class to the list of mapped model instances
     */
    @SuppressWarnings({ "rawtypes", "unchecked" })
    public Map<Class<?>, List<Object>> execSelectTableWithJoin(String preparedSql, Object[] searchKeys,
            Class<? extends D6Model>... modelClazz) {
        log("#execSelectTableWithJoin preparedSql=" + preparedSql + " searchKeys=" + searchKeys
                + " modelClazz=" + modelClazz);
        final Map<Class<?>, List<Object>> resultMap = new HashMap<Class<?>, List<Object>>();
        // One wrapper per model class; each wrapper accumulates one row at a time.
        final List<ModelWrapper> modelList = new ArrayList<ModelWrapper>();
        for (int i = 0; i < modelClazz.length; i++) {
            @SuppressWarnings("unchecked")
            final ModelWrapper model = new ModelWrapper(modelClazz[i]);
            modelList.add(model);
        }
        PreparedStatement preparedStmt = null;
        ResultSet rs = null;
        final Connection conn = createConnection();
        try {
            preparedStmt = conn.prepareStatement(preparedSql, ResultSet.TYPE_SCROLL_INSENSITIVE,
                    ResultSet.CONCUR_READ_ONLY);
            final StringBuilder logSb = new StringBuilder();
            if (searchKeys != null) {
                // Bind each wildcard (JDBC parameters are 1-based) and build the log line.
                logSb.append("/ ");
                for (int i = 0; i < searchKeys.length; i++) {
                    Object object = searchKeys[i];
                    setObject((i + 1), preparedStmt, object);
                    logSb.append("key(" + (i + 1) + ")=" + searchKeys[i]);
                    logSb.append(" ");
                }
            }
            log("#execSelectTableWithJoin SQL=" + preparedSql + " " + logSb.toString());
            // execute SQL
            rs = preparedStmt.executeQuery();
            final ResultSetMetaData rsMetaData = rs.getMetaData();
            final int numberOfColumns = rsMetaData.getColumnCount();
            final List<String> columnNameList = new ArrayList<String>();
            // cache column names of this result set
            for (int i = 0; i < numberOfColumns; i++) {
                String columnName = rsMetaData.getColumnName(i + 1);
                columnNameList.add(columnName);
            }
            while (rs.next()) {
                // Processing of a single resultset [begin] =====
                for (int i = 0; i < numberOfColumns; i++) {
                    // Get from the current resultSet
                    final String columnName = columnNameList.get(i);
                    final Object value = rs.getObject(i + 1);
                    // Set the values to all the properties of model class (you
                    // know property is corresponding to each column of the DB)
                    // via modelWrapper
                    for (ModelWrapper model : modelList) {
                        // set value to model wrapper
                        model.setValue(columnName, value);
                    }
                }
                // Processing of a single resultset [end] =====
                for (ModelWrapper model : modelList) {
                    final Class<?> modelClazzName = model.getClazz();
                    List<Object> modelObjectList = resultMap.get(modelClazzName);
                    // Generate the result list corresponding to a certain model
                    // class if the list has not been generated yet.
                    if (modelObjectList == null) {
                        modelObjectList = new ArrayList<Object>();
                        resultMap.put(modelClazzName, modelObjectList);
                    }
                    // Generates a model object holding the property values in the
                    // model wrapper, stores it, then resets the wrapper for the
                    // next row.
                    final Object resultModelObject = model.getAsObject();
                    modelObjectList.add(resultModelObject);
                    model.initializeFieldMap();
                }
            }
        } catch (Exception e) {
            loge("#execSelectTableWithJoin General ", e);
        } finally {
            // Close in reverse acquisition order; a close failure is only logged.
            try {
                if (rs != null) {
                    rs.close();
                }
                if (preparedStmt != null) {
                    preparedStmt.close();
                }
                if (conn != null) {
                    conn.close();
                }
            } catch (SQLException e) {
                loge("#execSelectTableWithJoin SQLException ", e);
            }
        }
        return resultMap;
    }
}
public class WebhooksInner { /** * Retrieve a list of webhooks . * @ param nextPageLink The NextLink from the previous successful call to List operation . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the PagedList & lt ; WebhookInner & gt ; object */ public Observable < Page < WebhookInner > > listByAutomationAccountNextAsync ( final String nextPageLink ) { } }
return listByAutomationAccountNextWithServiceResponseAsync ( nextPageLink ) . map ( new Func1 < ServiceResponse < Page < WebhookInner > > , Page < WebhookInner > > ( ) { @ Override public Page < WebhookInner > call ( ServiceResponse < Page < WebhookInner > > response ) { return response . body ( ) ; } } ) ;
public class CommercePriceListAccountRelServiceBaseImpl {
    /**
     * Sets the commerce tier price entry local service.
     *
     * @param commerceTierPriceEntryLocalService the commerce tier price entry local service
     */
    public void setCommerceTierPriceEntryLocalService ( com . liferay . commerce . price . list . service . CommerceTierPriceEntryLocalService commerceTierPriceEntryLocalService ) {
        // Plain assignment setter with no validation — presumably populated by the
        // container's dependency injection (Liferay service wiring); TODO confirm.
        this . commerceTierPriceEntryLocalService = commerceTierPriceEntryLocalService ;
    }
}
public class Normalizer2Impl {
    /**
     * Tests whether there is a decomposition boundary before or after code point {@code c},
     * at the cost of building the FCD trie for a decomposition normalizer.
     *
     * @param c      the code point to test; may be remapped algorithmically inside the loop
     * @param before true to test for a boundary before c, false for a boundary after c
     * @return true if a decomposition boundary exists at the requested side of c
     */
    public boolean hasDecompBoundary ( int c , boolean before ) {
        // Loop rather than recurse: algorithmic decompositions remap c and retry.
        for ( ; ; ) {
            if ( c < minDecompNoCP ) {
                // Below the smallest code point with a non-trivial decomposition.
                return true ;
            }
            int norm16 = getNorm16 ( c ) ;
            if ( isHangul ( norm16 ) || isDecompYesAndZeroCC ( norm16 ) ) {
                return true ;
            } else if ( norm16 > MIN_NORMAL_MAYBE_YES ) {
                return false ; // ccc != 0
            } else if ( isDecompNoAlgorithmic ( norm16 ) ) {
                // Follow the algorithmic mapping and re-test the mapped code point.
                c = mapAlgorithmic ( c , norm16 ) ;
            } else {
                // c decomposes; get everything from the variable-length extra data.
                int firstUnit = extraData . charAt ( norm16 ) ;
                if ( ( firstUnit & MAPPING_LENGTH_MASK ) == 0 ) {
                    // Empty mapping — no boundary.
                    return false ;
                }
                if ( ! before ) {
                    // decomp after-boundary: same as hasFCDBoundaryAfter(),
                    // fcd16 <= 1 || trailCC == 0
                    if ( firstUnit > 0x1ff ) {
                        return false ; // trailCC > 1
                    }
                    if ( firstUnit <= 0xff ) {
                        return true ; // trailCC == 0
                    }
                    // if (trailCC == 1) test leadCC == 0, same as checking for before-boundary
                }
                // true if leadCC == 0 (hasFCDBoundaryBefore())
                return ( firstUnit & MAPPING_HAS_CCC_LCCC_WORD ) == 0 || ( extraData . charAt ( norm16 - 1 ) & 0xff00 ) == 0 ;
            }
        }
    }
}
public class ClassInfo {
    /**
     * Filter classes according to scan spec and class type.
     *
     * @param classes         the classes
     * @param scanSpec        the scan spec
     * @param strictWhitelist if true, exclude class if it is external, blacklisted, or a system class
     * @param classTypes      the class types
     * @return the filtered classes
     */
    private static Set < ClassInfo > filterClassInfo ( final Collection < ClassInfo > classes , final ScanSpec scanSpec , final boolean strictWhitelist , final ClassType ... classTypes ) {
        if ( classes == null ) {
            return Collections . < ClassInfo > emptySet ( ) ;
        }
        // No explicit types requested means "accept everything".
        boolean includeAllTypes = classTypes . length == 0 ;
        boolean includeStandardClasses = false ;
        boolean includeImplementedInterfaces = false ;
        boolean includeAnnotations = false ;
        for ( final ClassType classType : classTypes ) {
            switch ( classType ) {
            case ALL :
                includeAllTypes = true ;
                break ;
            case STANDARD_CLASS :
                includeStandardClasses = true ;
                break ;
            case IMPLEMENTED_INTERFACE :
                includeImplementedInterfaces = true ;
                break ;
            case ANNOTATION :
                includeAnnotations = true ;
                break ;
            case INTERFACE_OR_ANNOTATION :
                includeImplementedInterfaces = includeAnnotations = true ;
                break ;
            default :
                throw new IllegalArgumentException ( "Unknown ClassType: " + classType ) ;
            }
        }
        // All three concrete kinds together cover everything.
        if ( includeStandardClasses && includeImplementedInterfaces && includeAnnotations ) {
            includeAllTypes = true ;
        }
        // LinkedHashSet preserves the iteration order of the input collection.
        final Set < ClassInfo > classInfoSetFiltered = new LinkedHashSet < > ( classes . size ( ) ) ;
        for ( final ClassInfo classInfo : classes ) {
            // Check class type against requested type(s).
            // NOTE: && binds tighter than ||, so each include flag pairs with its own predicate.
            if ( ( includeAllTypes || includeStandardClasses && classInfo . isStandardClass ( ) || includeImplementedInterfaces && classInfo . isImplementedInterface ( ) || includeAnnotations && classInfo . isAnnotation ( ) )
                // Always check blacklist
                && ! scanSpec . classOrPackageIsBlacklisted ( classInfo . name )
                // Always return whitelisted classes, or external classes if enableExternalClasses is true,
                // or external (non-whitelisted) classes if viewing the class hierarchy "upwards"
                && ( ! classInfo . isExternalClass || scanSpec . enableExternalClasses || ! strictWhitelist ) ) {
                // Class passed the filter criteria.
                classInfoSetFiltered . add ( classInfo ) ;
            }
        }
        return classInfoSetFiltered ;
    }
}
public class SyntheticStorableBuilder {
    /**
     * Builds the class file for the synthetic storable: bean methods for every
     * registered property, then primary key, alternate keys, and indexes.
     * (non-Javadoc)
     *
     * @see com.amazon.carbonado.synthetic.SyntheticBuilder#prepare()
     * @return the populated class file builder
     * @throws SupportException if class generation is not supported
     * @throws IllegalStateException if no primary key has been defined
     */
    public ClassFileBuilder prepare ( ) throws SupportException {
        if ( mPrimaryKey == null ) {
            throw new IllegalStateException ( "Primary key not defined" ) ;
        }
        // Clear the cached result, if any — a fresh generator is created below.
        mStorableClass = null ;
        mClassFileGenerator = new StorableClassFileBuilder ( mClassNameProvider , mLoader , SyntheticStorableBuilder . class , mEvolvable ) ;
        ClassFile cf = mClassFileGenerator . getClassFile ( ) ;
        // Emit getter/setter pairs for each synthetic property.
        for ( SyntheticProperty prop : mPropertyList ) {
            definePropertyBeanMethods ( cf , prop ) ;
        }
        // Key/index definitions must follow the property definitions.
        definePrimaryKey ( cf ) ;
        defineAlternateKeys ( cf ) ;
        defineIndexes ( cf ) ;
        return mClassFileGenerator ;
    }
}
public class BaseBuffer { /** * Compare this fields with the next data in the buffer . * This is a utility method that compares the record . * @ param field The target field . * @ return True if they are equal . */ public boolean compareNextToField ( FieldInfo field ) // Must be to call right Get calls { } }
Object objNext = this . getNextData ( ) ; if ( objNext == DATA_ERROR ) return false ; // EOF if ( objNext == DATA_EOF ) return false ; // EOF if ( objNext == DATA_SKIP ) return true ; // Don ' t set this field Object objField = field . getData ( ) ; if ( ( objNext == null ) || ( objField == null ) ) { if ( ( objNext == null ) && ( objField == null ) ) return true ; return false ; } return objNext . equals ( objField ) ;
public class VirtualGrid { /** * Adjusts the given time either rounding it up or down . * @ param time * the time to adjust * @ param roundUp * the rounding direction * @ param firstDayOfWeek * the first day of the week ( needed for rounding weeks ) * @ return the adjusted time */ public ZonedDateTime adjustTime ( ZonedDateTime time , boolean roundUp , DayOfWeek firstDayOfWeek ) { } }
Instant instant = time . toInstant ( ) ; ZoneId zoneId = time . getZone ( ) ; instant = adjustTime ( instant , zoneId , roundUp , firstDayOfWeek ) ; return ZonedDateTime . ofInstant ( instant , zoneId ) ;
public class DSClientFactory {
    /**
     * Builds the DataStax driver pooling options from the connection properties.
     * Recognized keys: hostDistance, maxConnectionsPerHost, maxRequestsPerConnection,
     * coreConnections. All per-host settings are only applied when hostDistance is set.
     *
     * @param connectionProperties the connection properties
     * @return the pooling options
     * @throws IllegalArgumentException if hostDistance is not a valid HostDistance name
     * @throws NumberFormatException if a numeric property is not a valid integer
     */
    private PoolingOptions getPoolingOptions ( Properties connectionProperties ) {
        PoolingOptions options = new PoolingOptions ( ) ;
        String hostDistance = connectionProperties . getProperty ( "hostDistance" ) ;
        String maxConnectionsPerHost = connectionProperties . getProperty ( "maxConnectionsPerHost" ) ;
        String maxRequestsPerConnection = connectionProperties . getProperty ( "maxRequestsPerConnection" ) ;
        String coreConnections = connectionProperties . getProperty ( "coreConnections" ) ;
        if ( ! StringUtils . isBlank ( hostDistance ) ) {
            HostDistance hostDist = HostDistance . valueOf ( hostDistance . toUpperCase ( ) ) ;
            if ( ! StringUtils . isBlank ( coreConnections ) ) {
                // FIX: use the configured host distance; previously HostDistance.LOCAL was
                // hard-coded here while the other two setters used hostDist — an inconsistency
                // that silently ignored the hostDistance setting for core connections.
                options . setCoreConnectionsPerHost ( hostDist , Integer . parseInt ( coreConnections ) ) ;
            }
            if ( ! StringUtils . isBlank ( maxConnectionsPerHost ) ) {
                // Integer.parseInt replaces the deprecated new Integer(...) constructor.
                options . setMaxConnectionsPerHost ( hostDist , Integer . parseInt ( maxConnectionsPerHost ) ) ;
            }
            if ( ! StringUtils . isBlank ( maxRequestsPerConnection ) ) {
                options . setMaxRequestsPerConnection ( hostDist , Integer . parseInt ( maxRequestsPerConnection ) ) ;
            }
        }
        return options ;
    }
}
public class DatagramSocketSettings {
    /**
     * Applies the configured socket options to the given datagram channel.
     * Fields left at their sentinel values (0 for sizes, DEF_BOOL for flags)
     * are skipped so the OS defaults remain in effect.
     *
     * @param channel the channel to configure; must not be null
     * @throws IOException if setting a socket option fails
     */
    public void applySettings ( @ NotNull DatagramChannel channel ) throws IOException {
        // 0 means "not configured" for buffer sizes.
        if ( receiveBufferSize != 0 ) { channel . setOption ( SO_RCVBUF , receiveBufferSize ) ; }
        if ( sendBufferSize != 0 ) { channel . setOption ( SO_SNDBUF , sendBufferSize ) ; }
        // Boolean flags are tri-state ints: DEF_BOOL = unset, FALSE = false, anything else = true.
        if ( reuseAddress != DEF_BOOL ) { channel . setOption ( SO_REUSEADDR , reuseAddress != FALSE ) ; }
        if ( broadcast != DEF_BOOL ) { channel . setOption ( SO_BROADCAST , broadcast != FALSE ) ; }
    }
}
public class CmsJlanRepository {
    /**
     * Checks if a user may access this repository.<p>
     *
     * @param user the name of the user
     * @return true if the user may access the repository
     */
    public boolean allowAccess ( String user ) {
        try {
            return m_cms . getPermissions ( m_root , user ) . requiresViewPermission ( ) ;
        } catch ( CmsException e ) {
            LOG . error ( e . getLocalizedMessage ( ) , e ) ;
            // NOTE(review): fails OPEN — access is granted when the permission check
            // itself errors out. Presumably intentional (availability over strictness),
            // but worth confirming from a security standpoint.
            return true ;
        }
    }
}
public class CdiSpiHelper { /** * Generates a unique signature for a collection of annotations . */ private static String createAnnotationCollectionId ( Collection < Annotation > annotations ) { } }
if ( annotations . isEmpty ( ) ) return "" ; return annotations . stream ( ) . sorted ( comparing ( a -> a . annotationType ( ) . getName ( ) ) ) . map ( CdiSpiHelper :: createAnnotationId ) . collect ( joining ( "," , "[" , "]" ) ) ;
public class SipSessionsUtilImpl { /** * { @ inheritDoc } */ public SipSession getCorrespondingSipSession ( SipSession sipSession , String headerName ) { } }
MobicentsSipSession correspondingSipSession = null ; if ( headerName . equalsIgnoreCase ( JoinHeader . NAME ) ) { correspondingSipSession = joinSession . get ( ( ( MobicentsSipSession ) sipSession ) . getKey ( ) ) ; } else if ( headerName . equalsIgnoreCase ( ReplacesHeader . NAME ) ) { correspondingSipSession = replacesSession . get ( ( ( MobicentsSipSession ) sipSession ) . getKey ( ) ) ; } else { throw new IllegalArgumentException ( "headerName argument should either be one of Join or Replaces" ) ; } return correspondingSipSession ;
public class Ftp {
    /**
     * Copies a local file to the FTP server.
     *
     * @return the FTP client used for the transfer
     * @throws IOException if reading the local file or storing it remotely fails
     * @throws PageException if a required attribute is missing or the local file does not exist
     */
    private AFTPClient actionPutFile ( ) throws IOException , PageException {
        // Both attributes are mandatory for a put.
        required ( "remotefile" , remotefile ) ;
        required ( "localfile" , localfile ) ;
        AFTPClient client = getClient ( ) ;
        // Throws if the local file does not exist.
        Resource local = ResourceUtil . toResourceExisting ( pageContext , localfile ) ;
        InputStream is = null ;
        try {
            is = IOUtil . toBufferedInputStream ( local . getInputStream ( ) ) ;
            // Transfer mode (ascii/binary) is derived from the local file.
            client . setFileType ( getType ( local ) ) ;
            client . storeFile ( remotefile , is ) ;
        } finally {
            // closeEL swallows close errors so the original exception (if any) propagates.
            IOUtil . closeEL ( is ) ;
        }
        // Publish the result struct (cfftp) before returning.
        writeCfftp ( client ) ;
        return client ;
    }
}
public class Ifc2x3tc1PackageImpl {
    /**
     * Returns the EClass for IfcDraughtingPreDefinedCurveFont, resolving it lazily
     * from the registered package on first access (classifier index 177).
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     *
     * @generated
     */
    public EClass getIfcDraughtingPreDefinedCurveFont ( ) {
        // Lazy lookup: EMF-generated accessor, resolved once and cached in the field.
        if ( ifcDraughtingPreDefinedCurveFontEClass == null ) {
            ifcDraughtingPreDefinedCurveFontEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( Ifc2x3tc1Package . eNS_URI ) . getEClassifiers ( ) . get ( 177 ) ;
        }
        return ifcDraughtingPreDefinedCurveFontEClass ;
    }
}
public class Record { /** * Get the record type from the field that specifies the record type . * ( Override this ) . * @ return The record type ( as an object ) . */ public BaseField getSharedRecordTypeKey ( ) { } }
if ( this . getFieldCount ( ) >= 2 ) // Wild guess ( typically the second field ) if ( this . getField ( DBConstants . MAIN_FIELD + 1 ) instanceof IntegerField ) return this . getField ( DBConstants . MAIN_FIELD + 1 ) ; return null ;
public class WorkbinsApi { /** * Get the content of multiple Workbins . * @ param getWorkbinsContentData ( required ) * @ return ApiResponse & lt ; ApiSuccessResponse & gt ; * @ throws ApiException If fail to call the API , e . g . server error or cannot deserialize the response body */ public ApiResponse < ApiSuccessResponse > getWorkbinsContentWithHttpInfo ( GetWorkbinsContentData getWorkbinsContentData ) throws ApiException { } }
com . squareup . okhttp . Call call = getWorkbinsContentValidateBeforeCall ( getWorkbinsContentData , null , null ) ; Type localVarReturnType = new TypeToken < ApiSuccessResponse > ( ) { } . getType ( ) ; return apiClient . execute ( call , localVarReturnType ) ;
public class ShowImages {
    /**
     * Creates and shows a window displaying the specified image. If the image is
     * larger than the monitor, the panel is pre-scaled down to fit.
     *
     * NOTE(review): Swing components are created and shown directly on the calling
     * thread; callers presumably invoke this from the EDT or accept the race — confirm.
     *
     * @param img the image to display
     * @param title the window title
     * @param closeOnExit if true, closing the window exits the application
     * @return the panel showing the image
     */
    public static ImagePanel showWindow ( BufferedImage img , String title , boolean closeOnExit ) {
        JFrame frame = new JFrame ( title ) ;
        ImagePanel panel = new ImagePanel ( img ) ;
        panel . setScaling ( ScaleOptions . DOWN ) ;
        // If the window will be too large to be displayed on the monitor, set the bounds to something
        // that can be shown. The default behavior would just change one axis, leaving an awkward appearance.
        Rectangle monitorBounds = frame . getGraphicsConfiguration ( ) . getBounds ( ) ;
        if ( monitorBounds . width < img . getWidth ( ) || monitorBounds . height < img . getHeight ( ) ) {
            // Uniform scale that fits both axes inside the monitor.
            double scale = Math . min ( monitorBounds . width / ( double ) img . getWidth ( ) , monitorBounds . height / ( double ) img . getHeight ( ) ) ;
            int width = ( int ) ( scale * img . getWidth ( ) ) ;
            int height = ( int ) ( scale * img . getHeight ( ) ) ;
            panel . setPreferredSize ( new Dimension ( width , height ) ) ;
        }
        frame . add ( panel , BorderLayout . CENTER ) ;
        frame . pack ( ) ;
        frame . setLocationByPlatform ( true ) ;
        frame . setVisible ( true ) ;
        if ( closeOnExit ) frame . setDefaultCloseOperation ( JFrame . EXIT_ON_CLOSE ) ;
        return panel ;
    }
}
public class FaunusPipeline {
    /**
     * Apply the provided closure to the current element and emit the result.
     *
     * @param closure the closure to apply to the element
     * @return the extended FaunusPipeline
     */
    public FaunusPipeline transform ( final String closure ) {
        // Pre-conditions on the pipeline state machine — order matters here.
        this . state . assertNotLocked ( ) ;
        this . state . assertNoProperty ( ) ;
        // Register the map-only job; the closure is validated before being compiled in.
        this . compiler . addMap ( TransformMap . Map . class , NullWritable . class , FaunusVertex . class , TransformMap . createConfiguration ( this . state . getElementType ( ) , this . validateClosure ( closure ) ) ) ;
        // A transform ends the current traversal segment, so lock the state.
        this . state . lock ( ) ;
        makeMapReduceString ( TransformMap . class ) ;
        return this ;
    }
}
public class MapsforgeTilesGenerator { /** * Get tile data for a given lat / lon / zoomlevel . * @ param lon the WGS84 longitude . * @ param lat the WGS84 latitude . * @ param zoom the zoomlevel * @ param adaptee the class to adapt to . * @ return the generated data . * @ throws IOException */ public < T > T getTile4LatLon ( double lon , double lat , int zoom , Class < T > adaptee ) throws IOException { } }
final int ty = MercatorProjection . latitudeToTileY ( lat , ( byte ) zoom ) ; final int tx = MercatorProjection . longitudeToTileX ( lon , ( byte ) zoom ) ; return getTile4TileCoordinate ( ty , tx , zoom , adaptee ) ;
public class DefaultGroovyMethods { /** * Iterates over the elements of an iterable collection of items , starting * from a specified startIndex , and returns the index of the last item that * matches the condition specified in the closure . * @ param self the iteration object over which to iterate * @ param startIndex start matching from this index * @ param closure the filter to perform a match on the collection * @ return an integer that is the index of the last matched object or - 1 if no match was found * @ since 1.5.2 */ public static int findLastIndexOf ( Object self , int startIndex , Closure closure ) { } }
int result = - 1 ; int i = 0 ; BooleanClosureWrapper bcw = new BooleanClosureWrapper ( closure ) ; for ( Iterator iter = InvokerHelper . asIterator ( self ) ; iter . hasNext ( ) ; i ++ ) { Object value = iter . next ( ) ; if ( i < startIndex ) { continue ; } if ( bcw . call ( value ) ) { result = i ; } } return result ;
public class GenericEncodingStrategy {
    /**
     * Push decoding instanceVar to stack in preparation to calling
     * storePropertyValue.
     *
     * @param a the code assembler emitting the bytecode
     * @param ordinal zero-based property ordinal, used only if instanceVar
     * refers to an object array.
     * @param instanceVar local variable referencing Storable instance,
     * defaults to "this" if null. If variable type is an Object array, then
     * property values are written to the runtime value of this array instead
     * of a Storable instance.
     * @see #storePropertyValue storePropertyValue
     */
    protected void pushDecodingInstanceVar ( CodeAssembler a , int ordinal , LocalVariable instanceVar ) {
        if ( instanceVar == null ) {
            // Push this to stack in preparation for storing a property.
            a . loadThis ( ) ;
        } else if ( instanceVar . getType ( ) != TypeDesc . forClass ( Object [ ] . class ) ) {
            // Push reference to stack in preparation for storing a property.
            a . loadLocal ( instanceVar ) ;
        } else {
            // Push array and index to stack in preparation for storing a property
            // (the stack shape expected by storePropertyValue for the array case).
            a . loadLocal ( instanceVar ) ;
            a . loadConstant ( ordinal ) ;
        }
    }
}
public class MediaWikiApiImpl { /** * handle the given error Message according to the exception setting * @ param errMsg * @ throws Exception */ protected void handleError ( String errMsg ) throws Exception { } }
// log it LOGGER . log ( Level . SEVERE , errMsg ) ; // and throw an error if this is configured if ( this . isThrowExceptionOnError ( ) ) { throw new Exception ( errMsg ) ; }
public class IOUtil {
    /**
     * Merges two input streams into one output stream, optionally closing each
     * stream afterwards. Streams are closed (best-effort, errors swallowed by
     * closeEL) even when the merge itself throws.
     *
     * @param in1 first input stream
     * @param in2 second input stream
     * @param out target output stream
     * @param closeIS1 close in1 when done
     * @param closeIS2 close in2 when done
     * @param closeOS close out when done
     * @throws IOException if reading or writing fails
     */
    public static final void merge ( InputStream in1 , InputStream in2 , OutputStream out , boolean closeIS1 , boolean closeIS2 , boolean closeOS ) throws IOException {
        try {
            // 0xffff is the copy buffer size used by the underlying merge.
            merge ( in1 , in2 , out , 0xffff ) ;
        } finally {
            if ( closeIS1 ) closeEL ( in1 ) ;
            if ( closeIS2 ) closeEL ( in2 ) ;
            if ( closeOS ) closeEL ( out ) ;
        }
    }
}
public class Database {
    /**
     * Retrieve the document with the specified ID from the database and deserialize to an
     * instance of the POJO of type T. Uses the additional parameters specified when making the
     * {@code GET} request.
     * <P>Example usage to get inline attachments:</P>
     * <pre>
     * {@code
     * Foo foo = db.find(Foo.class, "exampleId", new Params().attachments());
     * String attachmentData = foo.getAttachments().get("attachment.txt").getData();
     * }
     * </pre>
     *
     * @param <T> object type
     * @param classType the class of type T
     * @param id the document id
     * @param params extra parameters to append
     * @return An object of type T
     * @throws NoDocumentException if the document is not found in the database.
     * @see Params
     * @see <a
     *      href="https://console.bluemix.net/docs/services/Cloudant/api/document.html#read"
     *      target="_blank">Documents - read</a>
     */
    public < T > T find ( Class < T > classType , String id , Params params ) {
        // Guard against a null/empty Params; classType and id are validated downstream.
        assertNotEmpty ( params , "params" ) ;
        // Delegate to the underlying client with the raw query parameters.
        return db . find ( classType , id , params . getInternalParams ( ) ) ;
    }
}
public class StaticFileWeb { /** * Service a request . * @ param request the http request facade * @ param response the http response facade */ @ Override public void service ( RequestWeb req ) { } }
init ( ) ; String pathInfo = req . pathInfo ( ) ; if ( pathInfo . isEmpty ( ) || pathInfo . equals ( "/" ) ) { pathInfo = _config . get ( "server.index" , "index.html" ) ; } else { pathInfo = pathInfo . substring ( 1 ) ; } Path path = _root . resolve ( pathInfo ) ; // PathImpl path = Vfs . lookup ( root ) . lookup ( " . / " + pathInfo ) ; if ( Files . isDirectory ( path ) ) { path = path . resolve ( _config . get ( "server.index" , "index.html" ) ) ; } long len ; try { len = Files . size ( path ) ; } catch ( IOException e ) { req . fail ( new FileNotFoundException ( "file not found: " + req . uri ( ) ) ) ; return ; } try ( InputStream is = Files . newInputStream ( path ) ) { if ( is == null ) { req . fail ( new FileNotFoundException ( pathInfo ) ) ; return ; } String mimeType = mimeType ( pathInfo ) ; if ( mimeType != null ) { req . header ( "content-type" , mimeType ) ; } else { req . header ( "content-type" , "text/plain; charset=utf-8" ) ; } // XXX : if ( len > 0 ) { req . length ( len ) ; } TempBuffer tBuf = TempBuffer . create ( ) ; byte [ ] buffer = tBuf . buffer ( ) ; int sublen ; while ( ( sublen = is . read ( buffer , 0 , buffer . length ) ) > 0 ) { req . write ( buffer , 0 , sublen ) ; } tBuf . free ( ) ; } catch ( IOException e ) { req . fail ( e ) ; } req . ok ( ) ;
public class BlogEventProcessor {
    /**
     * Builds the read-side handler for blog events: table creation on global
     * prepare, offset loading per tag, and a sequential (parallelism 1) flow
     * that hands each event/offset pair to the database.
     * #build-handler
     */
    @ Override
    public ReadSideHandler < BlogEvent > buildHandler ( ) {
        return new ReadSideHandler < BlogEvent > ( ) {
            // Runs once across the cluster before processing starts.
            @ Override
            public CompletionStage < Done > globalPrepare ( ) {
                return myDatabase . createTables ( ) ;
            }
            // Resume point for this shard's event stream.
            @ Override
            public CompletionStage < Offset > prepare ( AggregateEventTag < BlogEvent > tag ) {
                return myDatabase . loadOffset ( tag ) ;
            }
            @ Override
            public Flow < Pair < BlogEvent , Offset > , Done , ? > handle ( ) {
                // mapAsync(1) preserves event order while handling asynchronously.
                return Flow . < Pair < BlogEvent , Offset > > create ( ) . mapAsync ( 1 , eventAndOffset -> myDatabase . handleEvent ( eventAndOffset . first ( ) , eventAndOffset . second ( ) ) ) ;
            }
        } ;
    }
}
public class ValueInterpreter { /** * Convert signed bytes to a 32 - bit short float value . */ private static float bytesToFloat ( byte b0 , byte b1 , byte b2 , byte b3 ) { } }
int mantissa = unsignedToSigned ( unsignedByteToInt ( b0 ) + ( unsignedByteToInt ( b1 ) << 8 ) + ( unsignedByteToInt ( b2 ) << 16 ) , 24 ) ; return ( float ) ( mantissa * Math . pow ( 10 , b3 ) ) ;
public class CreateFunctionRequest { /** * A list of < a href = " https : / / docs . aws . amazon . com / lambda / latest / dg / configuration - layers . html " > function layers < / a > to * add to the function ' s execution environment . Specify each layer by its ARN , including the version . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setLayers ( java . util . Collection ) } or { @ link # withLayers ( java . util . Collection ) } if you want to override the * existing values . * @ param layers * A list of < a href = " https : / / docs . aws . amazon . com / lambda / latest / dg / configuration - layers . html " > function * layers < / a > to add to the function ' s execution environment . Specify each layer by its ARN , including the * version . * @ return Returns a reference to this object so that method calls can be chained together . */ public CreateFunctionRequest withLayers ( String ... layers ) { } }
if ( this . layers == null ) { setLayers ( new com . amazonaws . internal . SdkInternalList < String > ( layers . length ) ) ; } for ( String ele : layers ) { this . layers . add ( ele ) ; } return this ;
public class Assembly { /** * Prepares all files added for tus uploads . * @ param assemblyUrl the assembly url affiliated with the tus upload . * @ throws IOException when there ' s a failure with file retrieval . * @ throws ProtocolException when there ' s a failure with tus upload . */ protected void processTusFiles ( String assemblyUrl ) throws IOException , ProtocolException { } }
tusClient . setUploadCreationURL ( new URL ( getClient ( ) . getHostUrl ( ) + "/resumable/files/" ) ) ; tusClient . enableResuming ( tusURLStore ) ; for ( Map . Entry < String , File > entry : files . entrySet ( ) ) { processTusFile ( entry . getValue ( ) , entry . getKey ( ) , assemblyUrl ) ; } for ( Map . Entry < String , InputStream > entry : fileStreams . entrySet ( ) ) { processTusFile ( entry . getValue ( ) , entry . getKey ( ) , assemblyUrl ) ; }
public class ClientSocketFactory {
    /**
     * Called when the socket read/write fails. Records the failure, and — if this
     * failure is newer than the last recorded one — degrades the target and doubles
     * the recovery backoff (capped at the configured maximum).
     *
     * @param time the time of the failure
     */
    @ Override
    public void failSocket ( long time ) {
        getRequestFailProbe ( ) . start ( ) ;
        _failCountTotal . incrementAndGet ( ) ;
        logFinest ( L . l ( "failSocket: time={0}, _failTime={1}" , time , _failTime ) ) ;
        synchronized ( this ) {
            // Only react to failures newer than the last one we processed,
            // so concurrent/out-of-order reports don't repeatedly degrade.
            if ( _failTime < time ) {
                degrade ( time ) ;
                _firstSuccessTime = 0 ;
                _failTime = time ;
                _lastFailTime = _failTime ;
                // Exponential backoff for recovery, capped at the configured maximum.
                _dynamicFailRecoverTime = Math . min ( 2 * _dynamicFailRecoverTime , _loadBalanceFailRecoverTime ) ;
                _state = _state . toFail ( ) ;
            }
        }
    }
}
public class GenericUtils { /** * Utility method to count the total number of " used " bytes in the list of * buffers . This would represent the size of the data if it was printed * out . * @ param list * @ return int */ static public int sizeOf ( WsByteBuffer [ ] list ) { } }
if ( null == list ) { return 0 ; } int size = 0 ; for ( int i = 0 ; i < list . length ; i ++ ) { if ( null != list [ i ] ) { size += list [ i ] . remaining ( ) ; } } return size ;
public class CmsMappingResolutionContext {
    /**
     * Writes all the stored URL name mappings to the database.<p>
     *
     * @throws CmsException if something goes wrong
     */
    public void commitUrlNameMappings ( ) throws CmsException {
        // Collect the distinct structure ids touched by the pending entries.
        Set < CmsUUID > structureIds = Sets . newHashSet ( ) ;
        for ( InternalUrlNameMappingEntry entry : m_urlNameMappingEntries ) {
            structureIds . add ( entry . getStructureId ( ) ) ;
        }
        boolean urlnameReplace = false ;
        // NOTE(review): urlnameReplace is overwritten on each iteration, so the value
        // actually applied below is whichever resource was read last — confirm whether
        // a per-resource flag was intended instead of this single aggregated one.
        for ( CmsUUID structureId : structureIds ) {
            try {
                CmsResource resource = m_cms . readResource ( structureId , CmsResourceFilter . ALL ) ;
                CmsProperty prop = m_cms . readPropertyObject ( resource , CmsPropertyDefinition . PROPERTY_URLNAME_REPLACE , true ) ;
                if ( ! CmsStringUtil . isEmptyOrWhitespaceOnly ( prop . getValue ( ) ) ) {
                    urlnameReplace = Boolean . parseBoolean ( prop . getValue ( ) ) ;
                }
            } catch ( CmsException e ) {
                // Best-effort: a failed property read falls back to the current flag value.
                LOG . error ( "Error while trying to read urlname.replace: " + e . getLocalizedMessage ( ) , e ) ;
            }
        }
        I_CmsFileNameGenerator nameGen = OpenCms . getResourceManager ( ) . getNameGenerator ( ) ;
        // Write each pending mapping, letting the name generator provide fallback candidates.
        for ( InternalUrlNameMappingEntry entry : m_urlNameMappingEntries ) {
            Iterator < String > nameSeq = nameGen . getUrlNameSequence ( entry . getName ( ) ) ;
            m_cms . writeUrlNameMapping ( nameSeq , entry . getStructureId ( ) , entry . getLocale ( ) . toString ( ) , urlnameReplace ) ;
        }
    }
}
public class FunctionExtensions {
    /**
     * Curries a function that takes one argument.
     *
     * @param function
     *            the original function. May not be <code>null</code>.
     * @param argument
     *            the fixed argument.
     * @return a function that takes no arguments. Never <code>null</code>.
     */
    @ Pure
    public static < P1 , RESULT > Function0 < RESULT > curry ( final Function1 < ? super P1 , ? extends RESULT > function , final P1 argument ) {
        if ( function == null ) throw new NullPointerException ( "function" ) ;
        // Capture the argument; application is deferred until apply() is invoked.
        return new Function0 < RESULT > ( ) {
            @ Override
            public RESULT apply ( ) {
                return function . apply ( argument ) ;
            }
        } ;
    }
}
public class StackErrorMarshaller {
    /**
     * Marshall the given parameter object into the protocol representation.
     *
     * @param stackError the object to marshall; must not be null
     * @param protocolMarshaller the target marshaller
     * @throws SdkClientException if stackError is null or marshalling fails
     */
    public void marshall ( StackError stackError , ProtocolMarshaller protocolMarshaller ) {
        if ( stackError == null ) {
            throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ;
        }
        try {
            // Emit each field with its generated protocol binding.
            protocolMarshaller . marshall ( stackError . getErrorCode ( ) , ERRORCODE_BINDING ) ;
            protocolMarshaller . marshall ( stackError . getErrorMessage ( ) , ERRORMESSAGE_BINDING ) ;
        } catch ( Exception e ) {
            // Wrap any failure in the SDK's client exception, preserving the cause.
            throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ;
        }
    }
}
public class SystemViewImporter {
    /**
     * Finishes processing of the jcr:primaryType property: validates that the node's
     * primary type is allowed under its parent, applies the type to the node, and
     * builds the property data for it.
     *
     * @return the import property data for the primary type property
     * @throws PathNotFoundException if the parent cannot be located
     * @throws RepositoryException on repository access errors
     * @throws NoSuchNodeTypeException if the named node type does not exist
     * @throws ConstraintViolationException if the child type is not allowed under the parent
     */
    private ImportPropertyData endPrimaryType ( ) throws PathNotFoundException , RepositoryException , NoSuchNodeTypeException {
        ImportPropertyData propertyData ;
        // The primary type name is the first (and only) value of the property.
        String sName = propertyInfo . getValues ( ) . get ( 0 ) . toString ( ) ;
        InternalQName primaryTypeName = locationFactory . parseJCRName ( sName ) . getInternalName ( ) ;
        // Pop the current node; it is pushed back after being updated below.
        ImportNodeData nodeData = ( ImportNodeData ) tree . pop ( ) ;
        if ( ! Constants . ROOT_UUID . equals ( nodeData . getIdentifier ( ) ) ) {
            NodeData parentNodeData = getParent ( ) ;
            // check is nt:versionedChild subnode of frozenNode — such nodes bypass
            // the child-type constraint check
            if ( nodeData . getQPath ( ) . getDepth ( ) > 6 && primaryTypeName . equals ( Constants . NT_VERSIONEDCHILD ) && nodeData . getQPath ( ) . getEntries ( ) [ 5 ] . equals ( Constants . JCR_FROZENNODE ) ) {
                // do nothing
            } else if ( ! nodeTypeDataManager . isChildNodePrimaryTypeAllowed ( nodeData . getQName ( ) , primaryTypeName , parentNodeData . getPrimaryTypeName ( ) , parentNodeData . getMixinTypeNames ( ) ) ) {
                throw new ConstraintViolationException ( "Can't add node " + nodeData . getQName ( ) . getAsString ( ) + " to " + parentNodeData . getQPath ( ) . getAsString ( ) + " node type " + sName + " is not allowed as child's node type for parent node type " + parentNodeData . getPrimaryTypeName ( ) . getAsString ( ) ) ;
            }
        }
        // Apply the resolved type to the node being imported.
        nodeData . addNodeType ( ( nodeTypeDataManager . getNodeType ( primaryTypeName ) ) ) ;
        nodeData . setPrimaryTypeName ( primaryTypeName ) ;
        propertyData = new ImportPropertyData ( QPath . makeChildPath ( nodeData . getQPath ( ) , propertyInfo . getName ( ) ) , propertyInfo . getIndentifer ( ) , - 1 , propertyInfo . getType ( ) , nodeData . getIdentifier ( ) , false ) ;
        propertyData . setValues ( parseValues ( ) ) ;
        // Restore the (updated) node on the traversal stack.
        tree . push ( nodeData ) ;
        return propertyData ;
    }
}
public class ImageLoader {
    /**
     * Returns an ImageContainer for the requested URL.
     * The ImageContainer will contain either the specified default bitmap or the loaded bitmap.
     * If the default was returned, the {@link ImageLoader} will be invoked when the
     * request is fulfilled.
     *
     * @param requestUrl The URL of the image to be loaded.
     * @param listener callback invoked with the loaded image or the default
     * @param tag tag used to identify/cancel this request
     * @return the container tracking this image request
     */
    public ImageContainer get ( String requestUrl , final ImageListener listener , final Object tag ) {
        // Delegate with width/height of 0 — presumably "no maximum size" in the
        // sized overload; TODO confirm against the overload's contract.
        return get ( requestUrl , listener , 0 , 0 , tag ) ;
    }
}
public class TCPWriteRequestContextImpl {
    /**
     * Internal async write: resolves the timeout sentinels, records the request
     * parameters on this context, and queues the asynchronous write.
     *
     * @param numBytes number of bytes to write
     * @param writeCallback callback invoked when the write completes
     * @param forceQueue true to force the request onto the queue
     * @param time timeout value, or one of the sentinel constants
     * @return the virtual connection, or null when an immediate/abort timeout was requested
     */
    protected VirtualConnection writeInternal ( long numBytes , TCPWriteCompletedCallback writeCallback , boolean forceQueue , int time ) {
        int timeout = time ;
        // Sentinel handling: immediate timeout (optionally after abort) short-circuits the write.
        if ( timeout == IMMED_TIMEOUT ) {
            immediateTimeout ( ) ;
            return null ;
        } else if ( timeout == ABORT_TIMEOUT ) {
            abort ( ) ;
            immediateTimeout ( ) ;
            return null ;
        }
        // if using channel timeout, reset to that value
        if ( timeout == TCPRequestContext . USE_CHANNEL_TIMEOUT ) {
            timeout = getConfig ( ) . getInactivityTimeout ( ) ;
        }
        // Record the request state before queueing — the async path reads these fields.
        setIOAmount ( numBytes ) ;
        setLastIOAmt ( 0 ) ;
        setIODoneAmount ( 0 ) ;
        setWriteCompletedCallback ( writeCallback ) ;
        setForceQueue ( forceQueue ) ;
        setTimeoutTime ( timeout ) ;
        return processAsyncWriteRequest ( ) ;
    }
}
public class Introspector { /** * = = = = = helper mthod = = = = = */ private void initJavaClassCache ( Class clazz ) { } }
String clazzName = clazz . getName ( ) ; Class cl = classCache . get ( clazzName ) ; if ( null == cl ) { synchronized ( clazz ) { // 进行锁控制 cl = classCache . get ( clazzName ) ; // double check if ( cl != null ) { return ; } Method [ ] methods = clazz . getMethods ( ) ; for ( Method m : methods ) { String key = buildMethodKey ( clazzName , m . getName ( ) , m . getParameterTypes ( ) ) ; methodCache . put ( key , m ) ; } allMethodCache . put ( clazzName , methods ) ; } }
public class MultiLayerNetwork { /** * Returns the number of parameters in the network * @ param backwards If true : exclude any parameters uned only in unsupervised layerwise training ( such as the decoder * parameters in an autoencoder ) * @ return The number of parameters */ @ Override public long numParams ( boolean backwards ) { } }
int length = 0 ; for ( int i = 0 ; i < layers . length ; i ++ ) length += layers [ i ] . numParams ( backwards ) ; return length ;
public class SDMath { /** * Cosine similarity pairwise reduction operation . The output contains the cosine similarity for each tensor / subset * along the specified dimensions : < br > * out = ( sum _ i x [ i ] * y [ i ] ) / ( sqrt ( sum _ i x [ i ] ^ 2 ) * sqrt ( sum _ i y [ i ] ^ 2) * @ param x Input variable x * @ param y Input variable y * @ param dimensions Dimensions to calculate cosine similarity over * @ return Output variable */ public SDVariable cosineSimilarity ( String name , SDVariable x , SDVariable y , int ... dimensions ) { } }
validateNumerical ( "cosine similarity" , x , y ) ; SDVariable cosim = f ( ) . cosineSimilarity ( x , y , dimensions ) ; return updateVariableNameAndReference ( cosim , name ) ;
public class CmsModuleUpdater { /** * Compares the relation ( not defined in content ) for a resource with those to be imported , and makes * the necessary modifications . * @ param cms the CMS context * @ param importResource the resource * @ param relations the relations to be imported * @ throws CmsException if something goes wrong */ private void updateRelations ( CmsObject cms , CmsResource importResource , List < RelationData > relations ) throws CmsException { } }
Map < String , CmsRelationType > relTypes = new HashMap < > ( ) ; for ( CmsRelationType relType : OpenCms . getResourceManager ( ) . getRelationTypes ( ) ) { relTypes . put ( relType . getName ( ) , relType ) ; } Set < CmsRelation > existingRelations = Sets . newHashSet ( cms . readRelations ( CmsRelationFilter . relationsFromStructureId ( importResource . getStructureId ( ) ) ) ) ; List < CmsRelation > noContentRelations = existingRelations . stream ( ) . filter ( rel -> ! rel . getType ( ) . isDefinedInContent ( ) ) . collect ( Collectors . toList ( ) ) ; Set < CmsRelation > newRelations = new HashSet < > ( ) ; for ( RelationData rel : relations ) { if ( ! rel . getType ( ) . isDefinedInContent ( ) ) { newRelations . add ( new CmsRelation ( importResource . getStructureId ( ) , importResource . getRootPath ( ) , rel . getTargetId ( ) , rel . getTarget ( ) , rel . getType ( ) ) ) ; } } if ( ! newRelations . equals ( noContentRelations ) ) { CmsRelationFilter relFilter = CmsRelationFilter . TARGETS . filterNotDefinedInContent ( ) ; cms . deleteRelationsFromResource ( importResource , relFilter ) ; for ( CmsRelation newRel : newRelations ) { cms . addRelationToResource ( importResource , cms . readResource ( newRel . getTargetId ( ) , CmsResourceFilter . IGNORE_EXPIRATION ) , newRel . getType ( ) . getName ( ) ) ; } }
public class DefaultExceptionMapper {
    /**
     * {@inheritDoc}
     *
     * Maps any throwable to an HTTP error response carrying an {@link ErrorMessage}
     * body. 500s are logged with the request URI; 404s stash the exception on the
     * request for later inspection.
     */
    @Override
    public Response toResponse(Throwable exception) {
        // Status is derived from the ORIGINAL (possibly wrapping) exception,
        // before MappableException is unwrapped below.
        int status = parseHttpStatus(exception);
        ErrorMessage message = new ErrorMessage();
        if (exception instanceof MappableException && exception.getCause() != null) {
            // Unwrap the framework wrapper so code/message reflect the real cause.
            exception = exception.getCause();
        }
        // Stable per-exception-class code: murmur3 hash of the class name.
        message.setCode(Hashing.murmur3_32().hashUnencodedChars(exception.getClass().getName()).toString());
        message.setStatus(status);
        message.setThrowable(exception);
        message.setMessage(parseMessage(exception, status));
        message.setDescription(parseDescription(exception, status));
        message.setErrors(parseErrors(exception, status));
        MediaType type = ExceptionMapperUtils.getResponseType(status);
        if (status == 500) {
            // Server errors get full logging, including the request URI when available.
            String uri = "";
            if (Requests.getRequest() != null) {
                uri = " > " + Requests.getUriInfo().getRequestUri();
            }
            logger.error(message.getMessage() + uri, exception);
        } else if (status == 404) {
            // Remember the original exception for downstream 404 handling.
            Requests.setProperty(BEFORE_EXCEPTION_KEY, exception);
        }
        return Response.status(status).type(type).entity(message).build();
    }
}
public class AbstractStreamOperator {
    /**
     * Wires this operator into its containing task: registers operator metrics,
     * wraps the output for record counting, configures latency tracking from the
     * TaskManager configuration, and creates the runtime context and state key
     * selectors. Metric failures are non-fatal and fall back to unregistered groups.
     */
    @Override
    public void setup(StreamTask<?, ?> containingTask, StreamConfig config, Output<StreamRecord<OUT>> output) {
        final Environment environment = containingTask.getEnvironment();
        this.container = containingTask;
        this.config = config;
        try {
            OperatorMetricGroup operatorMetricGroup =
                environment.getMetricGroup().getOrAddOperator(config.getOperatorID(), config.getOperatorName());
            // Wrap the output so emitted records are counted in the operator's IO metrics.
            this.output = new CountingOutput(output, operatorMetricGroup.getIOMetricGroup().getNumRecordsOutCounter());
            // Head/tail operators of a chain share their IO metrics with the task.
            if (config.isChainStart()) {
                operatorMetricGroup.getIOMetricGroup().reuseInputMetricsForTask();
            }
            if (config.isChainEnd()) {
                operatorMetricGroup.getIOMetricGroup().reuseOutputMetricsForTask();
            }
            this.metrics = operatorMetricGroup;
        } catch (Exception e) {
            // Metrics must never break operator setup: fall back to an unregistered
            // group and the raw (uncounted) output.
            LOG.warn("An error occurred while instantiating task metrics.", e);
            this.metrics = UnregisteredMetricGroups.createUnregisteredOperatorMetricGroup();
            this.output = output;
        }
        try {
            Configuration taskManagerConfig = environment.getTaskManagerInfo().getConfiguration();
            int historySize = taskManagerConfig.getInteger(MetricOptions.LATENCY_HISTORY_SIZE);
            if (historySize <= 0) {
                // Non-positive history sizes are invalid; warn and use the default.
                LOG.warn("{} has been set to a value equal or below 0: {}. Using default.", MetricOptions.LATENCY_HISTORY_SIZE, historySize);
                historySize = MetricOptions.LATENCY_HISTORY_SIZE.defaultValue();
            }
            final String configuredGranularity = taskManagerConfig.getString(MetricOptions.LATENCY_SOURCE_GRANULARITY);
            LatencyStats.Granularity granularity;
            try {
                // Locale.ROOT keeps the enum lookup locale-independent.
                granularity = LatencyStats.Granularity.valueOf(configuredGranularity.toUpperCase(Locale.ROOT));
            } catch (IllegalArgumentException iae) {
                granularity = LatencyStats.Granularity.OPERATOR;
                LOG.warn("Configured value {} option for {} is invalid. Defaulting to {}.", configuredGranularity, MetricOptions.LATENCY_SOURCE_GRANULARITY.key(), granularity);
            }
            // Latency metrics are registered two levels up, on the job metric group.
            TaskManagerJobMetricGroup jobMetricGroup = this.metrics.parent().parent();
            this.latencyStats = new LatencyStats(jobMetricGroup.addGroup("latency"), historySize, container.getIndexInSubtaskGroup(), getOperatorID(), granularity);
        } catch (Exception e) {
            // Same non-fatal policy as above: degrade to an unregistered latency group.
            LOG.warn("An error occurred while instantiating latency metrics.", e);
            this.latencyStats = new LatencyStats(UnregisteredMetricGroups.createUnregisteredTaskManagerJobMetricGroup().addGroup("latency"), 1, 0, new OperatorID(), LatencyStats.Granularity.SINGLE);
        }
        this.runtimeContext = new StreamingRuntimeContext(this, environment, container.getAccumulatorMap());
        // Key selectors for the (up to) two inputs; null when an input is not keyed.
        stateKeySelector1 = config.getStatePartitioner(0, getUserCodeClassloader());
        stateKeySelector2 = config.getStatePartitioner(1, getUserCodeClassloader());
    }
}
public class TreeCrossover {
    /**
     * Performs the tree crossover between the chromosomes at {@code index} of the two
     * given chromosome sequences, replacing both entries with new chromosomes built
     * from the crossed-over, re-flattened trees. The generic parameter {@code A} lets
     * the abstract "crossover" method avoid additional casts.
     *
     * @param c1 first (mutable) chromosome sequence; entry at {@code index} is replaced
     * @param c2 second (mutable) chromosome sequence; entry at {@code index} is replaced
     * @param index position of the chromosomes to cross over
     */
    private <A> void crossover(final MSeq<Chromosome<G>> c1, final MSeq<Chromosome<G>> c2, final int index) {
        // Expand the flattened gene representations back into mutable trees.
        @SuppressWarnings("unchecked")
        final TreeNode<A> tree1 = (TreeNode<A>) TreeNode.ofTree(c1.get(index).getGene());
        @SuppressWarnings("unchecked")
        final TreeNode<A> tree2 = (TreeNode<A>) TreeNode.ofTree(c2.get(index).getGene());
        // Delegate the actual subtree exchange to the abstract crossover operation.
        crossover(tree1, tree2);
        // Re-flatten the (mutated) trees for storage in the chromosomes.
        final FlatTreeNode<A> flat1 = FlatTreeNode.of(tree1);
        final FlatTreeNode<A> flat2 = FlatTreeNode.of(tree2);
        // Use an existing gene as template so new genes carry the same metadata.
        @SuppressWarnings("unchecked")
        final TreeGene<A, ?> template = (TreeGene<A, ?>) c1.get(0).getGene();
        final ISeq<G> genes1 = flat1.map(tree -> gene(template, tree));
        final ISeq<G> genes2 = flat2.map(tree -> gene(template, tree));
        // Replace both chromosomes with new instances built from the crossed genes.
        c1.set(index, c1.get(index).newInstance(genes1));
        c2.set(index, c2.get(index).newInstance(genes2));
    }
}
public class CmsSetupBean { /** * Reads all properties from the components . properties file at the given location , a folder or a zip file . < p > * @ param location the location to read the properties from * @ return the read properties * @ throws FileNotFoundException if the properties file could not be found * @ throws CmsConfigurationException if the something else goes wrong */ protected CmsParameterConfiguration getComponentsProperties ( String location ) throws FileNotFoundException , CmsConfigurationException { } }
InputStream stream = null ; ZipFile zipFile = null ; try { // try to interpret the fileName as a folder File folder = new File ( location ) ; // if it is a file it must be a zip - file if ( folder . isFile ( ) ) { zipFile = new ZipFile ( location ) ; ZipEntry entry = zipFile . getEntry ( COMPONENTS_PROPERTIES ) ; // path to file might be relative , too if ( ( entry == null ) && location . startsWith ( "/" ) ) { entry = zipFile . getEntry ( location . substring ( 1 ) ) ; } if ( entry == null ) { zipFile . close ( ) ; throw new FileNotFoundException ( org . opencms . importexport . Messages . get ( ) . getBundle ( ) . key ( org . opencms . importexport . Messages . LOG_IMPORTEXPORT_FILE_NOT_FOUND_IN_ZIP_1 , location + "/" + COMPONENTS_PROPERTIES ) ) ; } stream = zipFile . getInputStream ( entry ) ; } else { // it is a folder File file = new File ( folder , COMPONENTS_PROPERTIES ) ; stream = new FileInputStream ( file ) ; } return new CmsParameterConfiguration ( stream ) ; } catch ( Throwable ioe ) { if ( stream != null ) { try { stream . close ( ) ; } catch ( IOException e ) { if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( e . getLocalizedMessage ( ) , e ) ; } } } if ( zipFile != null ) { try { zipFile . close ( ) ; } catch ( IOException e ) { if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( e . getLocalizedMessage ( ) , e ) ; } } } if ( ioe instanceof FileNotFoundException ) { throw ( FileNotFoundException ) ioe ; } CmsMessageContainer msg = org . opencms . importexport . Messages . get ( ) . container ( org . opencms . importexport . Messages . ERR_IMPORTEXPORT_ERROR_READING_FILE_1 , location + "/" + COMPONENTS_PROPERTIES ) ; if ( LOG . isErrorEnabled ( ) ) { LOG . error ( msg . key ( ) , ioe ) ; } throw new CmsConfigurationException ( msg , ioe ) ; }
public class EUI48XmlAdapter { /** * Converts the given { @ link EUI48 } value to its string representation . Returns { @ code null } if * { @ code val } is { @ code null } . * @ param val The EUI - 48 value . * @ return The string representation of { @ code val } . * @ see EUI48 # toString ( ) */ @ Override public String marshal ( EUI48 val ) throws Exception { } }
return val == null ? null : val . toString ( ) ;
public class ServerService { /** * Get list public IPs for provided server reference { @ code server } * @ param server server reference * @ return list public IPs */ public List < PublicIpMetadata > findPublicIp ( Server server ) { } }
ServerMetadata metadata = findByRef ( server ) ; return findPublicIp ( metadata ) ;
public class DialogPlusBuilder { /** * Sets the given view as footer . * @ param fixed is used to determine whether footer should be fixed or not . Fixed if true , scrollable otherwise */ public DialogPlusBuilder setFooter ( @ NonNull View view , boolean fixed ) { } }
this . footerView = view ; this . fixedFooter = fixed ; return this ;
public class VFSStoreResource { /** * Obtains a list of prepared transaction branches from a resource manager . * @ param _ flag flag * @ return always < code > null < / code > */ @ Override public Xid [ ] recover ( final int _flag ) { } }
if ( VFSStoreResource . LOG . isDebugEnabled ( ) ) { VFSStoreResource . LOG . debug ( "recover (flag = " + _flag + ")" ) ; } return null ;
public class InternalSARLLexer {
    /**
     * $ANTLR start "RULE_COMMENT_RICH_TEXT_INBETWEEN".
     * ANTLR-generated lexer rule (from InternalSARL.g:48777) — do not edit by hand;
     * regenerate from the grammar instead. Matches the rich-text "in between"
     * comment token: a two-character marker, the rest of the line, and an optional
     * multi-line rich-string tail terminated by a closing marker.
     */
    public final void mRULE_COMMENT_RICH_TEXT_INBETWEEN() throws RecognitionException {
        try {
            int _type = RULE_COMMENT_RICH_TEXT_INBETWEEN;
            int _channel = DEFAULT_TOKEN_CHANNEL;
            {
                // Opening marker.
                match("\uFFFD\uFFFD");
                // loop22: consume the remainder of the line (anything but \n or \r).
                loop22: do {
                    int alt22 = 2;
                    int LA22_0 = input.LA(1);
                    if (((LA22_0 >= '\u0000' && LA22_0 <= '\t') || (LA22_0 >= '\u000B' && LA22_0 <= '\f') || (LA22_0 >= '\u000E' && LA22_0 <= '\uFFFF'))) {
                        alt22 = 1;
                    }
                    switch (alt22) {
                        case 1:
                            {
                                if ((input.LA(1) >= '\u0000' && input.LA(1) <= '\t') || (input.LA(1) >= '\u000B' && input.LA(1) <= '\f') || (input.LA(1) >= '\u000E' && input.LA(1) <= '\uFFFF')) {
                                    input.consume();
                                } else {
                                    MismatchedSetException mse = new MismatchedSetException(null, input);
                                    recover(mse);
                                    throw mse;
                                }
                            }
                            break;
                        default:
                            break loop22;
                    }
                } while (true);
                // Optional tail: ('\r')? '\n' (RULE_IN_RICH_STRING)* ('\'' ('\'')?)? '\uFFFD'
                int alt27 = 2;
                int LA27_0 = input.LA(1);
                if ((LA27_0 == '\n' || LA27_0 == '\r')) {
                    alt27 = 1;
                }
                switch (alt27) {
                    case 1:
                        {
                            // Optional carriage return before the newline.
                            int alt23 = 2;
                            int LA23_0 = input.LA(1);
                            if ((LA23_0 == '\r')) {
                                alt23 = 1;
                            }
                            switch (alt23) {
                                case 1:
                                    {
                                        match('\r');
                                    }
                                    break;
                            }
                            match('\n');
                            // loop24: zero or more rich-string fragments.
                            loop24: do {
                                int alt24 = 2;
                                int LA24_0 = input.LA(1);
                                if ((LA24_0 == '\'')) {
                                    int LA24_1 = input.LA(2);
                                    if ((LA24_1 == '\'')) {
                                        int LA24_4 = input.LA(3);
                                        if (((LA24_4 >= '\u0000' && LA24_4 <= '&') || (LA24_4 >= '(' && LA24_4 <= '\uFFFC') || (LA24_4 >= '\uFFFE' && LA24_4 <= '\uFFFF'))) {
                                            alt24 = 1;
                                        }
                                    } else if (((LA24_1 >= '\u0000' && LA24_1 <= '&') || (LA24_1 >= '(' && LA24_1 <= '\uFFFC') || (LA24_1 >= '\uFFFE' && LA24_1 <= '\uFFFF'))) {
                                        alt24 = 1;
                                    }
                                } else if (((LA24_0 >= '\u0000' && LA24_0 <= '&') || (LA24_0 >= '(' && LA24_0 <= '\uFFFC') || (LA24_0 >= '\uFFFE' && LA24_0 <= '\uFFFF'))) {
                                    alt24 = 1;
                                }
                                switch (alt24) {
                                    case 1:
                                        {
                                            mRULE_IN_RICH_STRING();
                                        }
                                        break;
                                    default:
                                        break loop24;
                                }
                            } while (true);
                            // Optional one or two trailing single quotes.
                            int alt26 = 2;
                            int LA26_0 = input.LA(1);
                            if ((LA26_0 == '\'')) {
                                alt26 = 1;
                            }
                            switch (alt26) {
                                case 1:
                                    {
                                        match('\'');
                                        int alt25 = 2;
                                        int LA25_0 = input.LA(1);
                                        if ((LA25_0 == '\'')) {
                                            alt25 = 1;
                                        }
                                        switch (alt25) {
                                            case 1:
                                                {
                                                    match('\'');
                                                }
                                                break;
                                        }
                                    }
                                    break;
                            }
                            // Closing marker.
                            match('\uFFFD');
                        }
                        break;
                }
            }
            state.type = _type;
            state.channel = _channel;
        } finally {
        }
    }
}
public class SystemUtil { /** * Determines the Java version of the executing JVM . * @ return Java version */ public static String getJavaVersion ( ) { } }
String [ ] sysPropParms = new String [ ] { "java.runtime.version" , "java.version" } ; for ( int i = 0 ; i < sysPropParms . length ; i ++ ) { String val = System . getProperty ( sysPropParms [ i ] ) ; if ( ! StringUtil . isEmpty ( val ) ) { return val ; } } return null ;
public class WeakFastHashMap {
    /**
     * Remove any mapping for this key, and return any previously mapped value.
     *
     * In "fast" mode the map is treated as copy-on-write: mutations clone the
     * backing map, modify the clone, and atomically swap the reference, so readers
     * never need to synchronize. In normal mode mutations lock the backing map.
     *
     * @param key the key whose mapping is to be removed
     * @return the value removed, or null
     */
    @Override
    public V remove(Object key) {
        if (fast) {
            // Copy-on-write path: serialize writers on `this`, never mutate the
            // published map in place.
            synchronized (this) {
                Map<K, V> temp = cloneMap(map);
                V result = temp.remove(key);
                // Publish the modified clone; concurrent readers see either the old
                // or the new map, both internally consistent.
                map = temp;
                return (result);
            }
        } else {
            // Slow path: conventional in-place removal under the map's own lock.
            synchronized (map) {
                return (map.remove(key));
            }
        }
    }
}
public class MetaClassImpl { /** * Constructor selection algorithm for Groovy 2.1.9 + . * This selection algorithm was introduced as a workaround for GROOVY - 6080 . Instead of generating an index between * 0 and N where N is the number of super constructors at the time the class is compiled , this algorithm uses * a hash of the constructor descriptor instead . * This has the advantage of letting the super class add new constructors while being binary compatible . But there * are still problems with this approach : * < ul > * < li > There ' s a risk of hash collision , even if it ' s very low ( two constructors of the same class must have the same hash ) < / li > * < li > If the super class adds a new constructor which takes as an argument a superclass of an existing constructor parameter and * that this new constructor is selected at runtime , it would not find it . < / li > * < / ul > * Hopefully in the last case , the error message is much nicer now since it explains that it ' s a binary incompatible change . * @ param arguments the actual constructor call arguments * @ return a hash used to identify the constructor to be called * @ since 2.1.9 */ private int selectConstructorAndTransformArguments1 ( Object [ ] arguments ) { } }
if ( arguments == null ) arguments = EMPTY_ARGUMENTS ; Class [ ] argClasses = MetaClassHelper . convertToTypeArray ( arguments ) ; MetaClassHelper . unwrap ( arguments ) ; CachedConstructor constructor = ( CachedConstructor ) chooseMethod ( "<init>" , constructors , argClasses ) ; if ( constructor == null ) { constructor = ( CachedConstructor ) chooseMethod ( "<init>" , constructors , argClasses ) ; } if ( constructor == null ) { throw new GroovyRuntimeException ( "Could not find matching constructor for: " + theClass . getName ( ) + "(" + InvokerHelper . toTypeString ( arguments ) + ")" ) ; } final String methodDescriptor = BytecodeHelper . getMethodDescriptor ( Void . TYPE , constructor . getNativeParameterTypes ( ) ) ; // keeping 3 bits for additional information such as vargs return BytecodeHelper . hashCode ( methodDescriptor ) ;
public class XPathBuilder {
    /**
     * Builds the XPath selector for this item; meant to be overridden by each component.
     * The result has the shape {@code root + tag + "[" + predicates + "]"}, where the
     * predicates combine the base item path with an enabled/disabled sub-expression,
     * and the tag may be expanded through the "tagAndPosition" template.
     *
     * @param disabled whether to select the disabled variant of the item
     * @return the assembled XPath selector string
     */
    protected String getItemPath(boolean disabled) {
        String selector = getBaseItemPath();
        // Template value keyed by the requested state ("disabled"/"enabled").
        String subPath = applyTemplateValue(disabled ? "disabled" : "enabled");
        if (subPath != null) {
            // AND the state predicate onto any existing base predicate.
            selector += !Strings.isNullOrEmpty(selector) ? " and " + subPath : subPath;
        }
        Map<String, String[]> templatesValues = getTemplatesValues();
        String[] tagAndPositions = templatesValues.get("tagAndPosition");
        List<String> tagAndPosition = new ArrayList<>();
        if (tagAndPositions != null) {
            // The template arguments are the tag (prepended) followed by the
            // configured position values.
            tagAndPosition.addAll(Arrays.asList(tagAndPositions));
            tagAndPosition.add(0, getTag());
        }
        String tag;
        if (!tagAndPosition.isEmpty()) {
            tag = applyTemplate("tagAndPosition", tagAndPosition.toArray());
        } else {
            tag = getTag();
        }
        // Wrap the accumulated predicates in brackets only when non-empty.
        selector = getRoot() + tag + (!Strings.isNullOrEmpty(selector) ? "[" + selector + "]" : "");
        return selector;
    }
}
public class X509CertInfo {
    /**
     * Marshal the contents of a "raw" certificate into a DER sequence.
     * Fields are emitted in the order mandated by the TBSCertificate ASN.1
     * definition (RFC 5280): version, serial, signature algorithm, issuer,
     * validity, subject, public key, optional unique IDs, extensions.
     *
     * @param out the stream receiving the completed DER-encoded sequence
     * @throws CertificateException if a v1 certificate has a null issuer or subject DN
     * @throws IOException on encoding errors
     */
    private void emit(DerOutputStream out) throws CertificateException, IOException {
        DerOutputStream tmp = new DerOutputStream();

        // version number, iff not V1
        version.encode(tmp);

        // Encode serial number, issuer signing algorithm, issuer name
        // and validity
        serialNum.encode(tmp);
        algId.encode(tmp);
        // v1 certificates require a non-empty issuer DN.
        if ((version.compare(CertificateVersion.V1) == 0) && (issuer.toString() == null))
            throw new CertificateParsingException("Null issuer DN not allowed in v1 certificate");
        issuer.encode(tmp);
        interval.encode(tmp);

        // Encode subject (principal) and associated key
        // v1 certificates likewise require a non-empty subject DN.
        if ((version.compare(CertificateVersion.V1) == 0) && (subject.toString() == null))
            throw new CertificateParsingException("Null subject DN not allowed in v1 certificate");
        subject.encode(tmp);
        pubKey.encode(tmp);

        // Encode issuerUniqueId & subjectUniqueId (context tags [1] and [2]).
        if (issuerUniqueId != null) {
            issuerUniqueId.encode(tmp, DerValue.createTag(DerValue.TAG_CONTEXT, false, (byte) 1));
        }
        if (subjectUniqueId != null) {
            subjectUniqueId.encode(tmp, DerValue.createTag(DerValue.TAG_CONTEXT, false, (byte) 2));
        }

        // Write all the extensions.
        if (extensions != null) {
            extensions.encode(tmp);
        }

        // Wrap the data; encoding of the "raw" cert is now complete.
        out.write(DerValue.tag_Sequence, tmp);
    }
}
public class CoronaJobHistory { /** * Log start time of task ( TIP ) . * @ param taskId task id * @ param taskType MAP or REDUCE * @ param startTime startTime of tip . */ public void logTaskStarted ( TaskID taskId , String taskType , long startTime , String splitLocations ) { } }
if ( disableHistory ) { return ; } JobID id = taskId . getJobID ( ) ; if ( ! this . jobId . equals ( id ) ) { throw new RuntimeException ( "JobId from task: " + id + " does not match expected: " + jobId ) ; } if ( null != writers ) { log ( writers , RecordTypes . Task , new Keys [ ] { Keys . TASKID , Keys . TASK_TYPE , Keys . START_TIME , Keys . SPLITS } , new String [ ] { taskId . toString ( ) , taskType , String . valueOf ( startTime ) , splitLocations } ) ; }
public class CaseEventSupport { /** * fire * CaseCancelled */ public void fireBeforeCaseCancelled ( String caseId , CaseFileInstance caseFile , List < Long > processInstanceIds ) { } }
final Iterator < CaseEventListener > iter = getEventListenersIterator ( ) ; if ( iter . hasNext ( ) ) { final CaseCancelEvent event = new CaseCancelEvent ( identityProvider . getName ( ) , caseId , caseFile , processInstanceIds ) ; do { iter . next ( ) . beforeCaseCancelled ( event ) ; } while ( iter . hasNext ( ) ) ; }
public class PdfContentByte { /** * Sets the stroke color . < CODE > color < / CODE > can be an * < CODE > ExtendedColor < / CODE > . * @ param color the color */ public void setColorStroke ( Color color ) { } }
PdfXConformanceImp . checkPDFXConformance ( writer , PdfXConformanceImp . PDFXKEY_COLOR , color ) ; int type = ExtendedColor . getType ( color ) ; switch ( type ) { case ExtendedColor . TYPE_GRAY : { setGrayStroke ( ( ( GrayColor ) color ) . getGray ( ) ) ; break ; } case ExtendedColor . TYPE_CMYK : { CMYKColor cmyk = ( CMYKColor ) color ; setCMYKColorStrokeF ( cmyk . getCyan ( ) , cmyk . getMagenta ( ) , cmyk . getYellow ( ) , cmyk . getBlack ( ) ) ; break ; } case ExtendedColor . TYPE_SEPARATION : { SpotColor spot = ( SpotColor ) color ; setColorStroke ( spot . getPdfSpotColor ( ) , spot . getTint ( ) ) ; break ; } case ExtendedColor . TYPE_PATTERN : { PatternColor pat = ( PatternColor ) color ; setPatternStroke ( pat . getPainter ( ) ) ; break ; } case ExtendedColor . TYPE_SHADING : { ShadingColor shading = ( ShadingColor ) color ; setShadingStroke ( shading . getPdfShadingPattern ( ) ) ; break ; } default : setRGBColorStroke ( color . getRed ( ) , color . getGreen ( ) , color . getBlue ( ) ) ; }
public class Request { /** * Gets the query param , or returns default value * @ param queryParam the query parameter * @ param defaultValue the default value * @ return the value of the provided queryParam , or default if value is null * Example : query parameter ' id ' from the following request URI : / hello ? id = foo */ public String queryParamOrDefault ( String queryParam , String defaultValue ) { } }
String value = queryParams ( queryParam ) ; return value != null ? value : defaultValue ;
public class DescribeBackupsResult { /** * A list of backups . * @ param backups * A list of backups . */ public void setBackups ( java . util . Collection < Backup > backups ) { } }
if ( backups == null ) { this . backups = null ; return ; } this . backups = new java . util . ArrayList < Backup > ( backups ) ;
public class JMOptional { /** * Gets optional . * @ param string the string * @ return the optional */ public static Optional < String > getOptional ( String string ) { } }
return Optional . ofNullable ( string ) . filter ( getIsEmpty ( ) . negate ( ) ) ;
public class JsHdrsImpl {
    /**
     * Set the Byte field which indicates the subtype of the message.
     * Entry/exit trace calls are paired around the field update.
     *
     * @param value The Byte value of the Message SubType field
     */
    final void setSubtype(Byte value) {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(this, tc, "setSubtype", value);
        // Store the subtype into the underlying message object's header field.
        jmo.setField(JsHdrAccess.SUBTYPE, value);
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(this, tc, "setSubtype");
    }
}
public class BingAutoSuggestSearchImpl {
    /**
     * The AutoSuggest API lets you send a search query to Bing and get back a list of
     * suggestions. This section provides technical details about the query parameters and
     * headers that you use to request suggestions and the JSON response objects that
     * contain them.
     *
     * Synchronous wrapper: blocks on the async service call and unwraps the response body.
     *
     * @param query The user's search term.
     * @param autoSuggestOptionalParameter the object representing the optional parameters
     *        to be set before calling this API
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws ErrorResponseException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails
     *         to be sent
     * @return the Suggestions object if successful.
     */
    public Suggestions autoSuggest(String query, AutoSuggestOptionalParameter autoSuggestOptionalParameter) {
        // Block on the single emitted ServiceResponse and return its payload.
        return autoSuggestWithServiceResponseAsync(query, autoSuggestOptionalParameter).toBlocking().single().body();
    }
}
public class BasePanel {
    /**
     * Get the SField at this index.
     *
     * NOTE(review): the upper-bound check uses DBConstants.MAIN_FIELD while the lower
     * bound and the element lookup use Constants.MAIN_FIELD — confirm these two
     * constants are intentionally equal; if not, the bounds check is inconsistent.
     *
     * @param index location of the screen field.
     * @return The screen field at this location, or null when out of range.
     */
    public ScreenField getSField(int index) {
        // If this screen cant accept a select BaseTable, find the one that can
        if ((index - DBConstants.MAIN_FIELD >= m_SFieldList.size()) || (index < Constants.MAIN_FIELD))
            return null;
        try {
            return (ScreenField) m_SFieldList.elementAt(index - Constants.MAIN_FIELD);
        } catch (ArrayIndexOutOfBoundsException e) {
            // Deliberately swallowed: an out-of-range index is reported as "not found"
            // via the null return below rather than propagating.
        }
        return null; // Not found
    }
}
public class ArrayHelper { /** * Check if the passed array contains at least one < code > null < / code > element . * @ param < T > * element type * @ param aArray * The array to check . May be < code > null < / code > . * @ return < code > true < / code > only if the passed array is neither * < code > null < / code > nor empty and if at least one < code > null < / code > * element is contained . */ public static < T > boolean containsAnyNullElement ( @ Nullable final T [ ] aArray ) { } }
if ( aArray != null ) for ( final T aObj : aArray ) if ( aObj == null ) return true ; return false ;
public class OcciIaasHandler {
    /**
     * (non-Javadoc)
     * @see net.roboconf.target.api.TargetHandler
     * #terminateMachine(net.roboconf.target.api.TargetHandlerParameters, java.lang.String)
     *
     * Deletes the VM identified by {@code machineId} through the OCCI endpoint.
     * Failures from the delete call are deliberately ignored (best-effort teardown).
     */
    @Override
    public void terminateMachine(TargetHandlerParameters parameters, String machineId) throws TargetException {
        try {
            // TODO remove next line when APIs get compatible...
            // When no provider endpoint is configured, the legacy API needs the
            // "/compute" suffix appended to the server address.
            String postfix = (parameters.getTargetProperties().get(CloudautomationMixins.PROVIDER_ENDPOINT) != null ? "" : "/compute");
            OcciVMUtils.deleteVM(parameters.getTargetProperties().get(SERVER_IP_PORT) + postfix, machineId);
        } catch (TargetException ignore) {
            // ignore
            // NOTE(review): termination failures are silently swallowed — presumably a
            // deliberate best-effort cleanup; consider at least logging the exception.
        }
    }
}
public class DeliveryArtifactsPicker {
    /**
     * Refreshes all delivery dependencies for a particular product: for each delivery,
     * resolves its dependency identifiers (full GAVCs and short identifiers) to
     * artifacts via batched repository queries, attaches the resolved artifacts to the
     * delivery, and finally stores the updated product.
     *
     * @param repoHandler repository access used for lookups and the final store
     * @param product     the product whose deliveries are refreshed
     */
    public void work(RepositoryHandler repoHandler, DbProduct product) {
        if (!product.getDeliveries().isEmpty()) {
            product.getDeliveries().forEach(delivery -> {
                final Set<Artifact> artifacts = new HashSet<>();
                final DataFetchingUtils utils = new DataFetchingUtils();
                final DependencyHandler depHandler = new DependencyHandler(repoHandler);
                final Set<String> deliveryDependencies = utils.getDeliveryDependencies(repoHandler, depHandler, delivery);
                // Split the dependency ids: full GAVC coordinates vs short identifiers,
                // since each kind is resolved with a different query template.
                final Set<String> fullGAVCSet = deliveryDependencies.stream()
                    .filter(DataUtils::isFullGAVC)
                    .collect(Collectors.toSet());
                final Set<String> shortIdentiferSet = deliveryDependencies.stream()
                    .filter(entry -> !DataUtils.isFullGAVC(entry))
                    .collect(Collectors.toSet());
                // Short identifiers: regex-joined query, batch size 1.
                processDependencySet(repoHandler, shortIdentiferSet,
                    batch -> String.format(BATCH_TEMPLATE_REGEX, StringUtils.join(batch, '|')),
                    1, artifacts::add);
                // Full GAVCs: quoted-id query, batch size 10.
                processDependencySet(repoHandler, fullGAVCSet,
                    batch -> QueryUtils.quoteIds(batch, BATCH_TEMPLATE),
                    10, artifacts::add);
                if (!artifacts.isEmpty()) {
                    delivery.setAllArtifactDependencies(new ArrayList<>(artifacts));
                }
            });
            // Persist the product with its refreshed delivery dependency lists.
            repoHandler.store(product);
        }
    }
}
public class ApiOvhIp { /** * Add reverse on an ip * REST : POST / ip / { ip } / reverse * @ param reverse [ required ] * @ param ipReverse [ required ] * @ param ip [ required ] */ public OvhReverseIp ip_reverse_POST ( String ip , String ipReverse , String reverse ) throws IOException { } }
String qPath = "/ip/{ip}/reverse" ; StringBuilder sb = path ( qPath , ip ) ; HashMap < String , Object > o = new HashMap < String , Object > ( ) ; addBody ( o , "ipReverse" , ipReverse ) ; addBody ( o , "reverse" , reverse ) ; String resp = exec ( qPath , "POST" , sb . toString ( ) , o ) ; return convertTo ( resp , OvhReverseIp . class ) ;
public class HandlingEventRepositoryInMem { /** * Initilaze the in mem repository . * SpringIoC will call this init - method after the bean has bean created and * properties has been set . * @ throws ParseException */ public void init ( ) { } }
// CargoXYZ DeliverySpec deliverySpec = new DeliverySpec ( ) ; deliverySpec . setOrigin ( STOCKHOLM ) ; deliverySpec . setDestination ( MELBOURNE ) ; final Cargo cargoXYZ = new Cargo ( "XYZ" , deliverySpec ) ; registerEvent ( cargoXYZ , "2007-11-30" , HandlingEvent . Type . RECEIVE , null ) ; final CarrierMovement stockholmToHamburg = carrierMovementRepository . find ( "SESTO_DEHAM" ) ; registerEvent ( cargoXYZ , "2007-12-01" , HandlingEvent . Type . LOAD , stockholmToHamburg ) ; registerEvent ( cargoXYZ , "2007-12-02" , HandlingEvent . Type . UNLOAD , stockholmToHamburg ) ; final CarrierMovement hamburgToHongKong = carrierMovementRepository . find ( "DEHAM_CNHKG" ) ; registerEvent ( cargoXYZ , "2007-12-03" , HandlingEvent . Type . LOAD , hamburgToHongKong ) ; registerEvent ( cargoXYZ , "2007-12-05" , HandlingEvent . Type . UNLOAD , hamburgToHongKong ) ; // CargoZYX DeliverySpec deliverySpec2 = new DeliverySpec ( ) ; deliverySpec . setOrigin ( MELBOURNE ) ; deliverySpec . setDestination ( STOCKHOLM ) ; final Cargo cargoZYX = new Cargo ( "ZYX" , deliverySpec2 ) ; registerEvent ( cargoZYX , "2007-12-09" , HandlingEvent . Type . RECEIVE , null ) ; final CarrierMovement melbourneToTokyo = carrierMovementRepository . find ( "AUMEL_JPTOK" ) ; registerEvent ( cargoZYX , "2007-12-10" , HandlingEvent . Type . LOAD , melbourneToTokyo ) ; registerEvent ( cargoZYX , "2007-12-12" , HandlingEvent . Type . UNLOAD , melbourneToTokyo ) ; final CarrierMovement tokyoToLosAngeles = carrierMovementRepository . find ( "JPTOK_USLA" ) ; registerEvent ( cargoZYX , "2007-12-13" , HandlingEvent . Type . LOAD , tokyoToLosAngeles ) ; // CargoABC DeliverySpec deliverySpec3 = new DeliverySpec ( ) ; deliverySpec . setOrigin ( STOCKHOLM ) ; deliverySpec . setDestination ( HELSINKI ) ; final Cargo cargoABC = new Cargo ( "ABC" , deliverySpec3 ) ; registerEvent ( cargoABC , "2008-01-01" , HandlingEvent . Type . 
RECEIVE , null ) ; final CarrierMovement stockholmToHelsinki = new CarrierMovement ( "CAR_001" , STOCKHOLM , HELSINKI ) ; registerEvent ( cargoABC , "2008-01-02" , HandlingEvent . Type . LOAD , stockholmToHelsinki ) ; registerEvent ( cargoABC , "2008-01-03" , HandlingEvent . Type . UNLOAD , stockholmToHelsinki ) ; registerEvent ( cargoABC , "2008-01-05" , HandlingEvent . Type . CLAIM , null ) ; // CargoCBA DeliverySpec deliverySpec4 = new DeliverySpec ( ) ; deliverySpec . setOrigin ( HELSINKI ) ; deliverySpec . setDestination ( STOCKHOLM ) ; final Cargo cargoCBA = new Cargo ( "CBA" , deliverySpec4 ) ; registerEvent ( cargoCBA , "2008-01-10" , HandlingEvent . Type . RECEIVE , null ) ;
public class CauchoUtil {
    /**
     * Loads a class from a classloader. If the loader is null, uses the
     * context class loader.
     *
     * <p>FIX: the original fast path returned {@code Class.forName(name)},
     * which always initializes the class, silently ignoring a caller's
     * {@code init == false}. All paths now pass {@code init} through.
     *
     * @param name the classname, separated by '.'
     * @param init if true, resolves (initializes) the class instance
     * @param loader the class loader, or null for the context class loader
     * @return the loaded class
     * @throws ClassNotFoundException if no loader can resolve {@code name}
     */
    public static Class<?> loadClass(String name, boolean init, ClassLoader loader)
        throws ClassNotFoundException
    {
        if (loader == null)
            loader = Thread.currentThread().getContextClassLoader();

        // When no usable loader was found, or the requested loader is the one
        // that defined this class, load through CauchoUtil's own loader
        // (equivalent to the original Class.forName(name) caller-loader
        // behavior, but honoring the init flag).
        if (loader == null || loader.equals(CauchoUtil.class.getClassLoader()))
            return Class.forName(name, init, CauchoUtil.class.getClassLoader());
        else
            return Class.forName(name, init, loader);
    }
}
public class EDLLoader { /** * Loads EDL mentions , grouped by document . Multimap keys are in alphabetical order * by document ID . */ public ImmutableListMultimap < Symbol , EDLMention > loadEDLMentionsByDocFrom ( CharSource source ) throws IOException { } }
final ImmutableList < EDLMention > edlMentions = loadEDLMentionsFrom ( source ) ; final ImmutableListMultimap . Builder < Symbol , EDLMention > byDocs = ImmutableListMultimap . < Symbol , EDLMention > builder ( ) . orderKeysBy ( SymbolUtils . byStringOrdering ( ) ) ; for ( final EDLMention edlMention : edlMentions ) { byDocs . put ( edlMention . documentID ( ) , edlMention ) ; } return byDocs . build ( ) ;
public class DataItem {
    /**
     * Instructs this DataItem to write its data to the given WriteableLogRecord.
     * The write involves writing the length of the data as an int followed by
     * the data itself. The data is written at the log record's current position.
     *
     * <p>For file-backed items this call also re-points the item's cached
     * location (_logRecord/_filePosition) at the new record, and on the first
     * write notifies the parent recoverable unit section so it can stop
     * counting this payload as "unwritten".
     *
     * @param logRecord The WriteableLogRecord to write the encapsulated data to.
     */
    protected void write(WriteableLogRecord logRecord) {
        if (tc.isEntryEnabled())
            Tr.entry(tc, "write", new Object[] { logRecord, this });

        // Retrieve the data stored within this data item. This will either come from
        // the cached in memory copy or retrieved from disk.
        byte[] data = this.getData();

        if (tc.isDebugEnabled())
            Tr.debug(tc, "Writing '" + data.length + "' bytes " + RLSUtils.toHexString(data, RLSUtils.MAX_DISPLAY_BYTES));

        // Length field precedes the payload in the record.
        if (tc.isDebugEnabled())
            Tr.debug(tc, "Writing length field");
        logRecord.putInt(data.length);

        // If this data item is using the file backed storage method then we need
        // record details of the mapped storage buffer and offset within that buffer
        // where the data has been placed. If this is the first write call then the
        // information would currently be cached in memory. After this call has
        // executed further access will require the getData method to go to disk.
        // If this is a subsequent write call then the getData method will retrieve
        // the information from the current mapped byte buffer and position and
        // write it to the new mapped byte buffer and position. It will then cache
        // the new location details.
        // NOTE: position() is captured here, between putInt(length) and put(data),
        // so _filePosition refers to the start of the payload, not the length field.
        if (_storageMode == MultiScopeRecoveryLog.FILE_BACKED) {
            if (tc.isDebugEnabled())
                Tr.debug(tc, "Updaing data location references");
            _filePosition = logRecord.position();
            _logRecord = logRecord;
            _data = null; // drop the in-memory copy; future reads go via the record
        }

        if (tc.isDebugEnabled())
            Tr.debug(tc, "Writing data field");
        logRecord.put(data);

        if (!_written) {
            // This is the first time since creation of this object or reset of its internal
            // data (SingleData class only, setData method) that the write method has been
            // called. We know that the parent recoverable unit section is accounting for
            // this data items payload in its unwritten data size. Accordingly we need to
            // direct it to update this value.
            _rus.payloadWritten(_dataSize + HEADER_SIZE);
        }

        _written = true;

        if (tc.isEntryEnabled())
            Tr.exit(tc, "write");
    }
}