signature
stringlengths 43
39.1k
| implementation
stringlengths 0
450k
|
|---|---|
public class CreateNetworkProfileRequestMarshaller { /** * Marshall the given parameter object . */
public void marshall ( CreateNetworkProfileRequest createNetworkProfileRequest , ProtocolMarshaller protocolMarshaller ) { } }
|
if ( createNetworkProfileRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( createNetworkProfileRequest . getProjectArn ( ) , PROJECTARN_BINDING ) ; protocolMarshaller . marshall ( createNetworkProfileRequest . getName ( ) , NAME_BINDING ) ; protocolMarshaller . marshall ( createNetworkProfileRequest . getDescription ( ) , DESCRIPTION_BINDING ) ; protocolMarshaller . marshall ( createNetworkProfileRequest . getType ( ) , TYPE_BINDING ) ; protocolMarshaller . marshall ( createNetworkProfileRequest . getUplinkBandwidthBits ( ) , UPLINKBANDWIDTHBITS_BINDING ) ; protocolMarshaller . marshall ( createNetworkProfileRequest . getDownlinkBandwidthBits ( ) , DOWNLINKBANDWIDTHBITS_BINDING ) ; protocolMarshaller . marshall ( createNetworkProfileRequest . getUplinkDelayMs ( ) , UPLINKDELAYMS_BINDING ) ; protocolMarshaller . marshall ( createNetworkProfileRequest . getDownlinkDelayMs ( ) , DOWNLINKDELAYMS_BINDING ) ; protocolMarshaller . marshall ( createNetworkProfileRequest . getUplinkJitterMs ( ) , UPLINKJITTERMS_BINDING ) ; protocolMarshaller . marshall ( createNetworkProfileRequest . getDownlinkJitterMs ( ) , DOWNLINKJITTERMS_BINDING ) ; protocolMarshaller . marshall ( createNetworkProfileRequest . getUplinkLossPercent ( ) , UPLINKLOSSPERCENT_BINDING ) ; protocolMarshaller . marshall ( createNetworkProfileRequest . getDownlinkLossPercent ( ) , DOWNLINKLOSSPERCENT_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
|
public class Trigger { /** * Perform the action if trigger condition is met */
@ Override public void run ( ) { } }
|
if ( isTriggerExpired ( ) ) { logger . info ( this + " expired" ) ; return ; } final boolean isTriggerConditionMet = this . triggerCondition . isMet ( ) ; if ( isTriggerConditionMet ) { logger . info ( "Condition " + this . triggerCondition . getExpression ( ) + " met" ) ; for ( final TriggerAction action : this . actions ) { try { action . doAction ( ) ; } catch ( final Exception e ) { logger . error ( "Failed to do action " + action . getDescription ( ) + " for execution " + azkaban . execapp . Trigger . this . execId , e ) ; } } }
|
public class MapComposedElement { /** * Replies the group index inside which the point is located at the specified index .
* @ param pointIndex is the global index of the point .
* @ return the index of the group .
* This value is between < code > 0 < / code > and < code > this . getGroupCount ( ) < / code >
* @ throws IndexOutOfBoundsException in case of error . */
private int groupIndexForPoint ( int pointIndex ) { } }
|
if ( this . pointCoordinates == null || pointIndex < 0 || pointIndex >= this . pointCoordinates . length ) { throw new IndexOutOfBoundsException ( ) ; } if ( this . partIndexes == null ) { return 0 ; } for ( int i = 0 ; i < this . partIndexes . length ; ++ i ) { if ( pointIndex < this . partIndexes [ i ] ) { return i ; } } return this . partIndexes . length ;
|
public class HashMapImpl { /** * Put the item in the best location available in the hash table . */
private void refillEntries ( int hash ) { } }
|
for ( int count = _size ; count >= 0 ; count -- ) { hash = ( hash + 1 ) & _mask ; if ( _values [ hash ] == null ) return ; refillEntry ( hash ) ; }
|
public class HttpServer { /** * Define a virtual host alias .
* All requests to the alias are handled the same as request for
* the virtualHost .
* @ deprecated Use HttpContext . addVirtualHost
* @ param virtualHost Host name or IP
* @ param alias Alias hostname or IP */
public void addHostAlias ( String virtualHost , String alias ) { } }
|
log . warn ( "addHostAlias is deprecated. Use HttpContext.addVirtualHost" ) ; Object contextMap = _virtualHostMap . get ( virtualHost ) ; if ( contextMap == null ) throw new IllegalArgumentException ( "No Such Host: " + virtualHost ) ; _virtualHostMap . put ( alias , contextMap ) ;
|
public class HttpAuthServiceBuilder { /** * Adds an OAuth2 { @ link Authorizer } . */
public HttpAuthServiceBuilder addOAuth2 ( Authorizer < ? super OAuth2Token > authorizer ) { } }
|
return addTokenAuthorizer ( AuthTokenExtractors . OAUTH2 , requireNonNull ( authorizer , "authorizer" ) ) ;
|
public class SoyElementPass { /** * See go / soy - element - keyed - roots for reasoning on why this is disallowed . */
private void validateNoKey ( HtmlOpenTagNode firstTagNode ) { } }
|
for ( SoyNode child : firstTagNode . getChildren ( ) ) { if ( child instanceof KeyNode ) { errorReporter . report ( firstTagNode . getSourceLocation ( ) , ROOT_HAS_KEY_NODE ) ; } }
|
public class MisoScenePanel { /** * Moves the scene such that the specified tile is in the center . */
public void centerOnTile ( int tx , int ty ) { } }
|
Rectangle trect = MisoUtil . getTilePolygon ( _metrics , tx , ty ) . getBounds ( ) ; int nx = trect . x + trect . width / 2 - _vbounds . width / 2 ; int ny = trect . y + trect . height / 2 - _vbounds . height / 2 ; // Log . info ( " Centering on t : " + StringUtil . coordsToString ( tx , ty ) +
// " b : " + StringUtil . toString ( trect ) +
// " vb : " + StringUtil . toString ( _ vbounds ) +
// " , n : " + StringUtil . coordsToString ( nx , ny ) + " . " ) ;
setViewLocation ( nx , ny ) ;
|
public class RaftServiceManager { /** * Applies an initialize entry .
* Initialize entries are used only at the beginning of a new leader ' s term to force the commitment of entries from
* prior terms , therefore no logic needs to take place . */
private CompletableFuture < Void > applyInitialize ( Indexed < InitializeEntry > entry ) { } }
|
for ( RaftServiceContext service : raft . getServices ( ) ) { service . keepAliveSessions ( entry . index ( ) , entry . entry ( ) . timestamp ( ) ) ; } return CompletableFuture . completedFuture ( null ) ;
|
public class ProviderRest { /** * Gets the information of an specific provider If the provider it is not in
* the database , it returns 404 with empty payload
* < pre >
* GET / providers / { uuid }
* Request :
* GET / providers HTTP / 1.1
* Response :
* { @ code
* < ? xml version = " 1.0 " encoding = " UTF - 8 " ? >
* < provider >
* < uuid > fc923960-03fe - 41eb - 8a21 - a56709f9370f < / uuid >
* < name > provider - prueba < / name >
* < / provider >
* < / pre >
* Example : < li > curl
* http : / / localhost : 8080 / sla - service / providers / fc923960-03f
* e - 41eb - 8a21 - a56709f9370f < / li >
* @ param uuid
* of the provider
* @ return XML information with the different details of the provider */
@ GET @ Path ( "{uuid}" ) @ Produces ( MediaType . APPLICATION_XML ) public Response getProviderByUuid ( @ PathParam ( "uuid" ) String provider_uuid ) throws IOException , JAXBException { } }
|
logger . debug ( "StartOf getProviderByUuid - REQUEST for /providers/" + provider_uuid ) ; try { ProviderHelper providerRestService = getProviderHelper ( ) ; String serializedProvider = providerRestService . getProviderByUUID ( provider_uuid ) ; if ( serializedProvider != null ) { logger . debug ( "EndOf getProviderByUuid" ) ; return buildResponse ( 200 , serializedProvider ) ; } else { logger . debug ( "EndOf getProviderByUuid" ) ; return buildResponse ( 404 , printError ( 404 , "There is no provider with uuid " + provider_uuid + " in the SLA Repository Database" ) ) ; } } catch ( HelperException e ) { logger . info ( "getProviderByUuid exception:" + e . getMessage ( ) ) ; return buildResponse ( e ) ; }
|
public class Metrics { /** * Tracks a monotonically increasing value .
* @ param name The base metric name
* @ param tags MUST be an even number of arguments representing key / value pairs of tags .
* @ return A new or existing counter . */
public static Counter counter ( String name , String ... tags ) { } }
|
return globalRegistry . counter ( name , tags ) ;
|
public class Tuple14 { /** * Skip 8 degrees from this tuple . */
public final Tuple6 < T9 , T10 , T11 , T12 , T13 , T14 > skip8 ( ) { } }
|
return new Tuple6 < > ( v9 , v10 , v11 , v12 , v13 , v14 ) ;
|
public class CmsDynamicFunctionFormatWrapper { /** * Gets the parameters for this dynamic function format . < p >
* @ return the map of parameters for the dynamic function */
public Map < String , String > getParameters ( ) { } }
|
if ( m_format != null ) { return m_format . getParameters ( ) ; } return Collections . emptyMap ( ) ;
|
public class PropertiesConfigurationProducer { /** * Satisfies injection for java . util . Properties
* @ param injectionPoint EE6 injection point
* @ return the java . util . Properties loaded from the
* configuration files ( if found ) */
@ Produces @ Configuration public Properties getProperties ( InjectionPoint injectionPoint ) { } }
|
// properties should be stored here
Properties properties = new Properties ( ) ; // locate configurations
ConfigurationWrapper configuration = this . getConfigurationWrapper ( injectionPoint ) ; List < ISource > found = this . locate ( configuration ) ; // input stream list is immutable , copy so we can reverse
// if it needs merge
List < ISource > copy = new ArrayList < ISource > ( found ) ; // when merged the lowest priority should go first
// since the list comes in the order where the
// most important properties are found first
// it needs to be reversed
if ( configuration . merge ( ) ) { Collections . reverse ( copy ) ; } // show how many streams were located
this . logger . trace ( "Found {} streams to load properties from" , copy . size ( ) ) ; // load each properties item individually
for ( ISource source : copy ) { // get type for stream
SupportedType type = MimeGuesser . guess ( source ) ; // if stream is not available , continue
if ( ! source . available ( ) ) { continue ; } // get stream
InputStream stream = source . stream ( ) ; // load properties
Properties local = new Properties ( ) ; try { // support XML as a type
if ( SupportedType . XML . equals ( type ) ) { local . loadFromXML ( stream ) ; } else { local . load ( stream ) ; } // log
this . logger . trace ( "Loaded {} properties from stream type '{}'" , local . size ( ) , type . name ( ) ) ; // and then merge into properties
properties . putAll ( local ) ; } catch ( IOException e ) { this . logger . error ( "An error occured while loading configuration properties: {}" , e . getMessage ( ) ) ; } // close stream
try { stream . close ( ) ; } catch ( IOException e ) { this . logger . trace ( "Could not close old stream: {}" , stream ) ; } // if not merge , then we ' re done
if ( ! configuration . merge ( ) ) { break ; } } // return properties
return properties ;
|
public class AmortizedSparseVector { /** * { @ inheritDoc } */
public double magnitude ( ) { } }
|
double m = 0 ; for ( IndexValue v : values ) m += v . value * v . value ; return Math . sqrt ( m ) ;
|
public class BpmnParse { /** * Parses the end events of a certain level in the process ( process ,
* subprocess or another scope ) .
* @ param parentElement
* The ' parent ' element that contains the end events ( process ,
* subprocess ) .
* @ param scope
* The { @ link ScopeImpl } to which the end events must be added . */
public void parseEndEvents ( Element parentElement , ScopeImpl scope ) { } }
|
for ( Element endEventElement : parentElement . elements ( "endEvent" ) ) { ActivityImpl activity = createActivityOnScope ( endEventElement , scope ) ; Element errorEventDefinition = endEventElement . element ( ERROR_EVENT_DEFINITION ) ; Element cancelEventDefinition = endEventElement . element ( CANCEL_EVENT_DEFINITION ) ; Element terminateEventDefinition = endEventElement . element ( "terminateEventDefinition" ) ; Element messageEventDefinitionElement = endEventElement . element ( MESSAGE_EVENT_DEFINITION ) ; Element signalEventDefinition = endEventElement . element ( SIGNAL_EVENT_DEFINITION ) ; Element compensateEventDefinitionElement = endEventElement . element ( COMPENSATE_EVENT_DEFINITION ) ; Element escalationEventDefinition = endEventElement . element ( ESCALATION_EVENT_DEFINITION ) ; if ( errorEventDefinition != null ) { // error end event
String errorRef = errorEventDefinition . attribute ( "errorRef" ) ; if ( errorRef == null || "" . equals ( errorRef ) ) { addError ( "'errorRef' attribute is mandatory on error end event" , errorEventDefinition ) ; } else { Error error = errors . get ( errorRef ) ; if ( error != null && ( error . getErrorCode ( ) == null || "" . equals ( error . getErrorCode ( ) ) ) ) { addError ( "'errorCode' is mandatory on errors referenced by throwing error event definitions, but the error '" + error . getId ( ) + "' does not define one." , errorEventDefinition ) ; } activity . getProperties ( ) . set ( BpmnProperties . TYPE , ActivityTypes . END_EVENT_ERROR ) ; if ( error != null ) { activity . setActivityBehavior ( new ErrorEndEventActivityBehavior ( error . getErrorCode ( ) ) ) ; } else { activity . setActivityBehavior ( new ErrorEndEventActivityBehavior ( errorRef ) ) ; } } } else if ( cancelEventDefinition != null ) { if ( scope . getProperty ( BpmnProperties . TYPE . getName ( ) ) == null || ! scope . getProperty ( BpmnProperties . TYPE . getName ( ) ) . equals ( "transaction" ) ) { addError ( "end event with cancelEventDefinition only supported inside transaction subprocess" , cancelEventDefinition ) ; } else { activity . getProperties ( ) . set ( BpmnProperties . TYPE , ActivityTypes . END_EVENT_CANCEL ) ; activity . setActivityBehavior ( new CancelEndEventActivityBehavior ( ) ) ; activity . setActivityStartBehavior ( ActivityStartBehavior . INTERRUPT_FLOW_SCOPE ) ; activity . setProperty ( PROPERTYNAME_THROWS_COMPENSATION , true ) ; activity . setScope ( true ) ; } } else if ( terminateEventDefinition != null ) { activity . getProperties ( ) . set ( BpmnProperties . TYPE , ActivityTypes . END_EVENT_TERMINATE ) ; activity . setActivityBehavior ( new TerminateEndEventActivityBehavior ( ) ) ; activity . setActivityStartBehavior ( ActivityStartBehavior . 
INTERRUPT_FLOW_SCOPE ) ; } else if ( messageEventDefinitionElement != null ) { if ( isServiceTaskLike ( messageEventDefinitionElement ) ) { // CAM - 436 same behaviour as service task
ActivityImpl act = parseServiceTaskLike ( ActivityTypes . END_EVENT_MESSAGE , messageEventDefinitionElement , scope ) ; activity . getProperties ( ) . set ( BpmnProperties . TYPE , ActivityTypes . END_EVENT_MESSAGE ) ; activity . setActivityBehavior ( act . getActivityBehavior ( ) ) ; scope . getActivities ( ) . remove ( act ) ; } else { // default to non behavior if no service task
// properties have been specified
activity . setActivityBehavior ( new IntermediateThrowNoneEventActivityBehavior ( ) ) ; } } else if ( signalEventDefinition != null ) { activity . getProperties ( ) . set ( BpmnProperties . TYPE , ActivityTypes . END_EVENT_SIGNAL ) ; EventSubscriptionDeclaration signalDefinition = parseSignalEventDefinition ( signalEventDefinition , true ) ; activity . setActivityBehavior ( new ThrowSignalEventActivityBehavior ( signalDefinition ) ) ; } else if ( compensateEventDefinitionElement != null ) { activity . getProperties ( ) . set ( BpmnProperties . TYPE , ActivityTypes . END_EVENT_COMPENSATION ) ; CompensateEventDefinition compensateEventDefinition = parseThrowCompensateEventDefinition ( compensateEventDefinitionElement , scope ) ; activity . setActivityBehavior ( new CompensationEventActivityBehavior ( compensateEventDefinition ) ) ; activity . setProperty ( PROPERTYNAME_THROWS_COMPENSATION , true ) ; activity . setScope ( true ) ; } else if ( escalationEventDefinition != null ) { activity . getProperties ( ) . set ( BpmnProperties . TYPE , ActivityTypes . END_EVENT_ESCALATION ) ; Escalation escalation = findEscalationForEscalationEventDefinition ( escalationEventDefinition ) ; if ( escalation != null && escalation . getEscalationCode ( ) == null ) { addError ( "escalation end event must have an 'escalationCode'" , escalationEventDefinition ) ; } activity . setActivityBehavior ( new ThrowEscalationEventActivityBehavior ( escalation ) ) ; } else { // default : none end event
activity . getProperties ( ) . set ( BpmnProperties . TYPE , ActivityTypes . END_EVENT_NONE ) ; activity . setActivityBehavior ( new NoneEndEventActivityBehavior ( ) ) ; } if ( activity != null ) { parseActivityInputOutput ( endEventElement , activity ) ; } parseAsynchronousContinuationForActivity ( endEventElement , activity ) ; parseExecutionListenersOnScope ( endEventElement , activity ) ; for ( BpmnParseListener parseListener : parseListeners ) { parseListener . parseEndEvent ( endEventElement , scope , activity ) ; } }
|
public class DifferentialEvolutionSelection { /** * Execute ( ) method */
@ Override public List < DoubleSolution > execute ( List < DoubleSolution > solutionSet ) { } }
|
if ( null == solutionSet ) { throw new JMetalException ( "Parameter is null" ) ; } else if ( ( solutionListIndex < 0 ) || ( solutionListIndex > solutionSet . size ( ) ) ) { throw new JMetalException ( "Index value invalid: " + solutionListIndex ) ; } else if ( solutionSet . size ( ) < 4 ) { throw new JMetalException ( "The population has less than four solutions: " + solutionSet . size ( ) ) ; } List < DoubleSolution > parents = new ArrayList < > ( 3 ) ; int r1 , r2 , r3 ; do { r1 = randomGenerator . getRandomValue ( 0 , solutionSet . size ( ) - 1 ) ; } while ( r1 == solutionListIndex ) ; do { r2 = randomGenerator . getRandomValue ( 0 , solutionSet . size ( ) - 1 ) ; } while ( r2 == solutionListIndex || r2 == r1 ) ; do { r3 = randomGenerator . getRandomValue ( 0 , solutionSet . size ( ) - 1 ) ; } while ( r3 == solutionListIndex || r3 == r1 || r3 == r2 ) ; parents . add ( solutionSet . get ( r1 ) ) ; parents . add ( solutionSet . get ( r2 ) ) ; parents . add ( solutionSet . get ( r3 ) ) ; return parents ;
|
public class SimulationJobSummary { /** * A list of simulation job robot application names .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setRobotApplicationNames ( java . util . Collection ) } or
* { @ link # withRobotApplicationNames ( java . util . Collection ) } if you want to override the existing values .
* @ param robotApplicationNames
* A list of simulation job robot application names .
* @ return Returns a reference to this object so that method calls can be chained together . */
public SimulationJobSummary withRobotApplicationNames ( String ... robotApplicationNames ) { } }
|
if ( this . robotApplicationNames == null ) { setRobotApplicationNames ( new java . util . ArrayList < String > ( robotApplicationNames . length ) ) ; } for ( String ele : robotApplicationNames ) { this . robotApplicationNames . add ( ele ) ; } return this ;
|
public class PersistentTimerTaskHandlerImpl { @ Override public Map < String , String > getExecutionProperties ( ) { } }
|
String taskOwner = j2eeName . getApplication ( ) + "/" + j2eeName . getModule ( ) + "/" + j2eeName . getComponent ( ) ; BeanMetaData bmd = getBeanMetaData ( ) ; HashMap < String , String > props = new HashMap < String , String > ( ) ; // Value for TaskName column that may be queried .
props . put ( ManagedTask . IDENTITY_NAME , getTaskName ( ) ) ; // Indicates whether timer runs under PersistentExecutor transaction
// set to SUSPEND for NOT _ SUPPORTED or BMT
props . put ( ManagedTask . TRANSACTION , runInGlobalTransaction ( bmd . timedMethodInfos [ methodId ] ) ? ManagedTask . USE_TRANSACTION_OF_EXECUTION_THREAD : ManagedTask . SUSPEND ) ; // Useful for future MBean support provided by PersistentExecutor
props . put ( WSContextService . TASK_OWNER , taskOwner ) ; // Pass global transaction timeout
props . put ( PersistentExecutor . TRANSACTION_TIMEOUT , Integer . toString ( bmd . _globalTran . getTransactionTimeout ( ) ) ) ; return props ;
|
public class ProcessAssert { /** * Asserts the task with the provided id is pending completion .
* @ param taskId
* the task ' s id to check for . May not be < code > null < / code > */
public static final void assertTaskUncompleted ( final String taskId ) { } }
|
Validate . notNull ( taskId ) ; apiCallback . debug ( LogMessage . TASK_2 , taskId ) ; try { getTaskInstanceAssertable ( ) . taskIsUncompleted ( taskId ) ; } catch ( final AssertionError ae ) { apiCallback . fail ( ae , LogMessage . ERROR_TASK_2 , taskId ) ; }
|
public class ViSearch { /** * Get insert status by insert trans id , and get errors page .
* @ param transId the id of the insert transaction .
* @ param errorPage page number of the error list
* @ param errorLimit per page limit number of the error list
* @ return the insert transaction */
@ Override public InsertStatus insertStatus ( String transId , Integer errorPage , Integer errorLimit ) { } }
|
return dataOperations . insertStatus ( transId , errorPage , errorLimit ) ;
|
public class PackageManagerUtils { /** * Checks if the device has an app widget feature .
* @ param manager the package manager .
* @ return { @ code true } if the device has an app widget feature . */
@ TargetApi ( Build . VERSION_CODES . JELLY_BEAN_MR2 ) public static boolean hasAppWidgetFeature ( PackageManager manager ) { } }
|
return manager . hasSystemFeature ( PackageManager . FEATURE_APP_WIDGETS ) ;
|
public class BaseDestinationHandler { /** * Do we have a remote localisation ? */
public void setRemote ( boolean hasRemote ) { } }
|
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "setRemote" , Boolean . valueOf ( hasRemote ) ) ; getLocalisationManager ( ) . setRemote ( hasRemote ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "setRemote" ) ;
|
public class ValueEnforcer { /** * Check that the passed iterable contains no < code > null < / code > value . But the
* whole iterable can be < code > null < / code > or empty .
* @ param < T >
* Type to be checked and returned
* @ param aValue
* The collection to check . May be < code > null < / code > .
* @ param sName
* The name of the value ( e . g . the parameter name )
* @ return The passed value . Maybe < code > null < / code > .
* @ throws IllegalArgumentException
* if the passed value is not empty and a < code > null < / code > value is
* contained */
@ Nullable public static < T extends Iterable < ? > > T noNullValue ( final T aValue , final String sName ) { } }
|
if ( isEnabled ( ) ) return noNullValue ( aValue , ( ) -> sName ) ; return aValue ;
|
public class WritableAuthorizerConfiguration { /** * Adds a new role to the list of defined roles .
* @ param roleName - The name of the role being added . */
public synchronized void addRoleMapping ( final String roleName ) { } }
|
HashMap < String , RoleMappingImpl > newRoles = new HashMap < String , RoleMappingImpl > ( roleMappings ) ; if ( newRoles . containsKey ( roleName ) == false ) { newRoles . put ( roleName , new RoleMappingImpl ( roleName ) ) ; roleMappings = Collections . unmodifiableMap ( newRoles ) ; }
|
public class Line { /** * TODO use Util # skipSpaces */
public boolean skipSpaces ( ) { } }
|
while ( this . pos < this . value . length ( ) && this . value . charAt ( this . pos ) == ' ' ) { this . pos ++ ; } return this . pos < this . value . length ( ) ;
|
public class ListDomainsResult { /** * Information about the domains .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setDomains ( java . util . Collection ) } or { @ link # withDomains ( java . util . Collection ) } if you want to override
* the existing values .
* @ param domains
* Information about the domains .
* @ return Returns a reference to this object so that method calls can be chained together . */
public ListDomainsResult withDomains ( DomainSummary ... domains ) { } }
|
if ( this . domains == null ) { setDomains ( new java . util . ArrayList < DomainSummary > ( domains . length ) ) ; } for ( DomainSummary ele : domains ) { this . domains . add ( ele ) ; } return this ;
|
public class ExtendedJTATransactionFactory { /** * Get the singleton instance of ExtendedJTATransaction */
public static ExtendedJTATransaction getExtendedJTATransaction ( ) { } }
|
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "getExtendedJTATransaction" , instance ) ; } return instance ;
|
public class DescribeVolumesModificationsResult { /** * Information about the volume modifications .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setVolumesModifications ( java . util . Collection ) } or { @ link # withVolumesModifications ( java . util . Collection ) }
* if you want to override the existing values .
* @ param volumesModifications
* Information about the volume modifications .
* @ return Returns a reference to this object so that method calls can be chained together . */
public DescribeVolumesModificationsResult withVolumesModifications ( VolumeModification ... volumesModifications ) { } }
|
if ( this . volumesModifications == null ) { setVolumesModifications ( new com . amazonaws . internal . SdkInternalList < VolumeModification > ( volumesModifications . length ) ) ; } for ( VolumeModification ele : volumesModifications ) { this . volumesModifications . add ( ele ) ; } return this ;
|
public class WindowsFaxClientSpiHelper { /** * This function extracts the native resources ( the fax4j . exe and fax4j . dll )
* and pushes them to the fax4j temporary directory . */
public static void extractNativeResources ( ) { } }
|
synchronized ( WindowsFaxClientSpiHelper . NATIVE_LOCK ) { // get target directory
File directory = IOHelper . getFax4jInternalTemporaryDirectory ( ) ; // extract resources
String [ ] names = new String [ ] { "fax4j.dll" , "fax4j.exe" } ; int amount = names . length ; String name = null ; File file = null ; InputStream inputStream = null ; OutputStream outputStream = null ; for ( int index = 0 ; index < amount ; index ++ ) { // get next resource
name = names [ index ] ; // get file
file = new File ( directory , name ) ; if ( ! file . exists ( ) ) { // get input stream
inputStream = WindowsFaxClientSpiHelper . class . getResourceAsStream ( name ) ; if ( inputStream != null ) { try { // create output stream
outputStream = new FileOutputStream ( file ) ; // write data to file
IOHelper . readAndWriteStreams ( inputStream , outputStream ) ; } catch ( IOException exception ) { throw new FaxException ( "Unable to extract resource: " + name , exception ) ; } } } } }
|
public class PAbstractObject { /** * Get a property as a string or throw an exception .
* @ param key the property name */
@ Override public final String getString ( final String key ) { } }
|
String result = optString ( key ) ; if ( result == null ) { throw new ObjectMissingException ( this , key ) ; } return result ;
|
public class ClassIndex { /** * Retrieves a list of classes annotated by given annotation .
* The annotation must be annotated with { @ link IndexAnnotated } for annotated classes
* to be indexed at compile - time by { @ link org . atteo . classindex . processor . ClassIndexProcessor } .
* @ param annotation annotation to search class for
* @ param classLoader classloader for loading classes
* @ return list of annotated classes */
public static Iterable < Class < ? > > getAnnotated ( Class < ? extends Annotation > annotation , ClassLoader classLoader ) { } }
|
Iterable < String > entries = getAnnotatedNames ( annotation , classLoader ) ; Set < Class < ? > > classes = new HashSet < > ( ) ; findClasses ( classLoader , classes , entries ) ; return classes ;
|
public class HashBasedPrimaryElection { /** * Handles a cluster membership event . */
private void handleClusterMembershipEvent ( ClusterMembershipEvent event ) { } }
|
if ( event . type ( ) == ClusterMembershipEvent . Type . MEMBER_ADDED || event . type ( ) == ClusterMembershipEvent . Type . MEMBER_REMOVED ) { recomputeTerm ( groupMembershipService . getMembership ( partitionId . group ( ) ) ) ; }
|
public class ApiClient { /** * Download file from the given response .
* @ param response An instance of the Response object
* @ throws ApiException If fail to read file content from response and write to disk
* @ return Downloaded file */
public File downloadFileFromResponse ( Response response ) throws ApiException { } }
|
try { File file = prepareDownloadFile ( response ) ; BufferedSink sink = Okio . buffer ( Okio . sink ( file ) ) ; sink . writeAll ( response . body ( ) . source ( ) ) ; sink . close ( ) ; return file ; } catch ( IOException e ) { throw new ApiException ( e ) ; }
|
public class DatabaseInformationFull { /** * Retrieves a < code > Table < / code > object describing all visible
* sessions . ADMIN users see * all * sessions
* while non - admin users see only their own session . < p >
* Each row is a session state description with the following columns : < p >
* < pre class = " SqlCodeExample " >
* SESSION _ ID INTEGER session identifier
* CONNECTED TIMESTAMP time at which session was created
* USER _ NAME VARCHAR db user name of current session user
* IS _ ADMIN BOOLEAN is session user an admin user ?
* AUTOCOMMIT BOOLEAN is session in autocommit mode ?
* READONLY BOOLEAN is session in read - only mode ?
* MAXROWS INTEGER session ' s MAXROWS setting
* LAST _ IDENTITY INTEGER last identity value used by this session
* TRANSACTION _ SIZE INTEGER # of undo items in current transaction
* SCHEMA VARCHAR current schema for session
* < / pre > < p >
* @ return a < code > Table < / code > object describing all visible
* sessions */
Table SYSTEM_SESSIONS ( ) { } }
|
Table t = sysTables [ SYSTEM_SESSIONS ] ; if ( t == null ) { t = createBlankTable ( sysTableHsqlNames [ SYSTEM_SESSIONS ] ) ; addColumn ( t , "SESSION_ID" , CARDINAL_NUMBER ) ; addColumn ( t , "CONNECTED" , TIME_STAMP ) ; addColumn ( t , "USER_NAME" , SQL_IDENTIFIER ) ; addColumn ( t , "IS_ADMIN" , Type . SQL_BOOLEAN ) ; addColumn ( t , "AUTOCOMMIT" , Type . SQL_BOOLEAN ) ; addColumn ( t , "READONLY" , Type . SQL_BOOLEAN ) ; addColumn ( t , "MAXROWS" , CARDINAL_NUMBER ) ; // Note : some sessions may have a NULL LAST _ IDENTITY value
addColumn ( t , "LAST_IDENTITY" , CARDINAL_NUMBER ) ; addColumn ( t , "TRANSACTION_SIZE" , CARDINAL_NUMBER ) ; addColumn ( t , "SCHEMA" , SQL_IDENTIFIER ) ; // order : SESSION _ ID
// true primary key
HsqlName name = HsqlNameManager . newInfoSchemaObjectName ( sysTableHsqlNames [ SYSTEM_SESSIONS ] . name , false , SchemaObject . INDEX ) ; t . createPrimaryKey ( name , new int [ ] { 0 } , true ) ; return t ; } // column number mappings
final int isid = 0 ; final int ict = 1 ; final int iuname = 2 ; final int iis_admin = 3 ; final int iautocmt = 4 ; final int ireadonly = 5 ; final int imaxrows = 6 ; final int ilast_id = 7 ; final int it_size = 8 ; final int it_schema = 9 ; PersistentStore store = database . persistentStoreCollection . getStore ( t ) ; // intermediate holders
Session [ ] sessions ; Session s ; Object [ ] row ; // Initialisation
sessions = ns . listVisibleSessions ( session ) ; // Do it .
for ( int i = 0 ; i < sessions . length ; i ++ ) { s = sessions [ i ] ; row = t . getEmptyRowData ( ) ; row [ isid ] = ValuePool . getLong ( s . getId ( ) ) ; row [ ict ] = new TimestampData ( s . getConnectTime ( ) / 1000 ) ; row [ iuname ] = s . getUsername ( ) ; row [ iis_admin ] = ValuePool . getBoolean ( s . isAdmin ( ) ) ; row [ iautocmt ] = ValuePool . getBoolean ( s . isAutoCommit ( ) ) ; row [ ireadonly ] = ValuePool . getBoolean ( s . isReadOnlyDefault ( ) ) ; row [ imaxrows ] = ValuePool . getInt ( s . getSQLMaxRows ( ) ) ; row [ ilast_id ] = ValuePool . getLong ( ( ( Number ) s . getLastIdentity ( ) ) . longValue ( ) ) ; row [ it_size ] = ValuePool . getInt ( s . getTransactionSize ( ) ) ; row [ it_schema ] = s . getCurrentSchemaHsqlName ( ) . name ; t . insertSys ( store , row ) ; } return t ;
|
public class Ftp { /** * check if a file exists or not
* @ return FTPCLient
* @ throws IOException
* @ throws PageException */
private AFTPClient actionExistsFile ( ) throws PageException , IOException { } }
|
required ( "remotefile" , remotefile ) ; AFTPClient client = getClient ( ) ; FTPFile file = existsFile ( client , remotefile , true ) ; Struct cfftp = writeCfftp ( client ) ; cfftp . setEL ( RETURN_VALUE , Caster . toBoolean ( file != null && file . isFile ( ) ) ) ; cfftp . setEL ( SUCCEEDED , Boolean . TRUE ) ; stoponerror = false ; return client ;
|
public class Project { /** * Returns the generator set for this project . */
public IGeneratorSet getGenerators ( ) { } }
|
if ( generatorSet_ == null && generatorSetRef_ != null ) { try ( GeneratorSetResource resource = GeneratorSetResource . at ( urlFor ( generatorSetRef_ ) ) ) { generatorSet_ = resource . getGeneratorSet ( ) ; } catch ( Exception e ) { throw new GeneratorSetException ( String . format ( "Can't read resource at %s" , generatorSetRef_ ) , e ) ; } } return generatorSet_ ;
|
public class PmiRegistry { /** * returns all children if recursive = true */
public static StatDescriptor [ ] listStatMembers ( StatDescriptor sd , boolean recursive ) { } }
|
if ( disabled ) return null ; ModuleItem module = null ; if ( sd == null ) module = moduleRoot ; // root
else module = findModuleItem ( sd . getPath ( ) ) ; if ( module == null ) return null ; else { ArrayList list = module . listChildStatDescriptors ( recursive ) ; int n = list . size ( ) ; StatDescriptor [ ] ret = new StatDescriptor [ n ] ; for ( int k = 0 ; k < n ; k ++ ) ret [ k ] = ( StatDescriptor ) list . get ( k ) ; return ret ; }
|
public class StructuredQueryBuilder { /** * Identifies a parent element with child latitude and longitude attributes
 * to match with a geospatial query .
 * @ param parent the parent of the element with the coordinates
 * @ param lat the attribute with the latitude coordinate
 * @ param lon the attribute with the longitude coordinate
 * @ return the specification for the index on the geospatial coordinates */
 public GeospatialIndex geoAttributePair ( Element parent , Attribute lat , Attribute lon ) { } }
|
// Simple factory delegate: wraps the three index components in the implementation type.
return new GeoAttributePairImpl ( parent , lat , lon ) ;
|
public class LongColumn { /** * Returns a DateTimeColumn where each value is the LocalDateTime represented by the values in this column
* The values in this column must be longs that represent the time in milliseconds from the epoch as in standard
* Java date / time calculations
* @ param offset The ZoneOffset to use in the calculation
* @ return A column of LocalDateTime values */
public DateTimeColumn asDateTimes ( ZoneOffset offset ) { } }
|
DateTimeColumn column = DateTimeColumn . create ( name ( ) + ": date time" ) ; for ( int i = 0 ; i < size ( ) ; i ++ ) { column . append ( Instant . ofEpochMilli ( getLong ( i ) ) . atZone ( offset ) . toLocalDateTime ( ) ) ; } return column ;
|
public class ChatBalloon { /** * Builds the balloon outline path without an arrow : a width x height rectangle
 * whose four corners are rounded individually by the cornerSize* fields
 * ( a size of 0 leaves that corner square ) . The outline is traced clockwise
 * starting from the top - left corner . */
private Path gotNoneArrowPath ( int width , int height ) { } }
|
Path path = new Path ( ) ; RectF rectF ; int diameter ; if ( cornerSizeLeftTop > 0 ) { // top-left rounded corner start
diameter = cornerSizeLeftTop * 2 ; path . moveTo ( 0 , cornerSizeLeftTop ) ; rectF = new RectF ( 0 , 0 , diameter , diameter ) ; path . arcTo ( rectF , 180 , 90 ) ; } else { path . moveTo ( 0 , 0 ) ; } if ( cornerSizeRightTop > 0 ) { // top-right rounded corner start
diameter = cornerSizeRightTop * 2 ; path . lineTo ( width - cornerSizeRightTop , 0 ) ; rectF = new RectF ( width - diameter , 0 , width , diameter ) ; path . arcTo ( rectF , - 90 , 90 ) ; } else { path . lineTo ( width , 0 ) ; } if ( cornerSizeRightBottom > 0 ) { // bottom-right rounded corner start
diameter = cornerSizeRightBottom * 2 ; path . lineTo ( width , height - cornerSizeRightBottom ) ; rectF = new RectF ( width - diameter , height - diameter , width , height ) ; path . arcTo ( rectF , 0 , 90 ) ; } else { path . lineTo ( width , height ) ; } if ( cornerSizeLeftBottom > 0 ) { // bottom-left rounded corner start
diameter = cornerSizeLeftBottom * 2 ; path . lineTo ( cornerSizeLeftBottom , height ) ; rectF = new RectF ( 0 , height - diameter , diameter , height ) ; path . arcTo ( rectF , 90 , 90 ) ; } else { path . lineTo ( 0 , height ) ; } // clockwise loop complete: close back to the start point
path . close ( ) ; return path ;
|
public class WorkflowTemplateServiceClient { /** * Instantiates a template and begins execution .
* < p > The returned Operation can be used to track execution of workflow by polling
* [ operations . get ] [ google . longrunning . Operations . GetOperation ] . The Operation will complete when
* entire workflow is finished .
* < p > The running workflow can be aborted via
* [ operations . cancel ] [ google . longrunning . Operations . CancelOperation ] . This will cause any
* inflight jobs to be cancelled and workflow - owned clusters to be deleted .
* < p > The [ Operation . metadata ] [ google . longrunning . Operation . metadata ] will be
* [ WorkflowMetadata ] [ google . cloud . dataproc . v1beta2 . WorkflowMetadata ] .
* < p > On successful completion , [ Operation . response ] [ google . longrunning . Operation . response ] will
* be [ Empty ] [ google . protobuf . Empty ] .
* < p > Sample code :
* < pre > < code >
* try ( WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient . create ( ) ) {
* WorkflowTemplateName name = WorkflowTemplateName . of ( " [ PROJECT ] " , " [ REGION ] " , " [ WORKFLOW _ TEMPLATE ] " ) ;
* workflowTemplateServiceClient . instantiateWorkflowTemplateAsync ( name ) . get ( ) ;
* < / code > < / pre >
* @ param name Required . The " resource name " of the workflow template , as described in
* https : / / cloud . google . com / apis / design / resource _ names of the form
* ` projects / { project _ id } / regions / { region } / workflowTemplates / { template _ id } `
* @ throws com . google . api . gax . rpc . ApiException if the remote call fails */
@ BetaApi ( "The surface for long-running operations is not stable yet and may change in the future." ) public final OperationFuture < Empty , WorkflowMetadata > instantiateWorkflowTemplateAsync ( WorkflowTemplateName name ) { } }
|
InstantiateWorkflowTemplateRequest request = InstantiateWorkflowTemplateRequest . newBuilder ( ) . setName ( name == null ? null : name . toString ( ) ) . build ( ) ; return instantiateWorkflowTemplateAsync ( request ) ;
|
public class StyleSet { /** * Applies the given border style and color to every cell style managed by this
 * set ( header , general , number and date cell styles ) .
 * @ param borderSize border thickness , a { @ link BorderStyle } enum value
 * @ param colorIndex border color as an indexed - color short value
 * @ return this
 * @ since 4.0.0 */
public StyleSet setBorder ( BorderStyle borderSize , IndexedColors colorIndex ) { } }
|
// The same border is applied uniformly to all four managed styles.
StyleUtil . setBorder ( this . headCellStyle , borderSize , colorIndex ) ; StyleUtil . setBorder ( this . cellStyle , borderSize , colorIndex ) ; StyleUtil . setBorder ( this . cellStyleForNumber , borderSize , colorIndex ) ; StyleUtil . setBorder ( this . cellStyleForDate , borderSize , colorIndex ) ; return this ;
|
public class Variant { /** * Generates an String representation of this Variant that can be parsed .
* @ return A parsable String representation of this Variant */
public String getParseableString ( ) { } }
|
// TODO expand this
switch ( this . getType ( ) ) { case VT_I1 : case VT_I2 : case VT_I4 : case VT_INT : return Integer . toString ( this . intValue ( ) ) ; case VT_I8 : return Long . toString ( this . longValue ( ) ) ; case VT_R4 : return Float . toString ( this . floatValue ( ) ) ; case VT_R8 : return Double . toString ( this . doubleValue ( ) ) ; case VT_BSTR : return this . stringValue ( ) ; case VT_NULL : return "null" ; case VT_BOOL : return Boolean . toString ( this . booleanValue ( ) ) ; case VT_ERROR : return Integer . toHexString ( this . getError ( ) ) ; } System . err . println ( "Don't know how to print " + this . getType ( ) . name ( ) + " as an Java literal" ) ; return null ;
|
public class TimeoutStepExecutor { /** * Runs a task on the scheduled executor so that it can be interrupted and timed
 * out if it overruns . Only one step may run at a time ; a request arriving while
 * another step is in progress is rejected via the failure consumer . */
void runWithinPeriod ( Runnable runnable , ExecuteStepMessage executeStepMessage , int timeout , TimeUnit unit ) { } }
|
// Atomically claim the single-step slot; when already claimed, fall through to the rejection branch.
if ( ! isRunningAStep . getAndSet ( true ) ) { this . currentlyExecutingStep = executeStepMessage ; Future < String > future = null ; try { future = scheduledExecutorService . submit ( runStepAndResetIsRunning ( runnable ) , "OK" ) ; future . get ( timeout , unit ) ; } catch ( TimeoutException e ) { // Timed out waiting for the step to run
// We should try to cancel and interrupt the thread which is running the step - although this isn ' t
// guaranteed to succeed .
future . cancel ( true ) ; log . warn ( "A step failed to execute within " + timeout + " " + unit + ", attempting to cancel the step" ) ; // Here the step server should have timed out the step and proceed already - we don ' t need to send a failure message
} catch ( Exception e ) { String ms = "Exception while executing step [" + e . getMessage ( ) + "]" ; log . error ( ms , e ) ; stepFailureConsumer . accept ( ms , executeStepMessage ) ; } } else { // server will time out this step
String message = "Cannot execute a test step, a step is already in progress [" + currentlyExecutingStep . getStepId ( ) + ", " + currentlyExecutingStep . getPattern ( ) + "]" ; log . error ( message ) ; stepFailureConsumer . accept ( message , executeStepMessage ) ; }
|
public class InternalXtextParser { /** * ANTLR - generated entry rule .
 * InternalXtext . g : 1045:1 : entryRuleParenthesizedElement : ruleParenthesizedElement EOF ;
 * NOTE : generated parser code - regenerate from the grammar instead of hand - editing . */
public final void entryRuleParenthesizedElement ( ) throws RecognitionException { } }
|
try { // InternalXtext . g : 1046:1 : ( ruleParenthesizedElement EOF )
// InternalXtext . g : 1047:1 : ruleParenthesizedElement EOF
{ before ( grammarAccess . getParenthesizedElementRule ( ) ) ; pushFollow ( FollowSets000 . FOLLOW_1 ) ; ruleParenthesizedElement ( ) ; state . _fsp -- ; after ( grammarAccess . getParenthesizedElementRule ( ) ) ; match ( input , EOF , FollowSets000 . FOLLOW_2 ) ; } } catch ( RecognitionException re ) { // standard ANTLR error recovery: report and resynchronize
reportError ( re ) ; recover ( input , re ) ; } finally { } return ;
|
public class ClassUtils { /** * < p > searchProperty . < / p >
* @ param leftParameter a { @ link java . lang . Object } object .
* @ param name a { @ link java . lang . String } object .
* @ return a { @ link java . lang . Object } object .
* @ throws java . lang . Exception if any . */
@ SuppressWarnings ( "unchecked" ) public static Object searchProperty ( Object leftParameter , String name ) throws Exception { } }
|
Class < ? > leftClass = leftParameter . getClass ( ) ; Object result ; if ( leftParameter . getClass ( ) . isArray ( ) && "length" . equals ( name ) ) { result = Array . getLength ( leftParameter ) ; } else if ( leftParameter instanceof Map ) { result = ( ( Map < Object , Object > ) leftParameter ) . get ( name ) ; } else { try { String getter = "get" + name . substring ( 0 , 1 ) . toUpperCase ( ) + name . substring ( 1 ) ; Method method = leftClass . getMethod ( getter ) ; if ( ! method . isAccessible ( ) ) { method . setAccessible ( true ) ; } result = method . invoke ( leftParameter ) ; } catch ( NoSuchMethodException e2 ) { try { String getter = "is" + name . substring ( 0 , 1 ) . toUpperCase ( ) + name . substring ( 1 ) ; Method method = leftClass . getMethod ( getter ) ; if ( ! method . isAccessible ( ) ) { method . setAccessible ( true ) ; } result = method . invoke ( leftParameter ) ; } catch ( NoSuchMethodException e3 ) { Field field = leftClass . getField ( name ) ; result = field . get ( leftParameter ) ; } } } return result ;
|
public class QueryGroupTreeElement { /** * Removes a query from the group and returns the removed query , or null if
* the query was not found in this group . */
public QueryTreeElement removeQuery ( String query_id ) { } }
|
for ( QueryTreeElement query : queries ) { if ( query . getID ( ) . equals ( query_id ) ) { queries . remove ( query ) ; return query ; } } return null ;
|
public class Hipster { /** * Instantiates a IDA * algorithm given a problem definition .
 * @ param components
 * search problem definition with the components of the algorithm
 * @ param < A >
 * type of the actions
 * @ param < S >
 * type of the states
 * @ param < C >
 * type of the cost
 * @ param < N >
 * type of the nodes
 * @ return instance of { @ link es . usc . citius . hipster . algorithm . IDAStar } for
 * the problem definition */
public static < A , S , C extends Comparable < C > , N extends HeuristicNode < A , S , C , N > > IDAStar < A , S , C , N > createIDAStar ( SearchProblem < A , S , N > components ) { } }
|
// IDA* needs only the initial node and the node expander from the problem definition.
return new IDAStar < A , S , C , N > ( components . getInitialNode ( ) , components . getExpander ( ) ) ;
|
public class GenericLogicDiscoverer { /** * Discover the operations that are classified by some ( i . e . , at least one ) of the types given . That is , all those
 * that have some level of matching with respect to at least one entity provided in the model references .
 * @ param modelReferences the classifications to match against
 * @ return a Map from each matching operation URI to its MatchResult . If there are no solutions , the Map should be empty , not null . */
@ Override public Map < URI , MatchResult > findOperationsClassifiedBySome ( Set < URI > modelReferences ) { } }
|
// NOTE(review): delegates to findServicesClassifiedBySome with Subsume matching -
// confirm the service-level search is intentional for operation discovery.
return findServicesClassifiedBySome ( modelReferences , LogicConceptMatchType . Subsume ) ;
|
public class DailyCalendar { /** * Checks the specified values for validity as a set of time values .
* @ param hourOfDay
* the hour of the time to check ( in military ( 24 - hour ) time )
* @ param minute
* the minute of the time to check
* @ param second
* the second of the time to check
* @ param millis
* the millisecond of the time to check */
private void _validate ( final int hourOfDay , final int minute , final int second , final int millis ) { } }
|
if ( hourOfDay < 0 || hourOfDay > 23 ) { throw new IllegalArgumentException ( invalidHourOfDay + hourOfDay ) ; } if ( minute < 0 || minute > 59 ) { throw new IllegalArgumentException ( invalidMinute + minute ) ; } if ( second < 0 || second > 59 ) { throw new IllegalArgumentException ( invalidSecond + second ) ; } if ( millis < 0 || millis > 999 ) { throw new IllegalArgumentException ( invalidMillis + millis ) ; }
|
public class DisassociateFleetRequestMarshaller { /** * Marshall the given parameter object . */
public void marshall ( DisassociateFleetRequest disassociateFleetRequest , ProtocolMarshaller protocolMarshaller ) { } }
|
if ( disassociateFleetRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( disassociateFleetRequest . getFleetName ( ) , FLEETNAME_BINDING ) ; protocolMarshaller . marshall ( disassociateFleetRequest . getStackName ( ) , STACKNAME_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
|
public class SimpleMMcifConsumer { /** * Initiates a new group - a Hetatom , Nucleotide or AminoAcid - for the given
 * record , preferring the chemical component dictionary when it knows the
 * three - letter code . */
private Group getNewGroup ( String recordName , Character aminoCode1 , long seq_id , String groupCode3 ) { } }
|
// First try the chemical component dictionary: when it yields a populated group,
// just stamp the sequence id on it and return it as-is.
Group g = ChemCompGroupFactory . getGroupFromChemCompDictionary ( groupCode3 ) ; if ( g != null && ! g . getChemComp ( ) . isEmpty ( ) ) { if ( g instanceof AminoAcidImpl ) { AminoAcidImpl aa = ( AminoAcidImpl ) g ; aa . setId ( seq_id ) ; } else if ( g instanceof NucleotideImpl ) { NucleotideImpl nuc = ( NucleotideImpl ) g ; nuc . setId ( seq_id ) ; } else if ( g instanceof HetatomImpl ) { HetatomImpl het = ( HetatomImpl ) g ; het . setId ( seq_id ) ; } return g ; } Group group ; // Fallback classification. For ATOM records an unknown or missing one-letter code
// means hetero group; for non-ATOM records any non-null one-letter code wins as amino acid.
if ( recordName . equals ( "ATOM" ) ) { if ( StructureTools . isNucleotide ( groupCode3 ) ) { // it is a nucleotide
NucleotideImpl nu = new NucleotideImpl ( ) ; group = nu ; nu . setId ( seq_id ) ; } else if ( aminoCode1 == null || aminoCode1 == StructureTools . UNKNOWN_GROUP_LABEL ) { HetatomImpl h = new HetatomImpl ( ) ; h . setId ( seq_id ) ; group = h ; } else { AminoAcidImpl aa = new AminoAcidImpl ( ) ; aa . setAminoType ( aminoCode1 ) ; aa . setId ( seq_id ) ; group = aa ; } } else { if ( StructureTools . isNucleotide ( groupCode3 ) ) { // it is a nucleotide
NucleotideImpl nu = new NucleotideImpl ( ) ; group = nu ; nu . setId ( seq_id ) ; } else if ( aminoCode1 != null ) { AminoAcidImpl aa = new AminoAcidImpl ( ) ; aa . setAminoType ( aminoCode1 ) ; aa . setId ( seq_id ) ; group = aa ; } else { HetatomImpl h = new HetatomImpl ( ) ; h . setId ( seq_id ) ; group = h ; } } return group ;
|
public class nstimeout { /** * Use this API to fetch all the nstimeout resources that are configured on netscaler .
 * @ param service the NITRO service session used for the fetch
 * @ return the nstimeout resource
 * @ throws Exception if the fetch fails */
public static nstimeout get ( nitro_service service ) throws Exception { } }
|
// nstimeout behaves as a global singleton resource, so only the first element is returned.
// NOTE(review): an empty response would raise ArrayIndexOutOfBoundsException here - confirm the API always returns one entry.
nstimeout obj = new nstimeout ( ) ; nstimeout [ ] response = ( nstimeout [ ] ) obj . get_resources ( service ) ; return response [ 0 ] ;
|
public class Session { /** * SEQUENCE current values */
void logSequences ( ) { } }
|
OrderedHashSet set = sessionData . sequenceUpdateSet ; if ( set == null || set . isEmpty ( ) ) { return ; } for ( int i = 0 , size = set . size ( ) ; i < size ; i ++ ) { NumberSequence sequence = ( NumberSequence ) set . get ( i ) ; database . logger . writeSequenceStatement ( this , sequence ) ; } sessionData . sequenceUpdateSet . clear ( ) ;
|
public class SignalRsInner { /** * Get the access keys of the SignalR resource .
 * @ param resourceGroupName The name of the resource group that contains the resource . You can obtain this value from the Azure Resource Manager API or the portal .
 * @ param resourceName The name of the SignalR resource .
 * @ param serviceCallback the async ServiceCallback to handle successful and failed responses .
 * @ throws IllegalArgumentException thrown if parameters fail the validation
 * @ return the { @ link ServiceFuture } object */
public ServiceFuture < SignalRKeysInner > listKeysAsync ( String resourceGroupName , String resourceName , final ServiceCallback < SignalRKeysInner > serviceCallback ) { } }
|
// Adapter overload: bridges the Observable-based call to the callback-style ServiceFuture API.
return ServiceFuture . fromResponse ( listKeysWithServiceResponseAsync ( resourceGroupName , resourceName ) , serviceCallback ) ;
|
public class ForwardingClient { /** * Get the active tunnels for a remote forwarding listener .
* @ param key
* @ return ActiveTunnel [ ]
* @ throws IOException */
public ActiveTunnel [ ] getRemoteForwardingTunnels ( String key ) throws IOException { } }
|
synchronized ( incomingtunnels ) { if ( incomingtunnels . containsKey ( key ) ) { Vector < ActiveTunnel > v = incomingtunnels . get ( key ) ; ActiveTunnel [ ] t = new ActiveTunnel [ v . size ( ) ] ; v . copyInto ( t ) ; return t ; } } if ( ! remoteforwardings . containsKey ( key ) ) { throw new IOException ( key + " is not a valid remote forwarding configuration" ) ; } return new ActiveTunnel [ ] { } ;
|
public class ExecutionVertex { /** * Assigns the execution vertex with an { @ link AllocatedResource } .
* @ param allocatedResource
* the resources which are supposed to be allocated to this vertex */
public void setAllocatedResource ( final AllocatedResource allocatedResource ) { } }
|
if ( allocatedResource == null ) { throw new IllegalArgumentException ( "Argument allocatedResource must not be null" ) ; } final AllocatedResource previousResource = this . allocatedResource . getAndSet ( allocatedResource ) ; if ( previousResource != null ) { previousResource . removeVertexFromResource ( this ) ; } allocatedResource . assignVertexToResource ( this ) ; // Notify all listener objects
final Iterator < VertexAssignmentListener > it = this . vertexAssignmentListeners . iterator ( ) ; while ( it . hasNext ( ) ) { it . next ( ) . vertexAssignmentChanged ( this . vertexID , allocatedResource ) ; }
|
public class Numbers { /** * Represents the given { @ link Number } exactly as a long value without any
* magnitude and precision losses ; if that ' s not possible , fails by throwing
* an exception .
* @ param number the number to represent as a long value .
* @ return a long representation of the given number .
* @ throws IllegalArgumentException if no exact representation exists . */
public static long asLongExactly ( Number number ) { } }
|
Class clazz = number . getClass ( ) ; if ( isLongRepresentable ( clazz ) ) { return number . longValue ( ) ; } else if ( isDoubleRepresentable ( clazz ) ) { long longValue = number . longValue ( ) ; if ( equalDoubles ( number . doubleValue ( ) , ( double ) longValue ) ) { return longValue ; } } throw new IllegalArgumentException ( "Can't represent " + number + " as long exactly" ) ;
|
public class SecStrucCalc { /** * Conditions to extend a ladder with a given beta Bridge :
* < li > The bridge and ladder are of the same type .
* < li > The smallest bridge residue is sequential to the first
* strand ladder .
* < li > The second bridge residue is either sequential ( parallel )
* or previous ( antiparallel ) to the second strand of the ladder
* < / li >
* @ param ladder the ladder candidate to extend
* @ param b the beta bridge that would extend the ladder
* @ return true if the bridge b extends the ladder */
private boolean shouldExtendLadder ( Ladder ladder , BetaBridge b ) { } }
|
// Only extend if they are of the same type
boolean sameType = b . type . equals ( ladder . btype ) ; if ( ! sameType ) return false ; // Only extend if residue 1 is sequential to ladder strand
boolean sequential = ( b . partner1 == ladder . to + 1 ) ; if ( ! sequential ) return false ; switch ( b . type ) { case parallel : // Residue 2 should be sequential to second strand
if ( b . partner2 == ladder . lto + 1 ) return true ; break ; case antiparallel : // Residue 2 should be previous to second strand
if ( b . partner2 == ladder . lfrom - 1 ) return true ; break ; } return false ;
|
public class AppServiceEnvironmentsInner { /** * Get usage metrics for a multi - role pool of an App Service Environment .
 * Blocks on the first page , then pages lazily : each subsequent page is fetched
 * synchronously when the PagedList advances .
 * @ param nextPageLink The NextLink from the previous successful call to List operation .
 * @ throws IllegalArgumentException thrown if parameters fail the validation
 * @ throws CloudException thrown if the request is rejected by server
 * @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
 * @ return the PagedList & lt ; UsageInner & gt ; object if successful . */
public PagedList < UsageInner > listMultiRoleUsagesNext ( final String nextPageLink ) { } }
|
// Fetch the first page synchronously; the returned PagedList lazily loads further
// pages by blocking on the corresponding single-page async call.
ServiceResponse < Page < UsageInner > > response = listMultiRoleUsagesNextSinglePageAsync ( nextPageLink ) . toBlocking ( ) . single ( ) ; return new PagedList < UsageInner > ( response . body ( ) ) { @ Override public Page < UsageInner > nextPage ( String nextPageLink ) { return listMultiRoleUsagesNextSinglePageAsync ( nextPageLink ) . toBlocking ( ) . single ( ) . body ( ) ; } } ;
|
public class EsperListenerParser { /** * Parses out a set of configured esper statement listeners .
 * @ param element
 * the esper listeners element
 * @ param parserContext
 * the parser ' s current context
 * @ return a managed set of listener bean references */
@ SuppressWarnings ( "unchecked" ) public ManagedSet parseListeners ( Element element , ParserContext parserContext ) { } }
|
ManagedSet listeners = new ManagedSet ( ) ; NodeList childNodes = element . getChildNodes ( ) ; for ( int i = 0 ; i < childNodes . getLength ( ) ; i ++ ) { Node child = childNodes . item ( i ) ; if ( child . getNodeType ( ) == Node . ELEMENT_NODE ) { Element childElement = ( Element ) child ; String localName = child . getLocalName ( ) ; // inline <bean> definitions are registered with the context and referenced by generated name
if ( "bean" . equals ( localName ) ) { BeanDefinitionHolder holder = parserContext . getDelegate ( ) . parseBeanDefinitionElement ( childElement ) ; parserContext . registerBeanComponent ( new BeanComponentDefinition ( holder ) ) ; listeners . add ( new RuntimeBeanReference ( holder . getBeanName ( ) ) ) ; } else if ( "ref" . equals ( localName ) ) { // <ref bean="..."/> points at an existing bean definition
String ref = childElement . getAttribute ( "bean" ) ; listeners . add ( new RuntimeBeanReference ( ref ) ) ; } } } return listeners ;
|
public class OkRequest { /** * Write the key and value in the entry as form data to the request body .
 * The pair specified will be URL - encoded and sent with the
 * ' application / x - www - form - urlencoded ' content - type .
 * @ param entry the name / value pair to write
 * @ param charset the charset used for URL - encoding the pair
 * @ return this request */
public OkRequest < T > form ( final Map . Entry < String , String > entry , final String charset ) { } }
|
// Convenience overload: unpack the entry and delegate to form(name, value, charset).
return form ( entry . getKey ( ) , entry . getValue ( ) , charset ) ;
|
public class StrUtil { /** * Joins the given values with the delimiter .
 * Not deprecated because this allows varargs .
 * @ param delimiter the separator placed between elements ; must not be null
 * @ param strs the values to join
 * @ return the joined string */
public static final String join ( CharSequence delimiter , CharSequence ... strs ) { } }
|
// Delegates to Commons Lang; note delimiter.toString() throws NullPointerException for a null delimiter.
return StringUtils . join ( strs , delimiter . toString ( ) ) ;
|
public class PubSubMessageItemStream { /** * < p > This method deletes the messages with no references . Previously these messages
 * were deleted during ME startup in reconstitute method . This method is called from
 * DeletePubSubMsgsThread context . < / p >
 * < p > The method exits gracefully in case the ME is stopped or the corresponding
 * destination is deleted . < / p > */
public void deleteMsgsWithNoReferences ( ) { } }
|
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "deleteMsgsWithNoReferences" ) ; NonLockingCursor cursor = null ; try { if ( deleteMessageslock . tryLock ( 0 , TimeUnit . SECONDS ) ) { // trying with tryLock as incase if there is lock contention , then
// removeAllItemsWithNoRefCount ( ) might have acquired lock . In that case we gracefully
// exit as the work is done by removeAllItemsWithNoRefCount ( )
LocalTransaction transaction = destinationHandler . getTxManager ( ) . createLocalTransaction ( true ) ; // Remove any available messages that have no references
cursor = newNonLockingItemCursor ( new ClassEqualsFilter ( MessageItem . class ) ) ; MessageItem messageItem = ( MessageItem ) cursor . next ( ) ; while ( ( messageItem != null ) && ! HasToStop ( ) ) { if ( messageItem . getReferenceCount ( ) == 0 ) { try { // The message is no longer required and can be deleted
messageItem . remove ( ( Transaction ) transaction , NO_LOCK_ID ) ; } catch ( NotInMessageStore e ) { // No FFDC code needed
SibTr . exception ( tc , e ) ; // It is possible that this item has just been removed , log and continue
} } messageItem = ( MessageItem ) cursor . next ( ) ; } transaction . commit ( ) ; } } catch ( InterruptedException e ) { // No FFDC code needed
// code flow may never enter here as nobody would interrupt this thread
// ( may be in case if ME stopped in FORCE mode )
SibTr . exception ( tc , e ) ; } catch ( MessageStoreException e ) { // MessageStoreException shouldn ' t occur so FFDC .
FFDCFilter . processException ( e , "com.ibm.ws.sib.processor.impl.store.itemstreams.PubSubMessageItemStream.deleteMsgsWithNoReferences" , "1:244:1.71" , this ) ; SibTr . exception ( tc , e ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "deleteMsgsWithNoReferences" , e ) ; } catch ( SIException e ) { // logging FFDC here itself and not propagating exception to the callers
FFDCFilter . processException ( e , "com.ibm.ws.sib.processor.impl.store.itemstreams.PubSubMessageItemStream.deleteMsgsWithNoReferences" , "1:244:1.72" , this ) ; SibTr . exception ( tc , e ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "deleteMsgsWithNoReferences" , e ) ; } finally { // always release the cursor and drop the lock if this thread holds it
if ( cursor != null ) cursor . finished ( ) ; if ( deleteMessageslock . getHoldCount ( ) > 0 ) deleteMessageslock . unlock ( ) ; // only unlock if it is acquired by this thread
}
|
public class BeanToBean { /** * get the bean property type
* @ param clazz
* @ param propertyName
* @ param originalType
* @ return */
protected TypeReference < ? > getBeanPropertyType ( Class < ? > clazz , String propertyName , TypeReference < ? > originalType ) { } }
|
TypeReference < ? > propertyDestinationType = null ; if ( beanDestinationPropertyTypeProvider != null ) { propertyDestinationType = beanDestinationPropertyTypeProvider . getPropertyType ( clazz , propertyName , originalType ) ; } if ( propertyDestinationType == null ) { propertyDestinationType = originalType ; } return propertyDestinationType ;
|
public class FileUtilImpl { /** * This method gets the singleton instance of this { @ link FileUtilImpl } . < br >
 * < b > ATTENTION : < / b > < br >
 * Please prefer dependency - injection instead of using this method .
 * @ return the singleton instance . */
public static FileUtil getInstance ( ) { } }
|
// Double-checked-locking lazy singleton; the instance is fully initialized before publication.
// NOTE(review): this pattern is only safe if the `instance` field is declared volatile -
// confirm at the field declaration site.
if ( instance == null ) { synchronized ( FileUtilImpl . class ) { if ( instance == null ) { FileUtilImpl util = new FileUtilImpl ( ) ; util . initialize ( ) ; instance = util ; } } } return instance ;
|
public class ClassUtils { /** * Check whether the given class is loadable in the given ClassLoader .
* @ param clazz the class to check ( typically an interface )
* @ param classLoader the ClassLoader to check against
* @ return true if the given class is loadable ; otherwise false
* @ since 6.0.0 */
private static boolean isLoadable ( Class < ? > clazz , ClassLoader classLoader ) { } }
|
try { return ( clazz == classLoader . loadClass ( clazz . getName ( ) ) ) ; // Else : different class with same name found
} catch ( ClassNotFoundException ex ) { // No corresponding class found at all
return false ; }
|
public class GenericTypeImpl { /** * Initializes the derived componentType and keyType of this generic type :
 * generic arrays contribute their component type , raw arrays their component
 * class , collections their element type variable and maps their key / value
 * type variables , each resolved against this type where possible . */
protected final void init ( ) { } }
|
// Start unresolved; a GenericArrayType directly supplies its component type.
Type genericComponentType = null ; Type genericKeyType = null ; if ( this . type instanceof GenericArrayType ) { GenericArrayType arrayType = ( GenericArrayType ) this . type ; genericComponentType = arrayType . getGenericComponentType ( ) ; }
// Otherwise classify by the raw class: array, Collection element, or Map key/value.
if ( genericComponentType == null ) { TypeVariable < ? > keyTypeVariable = null ; TypeVariable < ? > componentTypeVariable = null ; if ( this . retrievalClass . isArray ( ) ) { genericComponentType = this . retrievalClass . getComponentType ( ) ; } else if ( Collection . class . isAssignableFrom ( this . retrievalClass ) ) { componentTypeVariable = CommonTypeVariables . TYPE_VARIABLE_COLLECTION_ELEMENT ; } else if ( Map . class . isAssignableFrom ( this . retrievalClass ) ) { componentTypeVariable = CommonTypeVariables . TYPE_VARIABLE_MAP_VALUE ; keyTypeVariable = CommonTypeVariables . TYPE_VARIABLE_MAP_KEY ; }
// Resolve the collected type variables; fall back to the unresolved variable itself.
if ( componentTypeVariable != null ) { genericComponentType = resolveTypeVariable ( componentTypeVariable , this ) ; if ( genericComponentType == null ) { genericComponentType = componentTypeVariable ; } } if ( keyTypeVariable != null ) { genericKeyType = resolveTypeVariable ( keyTypeVariable , this ) ; if ( genericKeyType == null ) { genericKeyType = keyTypeVariable ; } } }
// Publish the results (null when this type has no component / key type).
if ( genericComponentType == null ) { this . componentType = null ; } else { this . componentType = create ( genericComponentType , this . definingType ) ; } if ( genericKeyType == null ) { this . keyType = null ; } else { this . keyType = create ( genericKeyType , this . definingType ) ; }
|
public class Asset { /** * This is one - way ( not unique ) */
public static Map < String , String > getLanguageToExtension ( ) { } }
|
if ( languageToExtension == null ) { languageToExtension = new HashMap < String , String > ( ) ; // TODO map should be driven from properties
languageToExtension . put ( "Groovy" , ".groovy" ) ; languageToExtension . put ( "GROOVY" , ".groovy" ) ; languageToExtension . put ( "Kotlin" , ".kt" ) ; languageToExtension . put ( "KOTLIN" , ".kt" ) ; languageToExtension . put ( "Kotlin Script" , ".kts" ) ; languageToExtension . put ( "KOTLIN_SCRIPT" , ".kts" ) ; languageToExtension . put ( "JavaScript" , ".js" ) ; languageToExtension . put ( "JAVASCRIPT" , ".js" ) ; languageToExtension . put ( "JSX" , ".jsx" ) ; languageToExtension . put ( "WEBSCRIPT" , ".js" ) ; languageToExtension . put ( "GPath" , ".groovy" ) ; languageToExtension . put ( "GPATH" , ".groovy" ) ; languageToExtension . put ( "xslt" , ".xsl" ) ; languageToExtension . put ( "XSLT" , ".xsl" ) ; languageToExtension . put ( "PAGELET" , ".xml" ) ; languageToExtension . put ( "Pagelet" , ".xml" ) ; languageToExtension . put ( "Drools" , ".drl" ) ; languageToExtension . put ( "DROOLS" , ".drl" ) ; languageToExtension . put ( "Guided" , ".brl" ) ; languageToExtension . put ( "GUIDED" , ".brl" ) ; languageToExtension . put ( "CSS" , ".css" ) ; languageToExtension . put ( "VELOCITY" , ".vsl" ) ; languageToExtension . put ( "Velocity" , ".vsl" ) ; languageToExtension . put ( "EXCEL" , ".xls" ) ; languageToExtension . put ( "EXCEL_2007" , ".xlsx" ) ; languageToExtension . put ( "MS_WORD" , ".docx" ) ; languageToExtension . put ( "MS Word" , ".docx" ) ; languageToExtension . put ( "HTML" , ".html" ) ; languageToExtension . put ( "Java" , ".java" ) ; languageToExtension . put ( "CONFIG" , ".xml" ) ; languageToExtension . put ( "TESTDATA" , ".tst" ) ; languageToExtension . put ( "JAR" , ".jar" ) ; languageToExtension . put ( "TEXT" , ".txt" ) ; languageToExtension . put ( "XML" , ".xml" ) ; languageToExtension . put ( "WSDL" , ".wsdl" ) ; languageToExtension . put ( "XSL" , ".xsl" ) ; languageToExtension . put ( "XSD" , ".xsd" ) ; languageToExtension . put ( "CSV" , ".csv" ) ; languageToExtension . put ( "SPRING" , ".spring" ) ; languageToExtension . 
put ( "CAMEL_ROUTE" , ".camel" ) ; languageToExtension . put ( "PROCESS" , ".proc" ) ; languageToExtension . put ( "TEST" , ".test" ) ; languageToExtension . put ( "POSTMAN" , ".postman" ) ; languageToExtension . put ( "FEATURE" , ".feature" ) ; languageToExtension . put ( "YAML" , ".yaml" ) ; languageToExtension . put ( "JSON" , ".json" ) ; languageToExtension . put ( "TASK" , ".task" ) ; languageToExtension . put ( "MARKDOWN" , ".md" ) ; languageToExtension . put ( "Markdown" , ".md" ) ; } return languageToExtension ;
|
public class Tools {
    /**
     * Checks whether the supplied string can be parsed as a {@code long}.
     *
     * @param number the string to check (may be {@code null})
     * @return {@code true} if the string is a valid long, else {@code false}
     */
    public static boolean isLong(String number) {
        try {
            Long.parseLong(number);
            return true;
        } catch (NumberFormatException e) {
            // Not parsable as a long; parseLong(null) also throws NFE, so null yields false.
            return false;
        }
    }
}
|
public class JobPullMachine { /** * 发送Job pull 请求 */
private void sendRequest ( ) throws RemotingCommandFieldCheckException { } }
|
int availableThreads = appContext . getRunnerPool ( ) . getAvailablePoolSize ( ) ; if ( LOGGER . isDebugEnabled ( ) ) { LOGGER . debug ( "current availableThreads:{}" , availableThreads ) ; } if ( availableThreads == 0 ) { return ; } JobPullRequest requestBody = appContext . getCommandBodyWrapper ( ) . wrapper ( new JobPullRequest ( ) ) ; requestBody . setAvailableThreads ( availableThreads ) ; RemotingCommand request = RemotingCommand . createRequestCommand ( JobProtos . RequestCode . JOB_PULL . code ( ) , requestBody ) ; try { RemotingCommand responseCommand = appContext . getRemotingClient ( ) . invokeSync ( request ) ; if ( responseCommand == null ) { LOGGER . warn ( "Job pull request failed! response command is null!" ) ; return ; } if ( JobProtos . ResponseCode . JOB_PULL_SUCCESS . code ( ) == responseCommand . getCode ( ) ) { if ( LOGGER . isDebugEnabled ( ) ) { LOGGER . debug ( "Job pull request success!" ) ; } return ; } LOGGER . warn ( "Job pull request failed! response command is null!" ) ; } catch ( JobTrackerNotFoundException e ) { LOGGER . warn ( "no job tracker available!" ) ; }
|
public class LTriFunctionBuilder { /** * One of ways of creating builder . This is possibly the least verbose way where compiler should be able to guess the generic parameters . */
@ Nonnull public static < T1 , T2 , T3 , R > LTriFunction < T1 , T2 , T3 , R > triFunctionFrom ( Consumer < LTriFunctionBuilder < T1 , T2 , T3 , R > > buildingFunction ) { } }
|
LTriFunctionBuilder builder = new LTriFunctionBuilder ( ) ; buildingFunction . accept ( builder ) ; return builder . build ( ) ;
|
public class MapFuncSup {
    /**
     * Runs {@code func} over each (key, value) pair in the map for which
     * {@code predicate} holds.
     *
     * @param predicate a function that takes each element from the map and returns
     *                  true or false (or null)
     * @param func      a function that takes each element from the map and returns
     *                  'last loop info'
     * @param <R>       the 'last loop value' type
     * @return the 'last loop value'; see
     *         <a href="https://github.com/wkgcass/Style/">the tutorial</a> for
     *         more info about 'last loop value'
     */
    public <R> R forThose(RFunc2<Boolean, K, V> predicate, RFunc2<R, K, V> func) {
        // Adapt func via Style.$ and delegate to the overload that performs the loop.
        return forThose(predicate, Style.$(func));
    }
}
|
public class XTable {
    /**
     * Read the data into the table.
     *
     * The lookup key is interpreted as a file path; if the resolved XML file
     * exists it is imported into the record's table and the counter is fixed up.
     *
     * @param table the field table to populate
     * @throws DBException on database errors (declared; this implementation
     *                     currently swallows all exceptions and prints the trace)
     */
    public void readData(FieldTable table) throws DBException {
        // Only a String lookup key can name a source file; anything else is ignored.
        if (this.getLookupKey() instanceof String) {
            try {
                FieldList record = table.getRecord();
                String strFilePath = (String) this.getLookupKey();
                // Resolve the raw path through the database's filename mapping.
                String strFilePathName = this.getPDatabase().getFilename(strFilePath, false);
                File file = new File(strFilePathName);
                if (file.exists()) {
                    XmlInOut inOut = new XmlInOut(null, null, null);
                    boolean bSuccess = inOut.importXML(((Record) record).getTable(), strFilePathName, null);
                    inOut.free();
                    if (!bSuccess) {
                        // Ignore (for now) — import failure is deliberately not treated as fatal.
                    }
                    // Re-synchronize the table's counter after the bulk import.
                    this.fixCounter(table);
                }
            } catch (Exception ex) {
                // NOTE(review): best-effort read — all errors are logged to stderr, not rethrown.
                ex.printStackTrace();
            }
        }
    }
}
|
public class PathProcessor { /** * Process the points in a polygon definition
* @ param element The XML element being read
* @ param tokens The tokens representing the path
* @ return The number of points found
* @ throws ParsingException Indicates an invalid token in the path */
private static Path processPoly ( Element element , StringTokenizer tokens ) throws ParsingException { } }
|
int count = 0 ; ArrayList pts = new ArrayList ( ) ; boolean moved = false ; boolean reasonToBePath = false ; Path path = null ; while ( tokens . hasMoreTokens ( ) ) { try { String nextToken = tokens . nextToken ( ) ; if ( nextToken . equals ( "L" ) ) { float x = Float . parseFloat ( tokens . nextToken ( ) ) ; float y = Float . parseFloat ( tokens . nextToken ( ) ) ; path . lineTo ( x , y ) ; continue ; } if ( nextToken . equals ( "z" ) ) { path . close ( ) ; continue ; } if ( nextToken . equals ( "M" ) ) { if ( ! moved ) { moved = true ; float x = Float . parseFloat ( tokens . nextToken ( ) ) ; float y = Float . parseFloat ( tokens . nextToken ( ) ) ; path = new Path ( x , y ) ; continue ; } reasonToBePath = true ; float x = Float . parseFloat ( tokens . nextToken ( ) ) ; float y = Float . parseFloat ( tokens . nextToken ( ) ) ; path . startHole ( x , y ) ; continue ; } if ( nextToken . equals ( "C" ) ) { reasonToBePath = true ; float cx1 = Float . parseFloat ( tokens . nextToken ( ) ) ; float cy1 = Float . parseFloat ( tokens . nextToken ( ) ) ; float cx2 = Float . parseFloat ( tokens . nextToken ( ) ) ; float cy2 = Float . parseFloat ( tokens . nextToken ( ) ) ; float x = Float . parseFloat ( tokens . nextToken ( ) ) ; float y = Float . parseFloat ( tokens . nextToken ( ) ) ; path . curveTo ( x , y , cx1 , cy1 , cx2 , cy2 ) ; continue ; } } catch ( NumberFormatException e ) { throw new ParsingException ( element . getAttribute ( "id" ) , "Invalid token in points list" , e ) ; } } if ( ! reasonToBePath ) { return null ; } return path ;
|
public class JdbcDatabase {
    /**
     * Open the physical database via a JNDI {@link javax.sql.DataSource} lookup.
     *
     * Only attempted when running in a servlet environment (per the SERVLET
     * property on this database or its owner); all failures are reported by
     * returning {@code false} rather than throwing.
     *
     * @param strDataSource the JNDI name of the data source; the literal
     *                      "${dbname}" is replaced with the actual database name
     * @return {@code true} if a connection is (or already was) established
     * @exception DBException on open errors (declared; not thrown by this path)
     */
    public boolean setupDataSourceConnection(String strDataSource) throws DBException {
        // Already connected — nothing to do.
        if (m_JDBCConnection != null)
            return true;
        if ((DBConstants.TRUE.equalsIgnoreCase(this.getProperty(DBParams.SERVLET)))
                || ((this.getDatabaseOwner() != null)
                        && (DBConstants.TRUE.equalsIgnoreCase(this.getDatabaseOwner().getEnvironment().getProperty(DBParams.SERVLET))))) {
            // Datasource connections are only for servlets (NOTE: Letting this code run otherwise messes stuff up)
            try {
                // Lazily create the shared JNDI context.
                if (m_initialContext == null)
                    m_initialContext = new InitialContext();
                String strDatabaseName = this.getDatabaseName(true);
                // Substitute the database name into the JNDI lookup string.
                strDataSource = Utility.replace(strDataSource, "${dbname}", strDatabaseName);
                DataSource ds = (DataSource) m_initialContext.lookup(strDataSource);
                if (ds == null)
                    return false; // Failure
                m_JDBCConnection = ds.getConnection();
            } catch (NamingException e) {
                return false; // Failure
            } catch (SQLException e) {
                return false; // Failure
            } catch (NoClassDefFoundError e) {
                return false; // Failure — JNDI/DataSource classes missing at runtime
            } catch (Exception e) {
                return false; // Failure
            }
            return true; // Success!
        } else
            return false; // Not supported outside a servlet environment
    }
}
|
public class ClassPathBuilder { /** * Attempt to parse data of given resource in order to divine the real name
* of the class contained in the resource .
* @ param entry
* the resource */
private void parseClassName ( ICodeBaseEntry entry ) { } }
|
DataInputStream in = null ; try { InputStream resourceIn = entry . openResource ( ) ; if ( resourceIn == null ) { throw new NullPointerException ( "Got null resource" ) ; } in = new DataInputStream ( resourceIn ) ; ClassParserInterface parser = new ClassParser ( in , null , entry ) ; ClassNameAndSuperclassInfo . Builder builder = new ClassNameAndSuperclassInfo . Builder ( ) ; parser . parse ( builder ) ; String trueResourceName = builder . build ( ) . getClassDescriptor ( ) . toResourceName ( ) ; if ( ! trueResourceName . equals ( entry . getResourceName ( ) ) ) { entry . overrideResourceName ( trueResourceName ) ; } } catch ( IOException e ) { errorLogger . logError ( "Invalid class resource " + entry . getResourceName ( ) + " in " + entry , e ) ; } catch ( InvalidClassFileFormatException e ) { errorLogger . logError ( "Invalid class resource " + entry . getResourceName ( ) + " in " + entry , e ) ; } finally { IO . close ( in ) ; }
|
public class SSOAuthenticator { /** * Create an authentication data for ltpaToken
* @ param ssoToken
* @ return authenticationData */
private AuthenticationData createAuthenticationData ( HttpServletRequest req , HttpServletResponse res , String token , String oid ) { } }
|
AuthenticationData authenticationData = new WSAuthenticationData ( ) ; authenticationData . set ( AuthenticationData . HTTP_SERVLET_REQUEST , req ) ; authenticationData . set ( AuthenticationData . HTTP_SERVLET_RESPONSE , res ) ; if ( oid . equals ( LTPA_OID ) ) { authenticationData . set ( AuthenticationData . TOKEN64 , token ) ; } else { authenticationData . set ( AuthenticationData . JWT_TOKEN , token ) ; } authenticationData . set ( AuthenticationData . AUTHENTICATION_MECH_OID , oid ) ; return authenticationData ;
|
public class HttpRequestMessageImpl { /** * @ see com . ibm . ws . genericbnf . internal . BNFHeadersImpl # filterAdd ( com . ibm . wsspi .
* genericbnf . HeaderKeys , byte [ ] ) */
@ Override protected boolean filterAdd ( HeaderKeys key , byte [ ] value ) { } }
|
boolean rc = super . filterAdd ( key , value ) ; if ( HttpHeaderKeys . isWasPrivateHeader ( key . getName ( ) ) ) { rc = isPrivateHeaderTrusted ( key ) ; } return rc ;
|
public class TreeMap {
    /**
     * Returns the number of key-value mappings in this map which are available to
     * the transaction.
     *
     * @param transaction the transaction which sees the tree as this size
     *                    (may be null, in which case only committed entries count)
     * @return the number of key-value mappings in this map
     * @exception ObjectManagerException
     * @see com.ibm.ws.objectManager.Collection#size(com.ibm.ws.objectManager.Transaction)
     */
    public long size(Transaction transaction) throws ObjectManagerException {
        // No trace because this is used by toString(), and hence by trace itself;
        long sizeFound; // For return;
        synchronized (this) {
            // Start from the committed/available size, then add entries that this
            // transaction has added but not yet committed.
            sizeFound = availableSize; // Move through the map adding in any extra available entries.
            if (transaction != null) {
                Entry entry = firstEntry(transaction);
                while (entry != null) {
                    // Count pending additions that are locked by (visible to) this transaction.
                    if (entry.state == Entry.stateToBeAdded && entry.lockedBy(transaction))
                        sizeFound++;
                    entry = successor(entry, transaction);
                } // while (entry != null).
            } // if (transaction != null).
        } // synchronized (this).
        return sizeFound;
    }
}
|
public class AmazonRDSClient { /** * Deletes automated backups based on the source instance ' s < code > DbiResourceId < / code > value or the restorable
* instance ' s resource ID .
* @ param deleteDBInstanceAutomatedBackupRequest
* Parameter input for the < code > DeleteDBInstanceAutomatedBackup < / code > operation .
* @ return Result of the DeleteDBInstanceAutomatedBackup operation returned by the service .
* @ throws InvalidDBInstanceAutomatedBackupStateException
* The automated backup is in an invalid state . For example , this automated backup is associated with an
* active instance .
* @ throws DBInstanceAutomatedBackupNotFoundException
* No automated backup for this DB instance was found .
* @ sample AmazonRDS . DeleteDBInstanceAutomatedBackup
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / rds - 2014-10-31 / DeleteDBInstanceAutomatedBackup "
* target = " _ top " > AWS API Documentation < / a > */
@ Override public DBInstanceAutomatedBackup deleteDBInstanceAutomatedBackup ( DeleteDBInstanceAutomatedBackupRequest request ) { } }
|
request = beforeClientExecution ( request ) ; return executeDeleteDBInstanceAutomatedBackup ( request ) ;
|
public class STIXSchema { /** * Validate XML text retrieved from URL
* @ param url
* The URL object for the XML to be validated .
* @ return boolean True If the xmlText validates against the schema
* @ throws SAXException
* If the a validation ErrorHandler has not been set , and
* validation throws a SAXException */
public boolean validate ( URL url ) throws SAXException { } }
|
String xmlText = null ; try { xmlText = IOUtils . toString ( url . openStream ( ) ) ; } catch ( IOException e ) { throw new RuntimeException ( e ) ; } return validate ( xmlText ) ;
|
public class TileBoundingBoxUtils {
    /**
     * Get the latitude from the pixel location, bounding box, and image height.
     *
     * @param height      image height in pixels
     * @param boundingBox bounding box
     * @param pixel       pixel (vertical) location
     * @return latitude
     */
    public static double getLatitudeFromPixel(long height, BoundingBox boundingBox, float pixel) {
        // Delegate to the overload that takes separate total/partial bounding
        // boxes, using the same box for both.
        return getLatitudeFromPixel(height, boundingBox, boundingBox, pixel);
    }
}
|
public class SocketChannelListener {
    /**
     * Reports whether the listener has run out of worker threads: the pool is
     * at its maximum size and no thread is idle. On the first transition into
     * that state it logs a warning and latches the low/out flags.
     *
     * @return true when no threads are available
     * @see org.browsermob.proxy.jetty.http.HttpListener#isOutOfResources()
     */
    public boolean isOutOfResources() {
        boolean out = getThreads() == getMaxThreads() && getIdleThreads() == 0;
        if (out && !_isOut) {
            // First time we hit the limit: warn once and record when.
            log.warn("OUT OF THREADS: " + this);
            _warned = System.currentTimeMillis();
            _isLow = true;
            _isOut = true;
            // NOTE(review): the flags are not cleared here — presumably reset
            // elsewhere when threads free up; confirm in the rest of the class.
        }
        return out;
    }
}
|
public class Avicenna {
    /**
     * Adds a direct mapping between a type and an object.
     *
     * @param clazz      class type which should be injected
     * @param dependency dependency reference which should be copied to injection targets
     * @param <T>        the dependency type
     */
    public static <T> void defineDependency(Class<T> clazz, T dependency) {
        // Delegate to the qualified overload with no qualifier (null).
        defineDependency(clazz, null, dependency);
    }
}
|
public class Nfs3 {
    /**
     * Read data from a file handle.
     *
     * @param path       the path of the file (currently unused by this implementation;
     *                   the read is driven entirely by the file handle)
     * @param fileHandle file handle of the file
     * @param offset     offset of the file to read
     * @param length     the length of the data buffer
     * @param data       the buffer the read bytes are written into
     * @param pos        offset within {@code data} at which to start writing
     * @param eof        set to true when the read reached end-of-file
     * @return the number of bytes of data returned by the read; the number may
     *         be smaller than {@code length}
     * @throws IOException on RPC or NFS errors
     */
    public int read(String path, byte[] fileHandle, long offset, int length, final byte[] data, final int pos, final MutableBoolean eof) throws IOException {
        Nfs3ReadRequest request = new Nfs3ReadRequest(fileHandle, offset, length, _credential);
        // Anonymous handler: builds responses that write into the caller's buffer,
        // and propagates the EOF flag out through the MutableBoolean.
        NfsResponseHandler<Nfs3ReadResponse> responseHandler = new NfsResponseHandler<Nfs3ReadResponse>() {
            /* (non-Javadoc)
             * @see com.emc.ecs.nfsclient.rpc.RpcResponseHandler#makeNewResponse() */
            protected Nfs3ReadResponse makeNewResponse() {
                // Each response deserializes directly into data[] starting at pos.
                return new Nfs3ReadResponse(data, pos);
            }

            /* (non-Javadoc)
             * @see com.emc.ecs.nfsclient.nfs.NfsResponseHandler#checkResponse(com.emc.ecs.nfsclient.rpc.RpcRequest) */
            public void checkResponse(RpcRequest request) throws IOException {
                super.checkResponse(request);
                // Surface the server's EOF indication to the caller.
                eof.setValue(getResponse().isEof());
            }
        };
        _rpcWrapper.callRpcWrapped(request, responseHandler);
        return responseHandler.getResponse().getBytesRead();
    }
}
|
public class ClientDatabase { /** * Get this property .
* @ param strProperty The key to lookup .
* @ return The return value . */
public String getProperty ( String strProperty ) { } }
|
String value = super . getProperty ( strProperty ) ; if ( value == null ) if ( ( BaseDatabase . STARTING_ID . equalsIgnoreCase ( strProperty ) ) || ( BaseDatabase . ENDING_ID . equalsIgnoreCase ( strProperty ) ) ) value = this . getRemoteProperty ( strProperty , true ) ; return value ;
|
public class IndexMigration {
    /**
     * Checks if the given <code>index</code> needs to be migrated and migrates
     * it in place if so. Indexes at format version V3 or later, or already
     * migrated ones, are left untouched.
     *
     * @param index            the index to check and migrate if needed
     * @param directoryManager the directory manager
     * @throws IOException if an error occurs while migrating the index
     */
    public static void migrate(PersistentIndex index, DirectoryManager directoryManager) throws IOException {
        Directory indexDir = index.getDirectory();
        log.debug("Checking {} ...", indexDir);
        ReadOnlyIndexReader reader = index.getReadOnlyIndexReader();
        try {
            if (IndexFormatVersion.getVersion(reader).getVersion() >= IndexFormatVersion.V3.getVersion()) {
                // index was created with Jackrabbit 1.5 or higher
                // no need for migration
                log.debug("IndexFormatVersion >= V3, no migration needed");
                return;
            }
            // assert: there is at least one node in the index, otherwise the
            // index format version would be at least V3
            TermEnum terms = reader.terms(new Term(FieldNames.PROPERTIES, ""));
            try {
                Term t = terms.term();
                // The old format used \uFFFF as a separator in property terms;
                // its absence means a previous migration already ran.
                if (t.text().indexOf('\uFFFF') == -1) {
                    log.debug("Index already migrated");
                    return;
                }
            } finally {
                terms.close();
            }
        } finally {
            // Release readers/writers before touching the directory on disk.
            reader.release();
            index.releaseWriterAndReaders();
        }
        // if we get here then the index must be migrated
        log.debug("Index requires migration {}", indexDir);
        // Build the migrated copy under a temporary "_v2.3" name, then swap it in.
        String migrationName = index.getName() + "_v2.3";
        if (directoryManager.hasDirectory(migrationName)) {
            // Leftover from an earlier interrupted migration — start fresh.
            directoryManager.delete(migrationName);
        }
        Directory migrationDir = directoryManager.getDirectory(migrationName);
        try {
            IndexWriter writer = new IndexWriter(migrationDir, new IndexWriterConfig(Version.LUCENE_36, new JcrStandartAnalyzer()));
            try {
                // MigrationIndexReader rewrites documents to the new format as they are copied.
                IndexReader r = new MigrationIndexReader(IndexReader.open(index.getDirectory()));
                try {
                    writer.addIndexes(new IndexReader[] { r });
                    // NOTE(review): writer.close() is called here and again in the
                    // finally below — appears intentional to flush before r.close();
                    // confirm double-close is benign for this Lucene version.
                    writer.close();
                } finally {
                    r.close();
                }
            } finally {
                writer.close();
            }
        } finally {
            migrationDir.close();
        }
        // Replace the old index directory with the migrated one.
        directoryManager.delete(index.getName());
        if (!directoryManager.rename(migrationName, index.getName())) {
            throw new IOException("failed to move migrated directory " + migrationDir);
        }
        log.info("Migrated " + index.getName());
    }
}
|
public class SibRaEndpointActivation { /** * Called to indicate that a messaging engine is stopping . Removes it from
* the set of active messaging engines and closes any open connection .
* @ param messagingEngine
* the messaging engine that is stopping
* @ param mode
* the stop mode */
public void messagingEngineStopping ( final JsMessagingEngine messagingEngine , final int mode ) { } }
|
final String methodName = "messagingEngineStopping" ; if ( TraceComponent . isAnyTracingEnabled ( ) && TRACE . isEntryEnabled ( ) ) { SibTr . entry ( this , TRACE , methodName , new Object [ ] { messagingEngine , mode } ) ; } closeConnection ( messagingEngine ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && TRACE . isEntryEnabled ( ) ) { SibTr . exit ( this , TRACE , methodName ) ; }
|
public class PassiveState { /** * Performs a local query . */
protected CompletableFuture < QueryResponse > queryLocal ( QueryEntry entry ) { } }
|
CompletableFuture < QueryResponse > future = new CompletableFuture < > ( ) ; sequenceQuery ( entry , future ) ; return future ;
|
public class RtfWriter2 { /** * Adds an Element to the Document
* @ param element The element to be added
* @ return < code > false < / code >
* @ throws DocumentException */
public boolean add ( Element element ) throws DocumentException { } }
|
if ( pause ) { return false ; } RtfBasicElement [ ] rtfElements = rtfDoc . getMapper ( ) . mapElement ( element ) ; if ( rtfElements . length != 0 ) { for ( int i = 0 ; i < rtfElements . length ; i ++ ) { if ( rtfElements [ i ] != null ) { rtfDoc . add ( rtfElements [ i ] ) ; } } return true ; } else { return false ; }
|
public class MultivaluedPersonAttributeUtils { /** * Convert the & lt ; String , Object & gt ; map to a & lt ; String , List & lt ; Object & gt ; & gt ; map by simply wrapping
* each value in a singleton ( read - only ) List
* @ param seed Map of objects
* @ return Map where each value is a List with the value in it */
public static Map < String , List < Object > > toMultivaluedMap ( final Map < String , Object > seed ) { } }
|
Validate . notNull ( seed , "seed can not be null" ) ; final Map < String , List < Object > > multiSeed = new LinkedHashMap < > ( seed . size ( ) ) ; for ( final Map . Entry < String , Object > seedEntry : seed . entrySet ( ) ) { final String seedName = seedEntry . getKey ( ) ; final Object seedValue = seedEntry . getValue ( ) ; multiSeed . put ( seedName , Collections . singletonList ( seedValue ) ) ; } return multiSeed ;
|
public class MinerAdapter { /** * Gets the first position of the modification feature .
* @ param mf modification feature
* @ return first location */
public int getPositionStart ( ModificationFeature mf ) { } }
|
Set vals = SITE_ACC . getValueFromBean ( mf ) ; if ( ! vals . isEmpty ( ) ) { return ( ( Integer ) vals . iterator ( ) . next ( ) ) ; } vals = INTERVAL_BEGIN_ACC . getValueFromBean ( mf ) ; if ( ! vals . isEmpty ( ) ) { return ( ( Integer ) vals . iterator ( ) . next ( ) ) ; } return - 1 ;
|
public class RedisMock { /** * / * IRedisKeys implementations */
@ Override public synchronized Long del ( final String ... keys ) { } }
|
long deleted = 0L ; String key ; for ( int idx = 0 ; idx < keys . length ; idx += 1 ) { key = keys [ idx ] ; timers . remove ( key ) ; expirations . remove ( key ) ; for ( IRedisCache cache : caches ) { if ( cache . exists ( key ) ) { cache . remove ( key ) ; keyModified ( key ) ; deleted += 1L ; break ; } } } return deleted ;
|
public class SleUtility { /** * Sorts a list of values based on a given sort field using a selection sort .
* @ param values List of values ( implements Extendable ) to sort .
* @ param sort The sort field to sort on .
* @ param ascending Sort ascending / descending .
* @ return Sorted list of values */
public static < T extends Extendable > List < T > sort ( final List < T > values , final Sort sort , final boolean ascending ) { } }
|
final SortableList < T > list = getSortableList ( values ) ; list . sortOnProperty ( sort , ascending , new SortStrategy ( ) ) ; return list ;
|
public class BigDecimalMath {
    /**
     * Calculates {@link BigDecimal} x to the power of the integer value y (x<sup>y</sup>)
     * using exponentiation by squaring.
     *
     * <p>The value y MUST be an integer value.</p>
     *
     * @param x           the {@link BigDecimal} value to take to the power
     * @param integerY    the {@link BigDecimal} <strong>integer</strong> value to serve as exponent
     * @param mathContext the {@link MathContext} used for the result
     * @return the calculated x to the power of y with the precision specified in the <code>mathContext</code>
     * @see #pow(BigDecimal, long, MathContext)
     */
    private static BigDecimal powInteger(BigDecimal x, BigDecimal integerY, MathContext mathContext) {
        if (fractionalPart(integerY).signum() != 0) {
            throw new IllegalArgumentException("Not integer value: " + integerY);
        }
        // Negative exponent: x^(-y) = 1 / x^y.
        if (integerY.signum() < 0) {
            return ONE.divide(powInteger(x, integerY.negate(), mathContext), mathContext);
        }
        // Work at extra precision so intermediate squarings don't erode the result.
        MathContext mc = new MathContext(Math.max(mathContext.getPrecision(), -integerY.scale()) + 30, mathContext.getRoundingMode());
        BigDecimal result = ONE;
        // Exponentiation by squaring: peel off odd bits, square for even ones.
        while (integerY.signum() > 0) {
            BigDecimal halfY = integerY.divide(TWO, mc);
            if (fractionalPart(halfY).signum() != 0) {
                // odd exponent -> multiply result with x
                result = result.multiply(x, mc);
                integerY = integerY.subtract(ONE);
                halfY = integerY.divide(TWO, mc);
            }
            if (halfY.signum() > 0) {
                // even exponent -> square x
                x = x.multiply(x, mc);
            }
            integerY = halfY;
        }
        // Round back down to the caller's requested precision.
        return round(result, mathContext);
    }
}
|
public class sslpolicylabel_sslpolicy_binding { /** * Use this API to fetch sslpolicylabel _ sslpolicy _ binding resources of given name . */
public static sslpolicylabel_sslpolicy_binding [ ] get ( nitro_service service , String labelname ) throws Exception { } }
|
sslpolicylabel_sslpolicy_binding obj = new sslpolicylabel_sslpolicy_binding ( ) ; obj . set_labelname ( labelname ) ; sslpolicylabel_sslpolicy_binding response [ ] = ( sslpolicylabel_sslpolicy_binding [ ] ) obj . get_resources ( service ) ; return response ;
|
public class CryptoPrimitives { /** * Return PrivateKey from pem bytes .
* @ param pemKey pem - encoded private key
* @ return */
public PrivateKey bytesToPrivateKey ( byte [ ] pemKey ) throws CryptoException { } }
|
PrivateKey pk = null ; CryptoException ce = null ; try { PemReader pr = new PemReader ( new StringReader ( new String ( pemKey ) ) ) ; PemObject po = pr . readPemObject ( ) ; PEMParser pem = new PEMParser ( new StringReader ( new String ( pemKey ) ) ) ; if ( po . getType ( ) . equals ( "PRIVATE KEY" ) ) { pk = new JcaPEMKeyConverter ( ) . getPrivateKey ( ( PrivateKeyInfo ) pem . readObject ( ) ) ; } else { logger . trace ( "Found private key with type " + po . getType ( ) ) ; PEMKeyPair kp = ( PEMKeyPair ) pem . readObject ( ) ; pk = new JcaPEMKeyConverter ( ) . getPrivateKey ( kp . getPrivateKeyInfo ( ) ) ; } } catch ( Exception e ) { throw new CryptoException ( "Failed to convert private key bytes" , e ) ; } return pk ;
|
public class SQLMergeClause {
    /**
     * Execute the clause and return the generated key with the type of the given path.
     * If no rows were created, null is returned; otherwise the key of the first row
     * is returned.
     *
     * @param <T>  the key type
     * @param path path for key
     * @return generated key
     */
    @SuppressWarnings("unchecked")
    @Nullable
    public <T> T executeWithKey(Path<T> path) {
        // Delegate to the (Class, Path) overload, deriving the key class from the path's type.
        return executeWithKey((Class<T>) path.getType(), path);
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.