signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class MSPDIWriter { /** * This method writes a single predecessor link to the MSPDI file . * @ param taskID The task UID * @ param type The predecessor type * @ param lag The lag duration * @ return A new link to be added to the MSPDI file */ private Project . Tasks . Task . PredecessorLink writePredecessor ( Integer taskID , RelationType type , Duration lag ) { } }
Project . Tasks . Task . PredecessorLink link = m_factory . createProjectTasksTaskPredecessorLink ( ) ; link . setPredecessorUID ( NumberHelper . getBigInteger ( taskID ) ) ; link . setType ( BigInteger . valueOf ( type . getValue ( ) ) ) ; link . setCrossProject ( Boolean . FALSE ) ; // SF - 300 : required to keep P6 happy when importing MSPDI files if ( lag != null && lag . getDuration ( ) != 0 ) { double linkLag = lag . getDuration ( ) ; if ( lag . getUnits ( ) != TimeUnit . PERCENT && lag . getUnits ( ) != TimeUnit . ELAPSED_PERCENT ) { linkLag = 10.0 * Duration . convertUnits ( linkLag , lag . getUnits ( ) , TimeUnit . MINUTES , m_projectFile . getProjectProperties ( ) ) . getDuration ( ) ; } link . setLinkLag ( BigInteger . valueOf ( ( long ) linkLag ) ) ; link . setLagFormat ( DatatypeConverter . printDurationTimeUnits ( lag . getUnits ( ) , false ) ) ; } else { // SF - 329 : default required to keep Powerproject happy when importing MSPDI files link . setLinkLag ( BIGINTEGER_ZERO ) ; link . setLagFormat ( DatatypeConverter . printDurationTimeUnits ( m_projectFile . getProjectProperties ( ) . getDefaultDurationUnits ( ) , false ) ) ; } return ( link ) ;
public class ClassEntry { /** * Returns true if the source file has been modified . */ @ Override public boolean logModified ( Logger log ) { } }
if ( _depend . logModified ( log ) ) { return true ; } else if ( _sourcePath == null ) return false ; else if ( _sourcePath . getLastModified ( ) != _sourceLastModified ) { log . info ( "source modified time: " + _sourcePath + " old:" + new Date ( _sourceLastModified ) + " new:" + new Date ( _sourcePath . getLastModified ( ) ) ) ; return true ; } else if ( _sourcePath . length ( ) != _sourceLength ) { log . info ( "source modified length: " + _sourcePath + " old:" + _sourceLength + " new:" + _sourcePath . length ( ) ) ; return true ; } else { return false ; }
public class Matcher { /** * Resets this matcher and then attempts to find the next subsequence of * the input sequence that matches the pattern , starting at the specified * index . * < p > If the match succeeds then more information can be obtained via the * < tt > start < / tt > , < tt > end < / tt > , and < tt > group < / tt > methods , and subsequent * invocations of the { @ link # find ( ) } method will start at the first * character not matched by this match . < / p > * @ throws IndexOutOfBoundsException * If start is less than zero or if start is greater than the * length of the input sequence . * @ return < tt > true < / tt > if , and only if , a subsequence of the input * sequence starting at the given index matches this matcher ' s * pattern */ public boolean find ( int start ) { } }
if ( start < 0 || start > input . length ( ) ) { throw new IndexOutOfBoundsException ( "start=" + start + "; length=" + input . length ( ) ) ; } // synchronized ( this ) { matchFound = findImpl ( address , start , matchOffsets ) ; return matchFound ;
public class BatchGetResourceConfigResult { /** * A list of resource keys that were not processed with the current response . The unprocessesResourceKeys value is * in the same form as ResourceKeys , so the value can be directly provided to a subsequent BatchGetResourceConfig * operation . If there are no unprocessed resource keys , the response contains an empty unprocessedResourceKeys * list . * @ param unprocessedResourceKeys * A list of resource keys that were not processed with the current response . The unprocessesResourceKeys * value is in the same form as ResourceKeys , so the value can be directly provided to a subsequent * BatchGetResourceConfig operation . If there are no unprocessed resource keys , the response contains an * empty unprocessedResourceKeys list . */ public void setUnprocessedResourceKeys ( java . util . Collection < ResourceKey > unprocessedResourceKeys ) { } }
if ( unprocessedResourceKeys == null ) { this . unprocessedResourceKeys = null ; return ; } this . unprocessedResourceKeys = new com . amazonaws . internal . SdkInternalList < ResourceKey > ( unprocessedResourceKeys ) ;
public class MapBuilder { /** * Puts all entries from otherMap into the map . * @ param otherMap Map to populate entries from . * @ return This builder . */ public MapBuilder < K , V > putAll ( Map < K , V > otherMap ) { } }
map . putAll ( otherMap ) ; return this ;
public class Util {
   /**
    * Given a class name return an object of that class. The cl parameter is
    * used to check that the named class is an instance of that class.
    *
    * @param className String class name
    * @param cl Class expected
    * @return Object checked to be an instance of that class
    * @throws Exception if the class cannot be found or instantiated, or if the
    *         created object is not an instance of {@code cl}
    */
   public static Object getObject(final String className, final Class cl) throws Exception {
      final Object o;
      try {
         // Class.forName throws ClassNotFoundException and newInstance never
         // returns null, so the old "o == null" check was dead code and its
         // "Class ... not found" message was unreachable — both removed.
         o = Class.forName(className).newInstance();
      } catch (Exception e) {
         // checked/runtime failures propagate unchanged
         throw e;
      } catch (Throwable t) {
         // Errors (e.g. NoClassDefFoundError) are wrapped so callers only see Exception
         throw new Exception(t);
      }
      if (!cl.isInstance(o)) {
         throw new Exception("Class " + className + " is not a subclass of " + cl.getName());
      }
      return o;
   }
}
public class DBaseFileAttributePool { /** * Get the attribute pool that corresponds to the specified file . * @ param dbaseFile is the file from which the attributes could be extracted . * @ return the pool associated to the given file . */ static DBaseFileAttributePool getPool ( URL dbaseFile ) { } }
DBaseFileAttributePool pool = null ; if ( dbaseFile != null ) { if ( pools == null ) { pools = new WeakValueTreeMap < > ( ) ; } pool = pools . get ( dbaseFile . toExternalForm ( ) ) ; if ( pool == null ) { pool = new DBaseFileAttributePool ( dbaseFile ) ; pools . put ( dbaseFile . toExternalForm ( ) , pool ) ; } } return pool ;
public class PeerManager { /** * from interface PeerProvider */ public void invokeRequest ( ClientObject caller , byte [ ] serializedAction , InvocationService . ResultListener listener ) { } }
NodeRequest request = null ; try { ObjectInputStream oin = new ObjectInputStream ( new ByteArrayInputStream ( serializedAction ) ) ; request = ( NodeRequest ) oin . readObject ( ) ; _injector . injectMembers ( request ) ; request . invoke ( listener ) ; } catch ( Exception e ) { log . warning ( "Failed to execute node request" , "from" , ( caller == null ) ? "self" : caller . who ( ) , "request" , request , "serializedSize" , serializedAction . length , e ) ; listener . requestFailed ( "Failed to execute node request" ) ; }
public class CenteringTransform { /** * Change the coordinate system of { @ code y } from the * " centered " graphical coordinate system to the global document coordinate system . * @ param y the y graphical coordinate to convert . * @ return the x coordinate in the global coordinate system . */ @ Pure public double toGlobalY ( double y ) { } }
final double adjustedY = y - this . translationY . get ( ) ; return this . invertY . get ( ) ? - adjustedY : adjustedY ;
public class CPDefinitionLocalizationPersistenceImpl { /** * Returns the number of cp definition localizations . * @ return the number of cp definition localizations */ @ Override public int countAll ( ) { } }
Long count = ( Long ) finderCache . getResult ( FINDER_PATH_COUNT_ALL , FINDER_ARGS_EMPTY , this ) ; if ( count == null ) { Session session = null ; try { session = openSession ( ) ; Query q = session . createQuery ( _SQL_COUNT_CPDEFINITIONLOCALIZATION ) ; count = ( Long ) q . uniqueResult ( ) ; finderCache . putResult ( FINDER_PATH_COUNT_ALL , FINDER_ARGS_EMPTY , count ) ; } catch ( Exception e ) { finderCache . removeResult ( FINDER_PATH_COUNT_ALL , FINDER_ARGS_EMPTY ) ; throw processException ( e ) ; } finally { closeSession ( session ) ; } } return count . intValue ( ) ;
public class BlockInStream { /** * Creates a { @ link BlockInStream } to read from a specific remote server . Should only be used * in cases where the data source and method of reading is known , ie . worker - worker * communication . * @ param context the file system context * @ param blockId the block id * @ param address the address of the gRPC data server * @ param blockSource the source location of the block * @ param blockSize the size of the block * @ param ufsOptions the ufs read options * @ return the { @ link BlockInStream } created */ public static BlockInStream createRemoteBlockInStream ( FileSystemContext context , long blockId , WorkerNetAddress address , BlockInStreamSource blockSource , long blockSize , Protocol . OpenUfsBlockOptions ufsOptions ) { } }
long chunkSize = context . getClusterConf ( ) . getBytes ( PropertyKey . USER_NETWORK_READER_CHUNK_SIZE_BYTES ) ; ReadRequest readRequest = ReadRequest . newBuilder ( ) . setBlockId ( blockId ) . setOpenUfsBlockOptions ( ufsOptions ) . setChunkSize ( chunkSize ) . buildPartial ( ) ; DataReader . Factory factory = new GrpcDataReader . Factory ( context , address , readRequest . toBuilder ( ) . buildPartial ( ) ) ; return new BlockInStream ( factory , address , blockSource , blockId , blockSize ) ;
public class KieModuleDeploymentHelperImpl { /** * Create a KJar for deployment ; * @ param releaseId Release ( deployment ) id . * @ param resourceFilePaths List of resource file paths * @ param kbaseName The name of the { @ link KieBase } * @ param ksessionName The name of the { @ link KieSession } . * @ param dependencies List of dependencies to add * @ return The { @ link InternalKieModule } which represents the KJar . */ private synchronized KieModule internalCreateKieJar ( ReleaseId releaseId , String kbaseName , String ksessionName , List < String > resourceFilePaths , List < Class < ? > > classes , List < String > dependencies ) { } }
ReleaseId [ ] releaseIds = { } ; if ( dependencies != null && dependencies . size ( ) > 0 ) { List < ReleaseId > depReleaseIds = new ArrayList < ReleaseId > ( ) ; for ( String dep : dependencies ) { String [ ] gav = dep . split ( ":" ) ; if ( gav . length != 3 ) { throw new IllegalArgumentException ( "Dependendency id '" + dep + "' does not conform to the format <groupId>:<artifactId>:<version> (Classifiers are not accepted)." ) ; } depReleaseIds . add ( new ReleaseIdImpl ( gav [ 0 ] , gav [ 1 ] , gav [ 2 ] ) ) ; } releaseIds = depReleaseIds . toArray ( new ReleaseId [ depReleaseIds . size ( ) ] ) ; } config . pomText = getPomText ( releaseId , releaseIds ) ; KieFileSystem kfs = createKieFileSystemWithKProject ( kbaseName , ksessionName ) ; kfs . writePomXML ( this . config . pomText ) ; List < KJarResource > resourceFiles = loadResources ( resourceFilePaths ) ; for ( KJarResource resource : resourceFiles ) { kfs . write ( "src/main/resources/" + kbaseName + "/" + resource . name , resource . content ) ; } if ( classes != null ) { for ( Class < ? > userClass : classes ) { addClass ( userClass , kfs ) ; } } KieBuilder kieBuilder = config . getKieServicesInstance ( ) . newKieBuilder ( kfs ) ; int buildMsgs = 0 ; for ( Message buildMsg : kieBuilder . buildAll ( ) . getResults ( ) . getMessages ( ) ) { System . out . println ( buildMsg . getPath ( ) + " : " + buildMsg . getText ( ) ) ; ++ buildMsgs ; } if ( buildMsgs > 0 ) { throw new RuntimeException ( "Unable to build KieModule, see the " + buildMsgs + " messages above." ) ; } return ( InternalKieModule ) kieBuilder . getKieModule ( ) ;
public class RemoteEventDecoder { /** * Decodes a remote event from the byte array data . * @ param data the byte array data * @ return a remote event object * @ throws RemoteRuntimeException */ @ Override public RemoteEvent < T > decode ( final byte [ ] data ) { } }
final WakeMessagePBuf pbuf ; try { pbuf = WakeMessagePBuf . parseFrom ( data ) ; return new RemoteEvent < T > ( null , null , pbuf . getSeq ( ) , decoder . decode ( pbuf . getData ( ) . toByteArray ( ) ) ) ; } catch ( final InvalidProtocolBufferException e ) { throw new RemoteRuntimeException ( e ) ; }
public class ResolveElasticsearchStep { /** * Resolve the artifact and return a file reference to the local file . */ private File resolveArtifact ( ClusterConfiguration config ) throws ArtifactException , IOException { } }
String flavour = config . getFlavour ( ) ; String version = config . getVersion ( ) ; String artifactId = getArtifactId ( flavour , version ) ; String classifier = getArtifactClassifier ( version ) ; String type = getArtifactType ( version ) ; ElasticsearchArtifact artifactReference = new ElasticsearchArtifact ( artifactId , version , classifier , type ) ; config . getLog ( ) . debug ( "Artifact ref: " + artifactReference ) ; PluginArtifactResolver artifactResolver = config . getArtifactResolver ( ) ; try { config . getLog ( ) . debug ( "Resolving artifact against the local maven repo (stage 1)" ) ; return artifactResolver . resolveArtifact ( artifactReference . getArtifactCoordinates ( ) ) ; } catch ( ArtifactException e ) { config . getLog ( ) . debug ( "Artifact not found; downloading and installing it" ) ; File tempFile = downloadArtifact ( artifactReference , config ) ; config . getLog ( ) . debug ( "Installing " + tempFile + " in the local maven repo" ) ; config . getArtifactInstaller ( ) . installArtifact ( artifactReference . getGroupId ( ) , artifactReference . getArtifactId ( ) , artifactReference . getVersion ( ) , artifactReference . getClassifier ( ) , artifactReference . getType ( ) , tempFile ) ; config . getLog ( ) . debug ( "Resolving artifact against the local maven repo (stage 2)" ) ; return artifactResolver . resolveArtifact ( artifactReference . getArtifactCoordinates ( ) ) ; }
public class RestoreCertificateAuthorityRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( RestoreCertificateAuthorityRequest restoreCertificateAuthorityRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( restoreCertificateAuthorityRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( restoreCertificateAuthorityRequest . getCertificateAuthorityArn ( ) , CERTIFICATEAUTHORITYARN_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class ObjectFactory { /** * Create an instance of { @ link JAXBElement } { @ code < } { @ link AbstractTextureType } { @ code > } * @ param value * Java instance representing xml element ' s value . * @ return * the new instance of { @ link JAXBElement } { @ code < } { @ link AbstractTextureType } { @ code > } */ @ XmlElementDecl ( namespace = "http://www.opengis.net/citygml/appearance/1.0" , name = "_Texture" , substitutionHeadNamespace = "http://www.opengis.net/citygml/appearance/1.0" , substitutionHeadName = "_SurfaceData" ) public JAXBElement < AbstractTextureType > create_Texture ( AbstractTextureType value ) { } }
return new JAXBElement < AbstractTextureType > ( __Texture_QNAME , AbstractTextureType . class , null , value ) ;
public class ComThread {
    /**
     * Adds a {@link Com4jObject} to the live objects of this {@link ComThread}.
     * This method increases the live object count of this thread and fires a
     * {@link ComObjectListener#onNewObject(Com4jObject)} event to all listeners.
     *
     * @param r The new {@link Com4jObject}
     */
    public synchronized void addLiveObject(Com4jObject r) {
        // TODO: why is this public?
        if (r instanceof Wrapper) {
            liveComObjects.add(((Wrapper) r).ref);
        }
        // NOTE(review): iterates backwards — presumably so a listener removing
        // itself during the callback does not break traversal; confirm.
        for (int i = listeners.size() - 1; i >= 0; i--) {
            listeners.get(i).onNewObject(r);
        }
    }
}
public class RPCHelper { /** * public static SyncObject syncObject = new SyncObject ( " MapSync " ) ; */ public static < I , T extends I > void registerInterface ( final Class < I > interfaceClass , final T instance , final RSBLocalServer server ) throws CouldNotPerformException { } }
for ( final Method method : interfaceClass . getMethods ( ) ) { if ( method . getAnnotation ( RPCMethod . class ) != null ) { boolean legacy = false ; try { legacy = JPService . getProperty ( JPRSBLegacyMode . class ) . getValue ( ) ; } catch ( JPNotAvailableException e ) { // if not available just register legacy methods } // if legacy register always , else only register if not marked as legacy if ( legacy || ! method . getAnnotation ( RPCMethod . class ) . legacy ( ) ) { registerMethod ( method , instance , server ) ; } } }
public class Console { /** * Changes node path in the URL displayed by browser . * @ param path the path to the node . * @ param changeHistory if true store URL changes in browser ' s history . */ public void changePathInURL ( String path , boolean changeHistory ) { } }
jcrURL . setPath ( path ) ; if ( changeHistory ) { htmlHistory . newItem ( jcrURL . toString ( ) , false ) ; }
public class DeploymentOperations { /** * Creates an operation to replace deployment content to a running server . The previous content is undeployed , then * the new content is deployed , followed by the previous content being removed . * @ param deployments the set deployment used to replace existing deployments which match the same name * @ return the deploy operation */ public static Operation createReplaceOperation ( final Set < Deployment > deployments ) { } }
Assertions . requiresNotNullOrNotEmptyParameter ( "deployments" , deployments ) ; final CompositeOperationBuilder builder = CompositeOperationBuilder . create ( true ) ; for ( Deployment deployment : deployments ) { addReplaceOperationSteps ( builder , deployment ) ; } return builder . build ( ) ;
public class LiKafkaSchemaRegistry { /** * Register a schema to the Kafka schema registry * @ param schema * @ param post * @ return schema ID of the registered schema * @ throws SchemaRegistryException if registration failed */ public synchronized MD5Digest register ( Schema schema , PostMethod post ) throws SchemaRegistryException { } }
// Change namespace if override specified if ( this . namespaceOverride . isPresent ( ) ) { schema = AvroUtils . switchNamespace ( schema , this . namespaceOverride . get ( ) ) ; } LOG . info ( "Registering schema " + schema . toString ( ) ) ; post . addParameter ( "schema" , schema . toString ( ) ) ; HttpClient httpClient = this . borrowClient ( ) ; try { LOG . debug ( "Loading: " + post . getURI ( ) ) ; int statusCode = httpClient . executeMethod ( post ) ; if ( statusCode != HttpStatus . SC_CREATED ) { throw new SchemaRegistryException ( "Error occurred while trying to register schema: " + statusCode ) ; } String response ; response = post . getResponseBodyAsString ( ) ; if ( response != null ) { LOG . info ( "Received response " + response ) ; } String schemaKey ; Header [ ] headers = post . getResponseHeaders ( SCHEMA_ID_HEADER_NAME ) ; if ( headers . length != 1 ) { throw new SchemaRegistryException ( "Error reading schema id returned by registerSchema call: headers.length = " + headers . length ) ; } else if ( ! headers [ 0 ] . getValue ( ) . startsWith ( SCHEMA_ID_HEADER_PREFIX ) ) { throw new SchemaRegistryException ( "Error parsing schema id returned by registerSchema call: header = " + headers [ 0 ] . getValue ( ) ) ; } else { LOG . info ( "Registered schema successfully" ) ; schemaKey = headers [ 0 ] . getValue ( ) . substring ( SCHEMA_ID_HEADER_PREFIX . length ( ) ) ; } MD5Digest schemaId = MD5Digest . fromString ( schemaKey ) ; return schemaId ; } catch ( Throwable t ) { throw new SchemaRegistryException ( t ) ; } finally { post . releaseConnection ( ) ; this . httpClientPool . returnObject ( httpClient ) ; }
public class Quaternionf { /** * Set this { @ link Quaternionf } to a rotation of the given angle in radians about the supplied * axis , all of which are specified via the { @ link AxisAngle4f } . * @ see # rotationAxis ( float , float , float , float ) * @ param axisAngle * the { @ link AxisAngle4f } giving the rotation angle in radians and the axis to rotate about * @ return this */ public Quaternionf rotationAxis ( AxisAngle4f axisAngle ) { } }
return rotationAxis ( axisAngle . angle , axisAngle . x , axisAngle . y , axisAngle . z ) ;
public class Expressive { /** * Return the subject when a polled sample of the feature is { @ code true } . * Uses a default ticker . */ public < S > S when ( S subject , Feature < ? super S , Boolean > feature ) { } }
return when ( subject , feature , eventually ( ) , isQuietlyTrue ( ) ) ;
public class MatcherLazyAssert { public static < T > void assertThat ( String reason , T actual , Matcher < ? super T > matcher ) { } }
if ( ! matcher . matches ( actual ) ) { Description description = new StringDescription ( ) ; description . appendText ( reason ) . appendText ( "\nExpected: " ) . appendDescriptionOf ( matcher ) . appendText ( "\n but: " ) ; matcher . describeMismatch ( actual , description ) ; throw new LazyAssertionError ( description . toString ( ) ) ; }
public class RocksDbUtils { /** * Builds RocksDb { @ link ReadOptions } . * @ param isTailing * @ return */ public static ReadOptions buildReadOptions ( boolean isTailing ) { } }
ReadOptions readOptions = new ReadOptions ( ) ; readOptions . setTailing ( isTailing ) ; return readOptions ;
public class AsyncLibrary {
    /**
     * Terminates the native IOCB associated with the given completion key.
     * The native call is skipped entirely on platforms where it is a no-op,
     * and the AIO-initialized state is re-checked under the lock before the
     * native call is made.
     *
     * @param theKey the completion key whose native IOCB should be terminated
     * @see com.ibm.io.async.IAsyncProvider#terminateIOCB(com.ibm.io.async.CompletionKey)
     */
    @Override
    public void terminateIOCB(CompletionKey theKey) {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
            Tr.entry(tc, "terminateIOCB");
        }
        // termIOCB will cross into native code, so do not call termIOCB
        // on unix platforms, since they don't do anything in this method
        if (doNativeIOCBInitAndTerm && AIO_INITIALIZED == aioInitialized) {
            synchronized (oneAtATime) {
                if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                    Tr.debug(tc, "have lock");
                }
                // re-check under the lock: the state may have changed while waiting for it
                if (aioInitialized == AIO_INITIALIZED) {
                    try {
                        aio_termIOCB(theKey.getAddress());
                    } catch (AsyncException ae) {
                        // just log the error and go on
                        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                            Tr.debug(tc, "Error occured while terminating IOCB" + ae.getMessage());
                        }
                    } catch (Throwable t) {
                        // unexpected failure from the native layer: ignored during
                        // shutdown, otherwise escalated as a RuntimeException
                        if (aioInitialized != AIO_SHUTDOWN) {
                            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                                Tr.debug(tc, "caught throwable: " + t);
                            }
                            throw new RuntimeException("Throwable caught from aio dll: aio_termIOCB: " + t.getMessage());
                        }
                    }
                }
            } // end - sync
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
            Tr.exit(tc, "terminateIOCB");
        }
    }
}
public class ApiOvhCloud { /** * Update a volume * REST : PUT / cloud / project / { serviceName } / volume / { volumeId } * @ param description [ required ] Volume description * @ param name [ required ] Volume name * @ param serviceName [ required ] Project id * @ param volumeId [ required ] Volume id */ public OvhVolume project_serviceName_volume_volumeId_PUT ( String serviceName , String volumeId , String description , String name ) throws IOException { } }
String qPath = "/cloud/project/{serviceName}/volume/{volumeId}" ; StringBuilder sb = path ( qPath , serviceName , volumeId ) ; HashMap < String , Object > o = new HashMap < String , Object > ( ) ; addBody ( o , "description" , description ) ; addBody ( o , "name" , name ) ; String resp = exec ( qPath , "PUT" , sb . toString ( ) , o ) ; return convertTo ( resp , OvhVolume . class ) ;
public class Config { /** * Removes a configuration variable from the given scope . * < p > Removing a configuration variable is performed as if each scope had * its own namespace , that is , the same configuration variable name in one * scope does not impact one stored in a different scope . * @ param pc Page context from which the configuration variable is to be * removed * @ param name Configuration variable name * @ param scope Scope from which the configuration variable is to be * removed */ public static void remove ( PageContext pc , String name , int scope ) { } }
switch ( scope ) { case PageContext . PAGE_SCOPE : pc . removeAttribute ( name + PAGE_SCOPE_SUFFIX , scope ) ; break ; case PageContext . REQUEST_SCOPE : pc . removeAttribute ( name + REQUEST_SCOPE_SUFFIX , scope ) ; break ; case PageContext . SESSION_SCOPE : pc . removeAttribute ( name + SESSION_SCOPE_SUFFIX , scope ) ; break ; case PageContext . APPLICATION_SCOPE : pc . removeAttribute ( name + APPLICATION_SCOPE_SUFFIX , scope ) ; break ; default : throw new IllegalArgumentException ( "unknown scope" ) ; }
public class Transform3D { /** * Rotate the object . * This function is equivalent to ( where r is the translation * of the quaternion as a 3x3 matrix ) : * < pre > * this = this * [ r r r 0 ] * [ r r r 0 ] * [ r r r 0 ] * [ 0 0 0 1 ] * < / pre > * @ param rotation */ public void rotate ( Quaternion rotation ) { } }
Transform3D m = new Transform3D ( ) ; m . makeRotationMatrix ( rotation ) ; mul ( m ) ;
public class QueryBuilder { /** * sorts are a bit more awkward and need a helper . . . */ private static String quoteSort ( Sort [ ] sort ) { } }
LinkedList < String > sorts = new LinkedList < String > ( ) ; for ( Sort pair : sort ) { sorts . add ( String . format ( "{%s: %s}" , Helpers . quote ( pair . getName ( ) ) , Helpers . quote ( pair . getOrder ( ) . toString ( ) ) ) ) ; } return sorts . toString ( ) ;
public class DescribeDhcpOptionsRequest { /** * The IDs of one or more DHCP options sets . * Default : Describes all your DHCP options sets . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setDhcpOptionsIds ( java . util . Collection ) } or { @ link # withDhcpOptionsIds ( java . util . Collection ) } if you want * to override the existing values . * @ param dhcpOptionsIds * The IDs of one or more DHCP options sets . < / p > * Default : Describes all your DHCP options sets . * @ return Returns a reference to this object so that method calls can be chained together . */ public DescribeDhcpOptionsRequest withDhcpOptionsIds ( String ... dhcpOptionsIds ) { } }
if ( this . dhcpOptionsIds == null ) { setDhcpOptionsIds ( new com . amazonaws . internal . SdkInternalList < String > ( dhcpOptionsIds . length ) ) ; } for ( String ele : dhcpOptionsIds ) { this . dhcpOptionsIds . add ( ele ) ; } return this ;
public class ComputeNodeRebootHeaders { /** * Set the time at which the resource was last modified . * @ param lastModified the lastModified value to set * @ return the ComputeNodeRebootHeaders object itself . */ public ComputeNodeRebootHeaders withLastModified ( DateTime lastModified ) { } }
if ( lastModified == null ) { this . lastModified = null ; } else { this . lastModified = new DateTimeRfc1123 ( lastModified ) ; } return this ;
public class CacheHandler { /** * 回滚当前事务写入的缓存 */ public static void rollbackCache ( ) { } }
List < String > keys = TransactionWriteCacheKeys . get ( ) ; if ( keys == null ) return ; for ( String key : keys ) { getCacheProvider ( ) . remove ( key ) ; }
public class ElasticsearchDomainStatusMarshaller {
    /**
     * Marshall the given parameter object: binds every member of the domain
     * status to its marshalling location, one call per field.
     *
     * @param elasticsearchDomainStatus the status to marshall, must not be null
     * @param protocolMarshaller the marshaller receiving the bound values
     * @throws SdkClientException if the status is null or marshalling fails
     */
    public void marshall(ElasticsearchDomainStatus elasticsearchDomainStatus, ProtocolMarshaller protocolMarshaller) {
        if (elasticsearchDomainStatus == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // bind each member to its protocol location; any failure is wrapped below
            protocolMarshaller.marshall(elasticsearchDomainStatus.getDomainId(), DOMAINID_BINDING);
            protocolMarshaller.marshall(elasticsearchDomainStatus.getDomainName(), DOMAINNAME_BINDING);
            protocolMarshaller.marshall(elasticsearchDomainStatus.getARN(), ARN_BINDING);
            protocolMarshaller.marshall(elasticsearchDomainStatus.getCreated(), CREATED_BINDING);
            protocolMarshaller.marshall(elasticsearchDomainStatus.getDeleted(), DELETED_BINDING);
            protocolMarshaller.marshall(elasticsearchDomainStatus.getEndpoint(), ENDPOINT_BINDING);
            protocolMarshaller.marshall(elasticsearchDomainStatus.getEndpoints(), ENDPOINTS_BINDING);
            protocolMarshaller.marshall(elasticsearchDomainStatus.getProcessing(), PROCESSING_BINDING);
            protocolMarshaller.marshall(elasticsearchDomainStatus.getUpgradeProcessing(), UPGRADEPROCESSING_BINDING);
            protocolMarshaller.marshall(elasticsearchDomainStatus.getElasticsearchVersion(), ELASTICSEARCHVERSION_BINDING);
            protocolMarshaller.marshall(elasticsearchDomainStatus.getElasticsearchClusterConfig(), ELASTICSEARCHCLUSTERCONFIG_BINDING);
            protocolMarshaller.marshall(elasticsearchDomainStatus.getEBSOptions(), EBSOPTIONS_BINDING);
            protocolMarshaller.marshall(elasticsearchDomainStatus.getAccessPolicies(), ACCESSPOLICIES_BINDING);
            protocolMarshaller.marshall(elasticsearchDomainStatus.getSnapshotOptions(), SNAPSHOTOPTIONS_BINDING);
            protocolMarshaller.marshall(elasticsearchDomainStatus.getVPCOptions(), VPCOPTIONS_BINDING);
            protocolMarshaller.marshall(elasticsearchDomainStatus.getCognitoOptions(), COGNITOOPTIONS_BINDING);
            protocolMarshaller.marshall(elasticsearchDomainStatus.getEncryptionAtRestOptions(), ENCRYPTIONATRESTOPTIONS_BINDING);
            protocolMarshaller.marshall(elasticsearchDomainStatus.getNodeToNodeEncryptionOptions(), NODETONODEENCRYPTIONOPTIONS_BINDING);
            protocolMarshaller.marshall(elasticsearchDomainStatus.getAdvancedOptions(), ADVANCEDOPTIONS_BINDING);
            protocolMarshaller.marshall(elasticsearchDomainStatus.getLogPublishingOptions(), LOGPUBLISHINGOPTIONS_BINDING);
            protocolMarshaller.marshall(elasticsearchDomainStatus.getServiceSoftwareOptions(), SERVICESOFTWAREOPTIONS_BINDING);
        } catch (Exception e) {
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class Ifc4PackageImpl {
    /**
     * Lazily resolves and caches the EClass for IfcThermalConductivityMeasure
     * from the registered Ifc4 package (classifier index 880).
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public EClass getIfcThermalConductivityMeasure() {
        if (ifcThermalConductivityMeasureEClass == null) {
            // look the classifier up once in the global EMF package registry, then cache it
            ifcThermalConductivityMeasureEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI).getEClassifiers().get(880);
        }
        return ifcThermalConductivityMeasureEClass;
    }
}
public class Base32 {
    /**
     * Returns the base 32 encoding of the given length from a {@link Long} geohash.
     *
     * @param i the geohash
     * @param length the length of the returned hash
     * @return the string geohash
     */
    public static String encodeBase32(long i, int length) {
        // Works in negative space (same trick as Long.toString) so that
        // Long.MIN_VALUE, which has no positive counterpart, is handled correctly.
        char[] buf = new char[65];
        int charPos = 64;
        boolean negative = (i < 0);
        if (!negative)
            i = -i;
        // emit digits least-significant first, filling the buffer from the end
        while (i <= -32) {
            buf[charPos--] = characters[(int) (-(i % 32))];
            i /= 32;
        }
        // final (most significant) digit
        buf[charPos] = characters[(int) (-i)];
        // left-pad with zeros to the requested length
        String result = padLeftWithZerosToLength(new String(buf, charPos, (65 - charPos)), length);
        if (negative)
            return "-" + result;
        else
            return result;
    }
}
public class ParticleIO { /** * Save a single emitter to the XML file * @ param out * The location to which we should save * @ param emitter * The emitter to store to the XML file * @ throws IOException * Indicates a failure to write or encode the XML */ public static void saveEmitter ( OutputStream out , ConfigurableEmitter emitter ) throws IOException { } }
try { DocumentBuilder builder = DocumentBuilderFactory . newInstance ( ) . newDocumentBuilder ( ) ; Document document = builder . newDocument ( ) ; document . appendChild ( emitterToElement ( document , emitter ) ) ; Result result = new StreamResult ( new OutputStreamWriter ( out , "utf-8" ) ) ; DOMSource source = new DOMSource ( document ) ; TransformerFactory factory = TransformerFactory . newInstance ( ) ; Transformer xformer = factory . newTransformer ( ) ; xformer . setOutputProperty ( OutputKeys . INDENT , "yes" ) ; xformer . transform ( source , result ) ; } catch ( Exception e ) { Log . error ( e ) ; throw new IOException ( "Failed to save emitter" ) ; }
public class CatalogUtil { /** * Iterate through all the tables in the catalog , find a table with an id that matches the * given table id , and return its name . * @ param catalog Catalog database * @ param tableId table id * @ return table name associated with the given table id ( null if no association is found ) */ public static String getTableNameFromId ( Database catalog , int tableId ) { } }
String tableName = null ; for ( Table table : catalog . getTables ( ) ) { if ( table . getRelativeIndex ( ) == tableId ) { tableName = table . getTypeName ( ) ; } } return tableName ;
public class StringHelper { /** * Clean the generated alias by removing any non - alpha characters from the * beginning . * @ param alias The generated alias to be cleaned . * @ return The cleaned alias , stripped of any leading non - alpha characters . */ private static String cleanAlias ( String alias ) { } }
char [ ] chars = alias . toCharArray ( ) ; // short cut check . . . if ( ! Character . isLetter ( chars [ 0 ] ) ) { for ( int i = 1 ; i < chars . length ; i ++ ) { // as soon as we encounter our first letter , return the substring // from that position if ( Character . isLetter ( chars [ i ] ) ) { return alias . substring ( i ) ; } } } return alias ;
public class Octahedron { /** * Returns the vertices of an n - fold polygon of given radius and center * @ param n * @ param radius * @ param center * @ return */ @ Override public Point3d [ ] getVertices ( ) { } }
Point3d [ ] octahedron = new Point3d [ 6 ] ; octahedron [ 0 ] = new Point3d ( - cirumscribedRadius , 0 , 0 ) ; octahedron [ 1 ] = new Point3d ( cirumscribedRadius , 0 , 0 ) ; octahedron [ 2 ] = new Point3d ( 0 , - cirumscribedRadius , 0 ) ; octahedron [ 3 ] = new Point3d ( 0 , cirumscribedRadius , 0 ) ; octahedron [ 4 ] = new Point3d ( 0 , 0 , - cirumscribedRadius ) ; octahedron [ 5 ] = new Point3d ( 0 , 0 , cirumscribedRadius ) ; return octahedron ;
public class SyntheticProperty { /** * Returns all the added accessor annotation descriptors in an unmodifiable list . */ public List < String > getAccessorAnnotationDescriptors ( ) { } }
if ( mAnnotationDescs == null ) { return Collections . emptyList ( ) ; } return Collections . unmodifiableList ( mAnnotationDescs ) ;
public class BCUtil {
    /**
     * Decodes a compressed EC public key point; the input may be Base64- or
     * Hex-encoded (backed by BouncyCastle).
     * See: https://www.cnblogs.com/xinzhao/p/8963724.html
     *
     * @param encode the compressed public key, Base64- or Hex-encoded
     * @param curveName the EC curve name
     * @since 4.4.4
     */
    public static PublicKey decodeECPoint(String encode, String curveName) {
        // SecureUtil.decode handles the Base64-vs-Hex detection before
        // delegating to the byte[] overload.
        return decodeECPoint(SecureUtil.decode(encode), curveName);
    }
}
public class VrAppSettings { /** * Enable the use of the given controller type by * adding it to the cursor controller types list . * @ param controllerType GVRControllerType to add to the list */ public void addControllerType ( GVRControllerType controllerType ) { } }
if ( cursorControllerTypes == null ) { cursorControllerTypes = new ArrayList < GVRControllerType > ( ) ; } else if ( cursorControllerTypes . contains ( controllerType ) ) { return ; } cursorControllerTypes . add ( controllerType ) ;
public class WordVectorSerializer {
    /**
     * Reads a vocab cache from the provided file.
     * Please note: it reads only vocab content, so it's suitable mostly for
     * BagOfWords/TF-IDF vectorizers.
     *
     * @param file the file to read the vocab cache from
     * @return the deserialized vocab cache
     * @throws IOException if the file cannot be opened or read
     */
    public static VocabCache<VocabWord> readVocabCache(@NonNull File file) throws IOException {
        // try-with-resources guarantees the stream is closed even if the
        // stream-based overload throws.
        try (FileInputStream fis = new FileInputStream(file)) {
            return readVocabCache(fis);
        }
    }
}
public class CaffeineSpec {
    /**
     * Configures expire-after-access from a parsed spec key/value pair.
     * A duplicate key is rejected: the setting may only be assigned once.
     */
    void expireAfterAccess(String key, @Nullable String value) {
        // Reject a second occurrence of the key in the spec string.
        requireArgument(expireAfterAccessDuration == UNSET_INT, "expireAfterAccess was already set");
        expireAfterAccessDuration = parseDuration(key, value);
        expireAfterAccessTimeUnit = parseTimeUnit(key, value);
    }
}
public class RefasterScanner {
    /**
     * Visits a do-while loop. Matching on the parentheses surrounding the
     * condition of an if, while, or do-while is nonsensical, as those
     * parentheses are obligatory and should never be changed — so they are
     * stripped before scanning the condition.
     */
    @Override
    public Void visitDoWhileLoop(DoWhileLoopTree node, Context context) {
        scan(node.getStatement(), context);
        // Skip the mandatory parentheses around the condition before matching.
        scan(SKIP_PARENS.visit(node.getCondition(), null), context);
        return null;
    }
}
public class CommandFaceDescriptor { /** * { @ inheritDoc } */ public void setBackground ( Color background ) { } }
Color old = this . background ; this . background = background ; firePropertyChange ( BACKGROUND_PROPERTY , old , this . background ) ;
public class ModuleSetter {
    /**
     * Creates a Guice module based on the parameters previously set on this
     * ModuleSetter.
     *
     * @return the {@link AbstractModule} to be installed, wiring data/meta
     *         factories, revisioning, the byte-handler pipeline, the backend
     *         factory and the key instance
     */
    public AbstractModule createModule() {
        return new AbstractModule() {
            @Override
            protected void configure() {
                // Plain interface-to-implementation bindings.
                bind(IDataFactory.class).to(mDataFacClass);
                bind(IMetaEntryFactory.class).to(mMetaFacClass);
                bind(IRevisioning.class).to(mRevisioningClass);
                // The byte handler is bound as a ready-made instance.
                bind(IByteHandlerPipeline.class).toInstance(mByteHandler);
                // Assisted-inject factory for the configured backend class.
                install(new FactoryModuleBuilder().implement(IBackend.class, mBackend).build(IBackendFactory.class));
                install(new FactoryModuleBuilder().build(IResourceConfigurationFactory.class));
                bind(Key.class).toInstance(mKey);
                install(new FactoryModuleBuilder().build(ISessionConfigurationFactory.class));
            }
        };
    }
}
public class GDeferredRequest {
    /**
     * Notifies every registered done callback that the request resolved.
     * Callbacks of the request-aware {@code DoneCallback} subtype receive the
     * whole {@code Response}; plain gdeferred callbacks receive only the
     * payload. An exception thrown by one callback is logged and does not
     * prevent the remaining callbacks from running.
     */
    @SuppressWarnings("unchecked")
    protected void triggerDone(Response<T> resolved) {
        for (io.reinert.gdeferred.DoneCallback<T> callback : getDoneCallbacks()) {
            try {
                if (callback instanceof DoneCallback) {
                    // Request-aware callback: gets the full response object.
                    ((DoneCallback) callback).onDone(resolved);
                } else {
                    // Plain callback: gets just the payload.
                    callback.onDone(resolved.getPayload());
                }
            } catch (Exception e) {
                log.log(Level.SEVERE, "An uncaught exception occurred in a DoneCallback", e);
            }
        }
    }
}
public class HalShopClient {
    /**
     * Returns the REST resource identified by the link's href as a JSON
     * string. The Accept header is taken from the link's type when present,
     * falling back to {@code application/hal+json}.
     *
     * @param link the non-templated Link of the resource
     * @return json response body
     * @throws IOException if the request fails (e.g. the server is not running)
     */
    private String getHalJson(final Link link) throws IOException {
        try {
            final HttpGet httpget = new HttpGet(link.getHref());
            if (link.getType().isEmpty()) {
                httpget.addHeader("Accept", "application/hal+json");
            } else {
                httpget.addHeader("Accept", link.getType());
            }
            System.out.println("| ------------- Request --------------");
            System.out.println("| " + httpget.getRequestLine());
            final HttpEntity entity = httpclient.execute(httpget).getEntity();
            // EntityUtils.toString fully consumes the entity, releasing the connection.
            final String json = EntityUtils.toString(entity);
            System.out.println("| ------------- Response -------------");
            System.out.println("| " + json);
            return json;
        } catch (final IOException e) {
            System.out.println("\nPlease start example-springboot Server before you are running the Client.\n");
            throw e;
        }
    }
}
public class UnilateralSortMerger {
    /**
     * Creates the reading thread. The reading thread simply reads the data off
     * the input and puts it into the buffer where it will be sorted. The
     * returned thread is not yet started.
     *
     * @param exceptionHandler the handler for exceptions in the thread
     * @param reader the reader from which the thread reads
     * @param queues the queues through which the thread communicates with the
     *        other threads
     * @param parentTask the task at which the thread registers itself (for
     *        profiling purposes)
     * @param serializer the serializer used to serialize records
     * @param startSpillingBytes the number of bytes after which the reader
     *        thread will send the notification to start the spilling
     * @return the thread that reads data from an input, writes it into sort
     *         buffers and puts them into a queue
     */
    protected ThreadBase<E> getReadingThread(ExceptionHandler<IOException> exceptionHandler, MutableObjectIterator<E> reader, CircularQueues<E> queues, AbstractInvokable parentTask, TypeSerializer<E> serializer, long startSpillingBytes) {
        // A fresh record instance is handed over for mutable object reuse.
        return new ReadingThread<E>(exceptionHandler, reader, queues, serializer.createInstance(), parentTask, startSpillingBytes);
    }
}
public class BucketConfigurationXmlFactory {
    /**
     * Converts the specified accelerate configuration into an XML byte array.
     *
     * @param accelerateConfiguration the configuration to convert
     * @return the XML byte array representation
     */
    public byte[] convertToXmlByteArray(BucketAccelerateConfiguration accelerateConfiguration) {
        XmlWriter xml = new XmlWriter();
        // Root element carries the S3 XML namespace.
        xml.start("AccelerateConfiguration", "xmlns", Constants.XML_NAMESPACE);
        xml.start("Status").value(accelerateConfiguration.getStatus()).end();
        xml.end();
        return xml.getBytes();
    }
}
public class DOValidatorImpl {
    /**
     * Do Schematron rules validation on the Fedora object. Schematron
     * validation tests the object against a set of rules expressed using XPATH
     * in a Schematron schema. These test for things that are beyond what can
     * be expressed using XML Schema.
     *
     * @param objectAsStream the digital object provided as a stream
     * @param ruleSchemaPath location of the Schematron rules file
     * @param preprocessorPath location of Schematron preprocessing stylesheet
     * @param phase the workflow phase (ingest, store) for the object
     * @throws ObjectValidityException if validation fails for any reason
     * @throws GeneralException if validation fails for any reason
     */
    private void validateByRules(InputStream objectAsStream, String ruleSchemaPath, String preprocessorPath, String phase) throws ObjectValidityException, GeneralException {
        try {
            DOValidatorSchematron schtron = new DOValidatorSchematron(ruleSchemaPath, preprocessorPath, phase);
            schtron.validate(objectAsStream);
        } catch (ObjectValidityException e) {
            // Already the right exception type: rethrow untouched.
            logger.error("VALIDATE: ERROR - failed Schematron rules validation.", e);
            throw e;
        } catch (Exception e) {
            logger.error("VALIDATE: ERROR - failed Schematron rules validation.", e);
            // NOTE(review): only the message is kept here, the cause 'e' is
            // dropped — confirm whether ObjectValidityException has a
            // (String, Throwable) constructor before chaining it.
            throw new ObjectValidityException("[DOValidatorImpl]: " + "failed Schematron rules validation. " + e.getMessage());
        }
        logger.debug("VALIDATE: SUCCESS - passed Schematron rules validation.");
    }
}
public class RspList { /** * Returns the first value in the response set . This is random , but we try to return a non - null value first */ public T getFirst ( ) { } }
Optional < Rsp < T > > retval = values ( ) . stream ( ) . filter ( rsp -> rsp . getValue ( ) != null ) . findFirst ( ) ; return retval . isPresent ( ) ? retval . get ( ) . getValue ( ) : null ;
public class AsyncActivityImpl {
    /**
     * Asynchronously PUTs the given string content to the given URI by
     * scheduling a handler on the shared executor service.
     * (non-Javadoc)
     *
     * @see AsyncActivity#put(java.net.URI, java.lang.String, java.lang.String,
     *      Header[], Credentials)
     */
    public void put(URI uri, String mimetype, String content, Header[] additionalRequestHeaders, Credentials credentials) {
        // The handler performs the actual HTTP PUT off the calling thread.
        ra.getExecutorService().execute(new AsyncPutStringContentHandler(ra, handle, uri, mimetype, content, additionalRequestHeaders, credentials));
    }
}
public class ResourceAddressFactory { /** * convenience method only , consider removing from API */ public ResourceAddress newResourceAddress ( String location , String nextProtocol ) { } }
if ( nextProtocol != null ) { ResourceOptions options = ResourceOptions . FACTORY . newResourceOptions ( ) ; options . setOption ( NEXT_PROTOCOL , nextProtocol ) ; return newResourceAddress ( location , options ) ; } else { return newResourceAddress ( location ) ; }
public class LongTupleIterators {
    /**
     * Returns an iterator that returns the {@link MutableLongTuple}s from the
     * given delegate, wrapped at the given bounds.
     *
     * @param bounds the bounds; a copy of this tuple will be stored internally
     * @param delegate the delegate iterator
     * @return the iterator
     */
    static Iterator<MutableLongTuple> wrappingIterator(LongTuple bounds, Iterator<? extends MutableLongTuple> delegate) {
        // Defensive copy: later mutation of 'bounds' by the caller must not
        // affect the wrapping behavior.
        return wrappingIteratorInternal(LongTuples.copy(bounds), delegate);
    }
}
public class OmsIntensityClassifierFlood { /** * VARS DOC END */ @ Execute public void process ( ) throws Exception { } }
if ( ! concatOr ( outIntensity == null , doReset ) ) { return ; } checkNull ( inWaterDepth , inVelocity , pUpperThresVelocityWaterdepth , pUpperThresWaterdepth , pLowerThresVelocityWaterdepth , pLowerThresWaterdepth ) ; // do autoboxing only once double maxWD = pUpperThresWaterdepth ; double maxVWD = pUpperThresVelocityWaterdepth ; double minWD = pLowerThresWaterdepth ; double minVWD = pLowerThresVelocityWaterdepth ; RegionMap regionMap = CoverageUtilities . getRegionParamsFromGridCoverage ( inWaterDepth ) ; int nCols = regionMap . getCols ( ) ; int nRows = regionMap . getRows ( ) ; RandomIter waterdepthIter = CoverageUtilities . getRandomIterator ( inWaterDepth ) ; RandomIter velocityIter = CoverageUtilities . getRandomIterator ( inVelocity ) ; WritableRaster outWR = CoverageUtilities . createWritableRaster ( nCols , nRows , null , null , doubleNovalue ) ; WritableRandomIter outIter = RandomIterFactory . createWritable ( outWR , null ) ; pm . beginTask ( "Processing map..." , nRows ) ; for ( int r = 0 ; r < nRows ; r ++ ) { if ( isCanceled ( pm ) ) { return ; } for ( int c = 0 ; c < nCols ; c ++ ) { double h = waterdepthIter . getSampleDouble ( c , r , 0 ) ; double v = velocityIter . getSampleDouble ( c , r , 0 ) ; if ( isNovalue ( h ) && isNovalue ( v ) ) { continue ; } else if ( ! isNovalue ( h ) && ! isNovalue ( v ) ) { double value = 0.0 ; double vh = v * h ; if ( h > maxWD || vh > maxVWD ) { value = INTENSITY_HIGH ; } else if ( ( h > minWD && h <= maxWD ) || ( vh > minVWD && vh <= maxVWD ) ) { value = INTENSITY_MEDIUM ; } else if ( h <= minWD && vh <= minVWD ) { value = INTENSITY_LOW ; } else { throw new ModelsIllegalargumentException ( "No intensity could be calculated for h = " + h + " and v = " + v , this , pm ) ; } outIter . setSample ( c , r , 0 , value ) ; } else { pm . errorMessage ( "WARNING: a cell was found in which one of velocity and water depth are novalue, while the other not. /nThe maps should be covering the exact same cells. 
/nGoing on ignoring the cell: " + c + "/" + r ) ; } } pm . worked ( 1 ) ; } pm . done ( ) ; outIntensity = CoverageUtilities . buildCoverage ( "intensity" , outWR , regionMap , inWaterDepth . getCoordinateReferenceSystem ( ) ) ;
public class DeleteWorkteamRequestMarshaller {
    /**
     * Marshall the given parameter object into the protocol marshaller.
     * Only the workteam name field is marshalled for this request.
     */
    public void marshall(DeleteWorkteamRequest deleteWorkteamRequest, ProtocolMarshaller protocolMarshaller) {
        if (deleteWorkteamRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(deleteWorkteamRequest.getWorkteamName(), WORKTEAMNAME_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure in the SDK's client exception, keeping the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class onlinkipv6prefix { /** * Use this API to fetch all the onlinkipv6prefix resources that are configured on netscaler . */ public static onlinkipv6prefix [ ] get ( nitro_service service ) throws Exception { } }
onlinkipv6prefix obj = new onlinkipv6prefix ( ) ; onlinkipv6prefix [ ] response = ( onlinkipv6prefix [ ] ) obj . get_resources ( service ) ; return response ;
public class BatchFraction {
    /**
     * Creates a new thread-pool using the {@linkplain #DEFAULT_THREAD_POOL_NAME
     * default name}. The thread-pool is then set as the default thread-pool
     * for batch jobs.
     *
     * @param maxThreads the maximum number of threads to set the pool to
     * @param keepAliveTime the time to keep threads alive
     * @param keepAliveUnits the time unit for the keep alive time
     * @return this fraction
     */
    public BatchFraction defaultThreadPool(final int maxThreads, final int keepAliveTime, final TimeUnit keepAliveUnits) {
        // Delegates to the named overload with the default pool name.
        return defaultThreadPool(DEFAULT_THREAD_POOL_NAME, maxThreads, keepAliveTime, keepAliveUnits);
    }
}
public class PropertyConstraint { /** * Returns whether a proposition satisfies this constraint . If no * property name or value comparator are specified , this method always * returns < code > true < / code > . * @ param proposition a proposition . Cannot be < code > null < / code > . * @ return < code > true < / code > if the proposition satisfies this constraint , * < code > false < / code > if not . */ public boolean isSatisfiedBy ( Proposition proposition ) { } }
if ( proposition == null ) { throw new IllegalArgumentException ( "proposition cannot be null" ) ; } if ( this . valueComp != null && this . propertyName != null ) { Value value = proposition . getProperty ( this . propertyName ) ; if ( ! valueComp . compare ( value , this . value ) ) { return false ; } } return true ;
public class AdsSoapModule {
    /**
     * Configures the Guice factories for the ads service client and descriptor
     * types by installing a single composite factory module.
     *
     * @param <H> the subclass of {@link AdsServiceClientFactoryHelper}
     * @param <F> the subclass of {@link BaseAdsServiceClientFactory}
     * @param adsServiceClientFactoryTypeLiteral the factory type literal which
     *        contains a {@link AdsServiceClientFactoryInterface}
     * @param adsServiceDescriptorFactoryTypeLiteral the factory type literal
     *        which contains a {@link AdsServiceDescriptorFactoryInterface}
     * @param adsServiceClientTypeLiteral the ads service client literal which
     *        contains a {@link AdsServiceClient}
     * @param adsServiceDescriptorTypeLiteral the ads service descriptor literal
     *        which contains a {@link AdsServiceDescriptor}
     * @param adsServiceClientFactoryHelperClass the {@link AdsServiceClientFactoryHelper} class
     * @param baseAdsServiceClientFactoryClass the {@link BaseAdsServiceClientFactory} class
     */
    protected <H extends AdsServiceClientFactoryHelper<C, S, D>, F extends BaseAdsServiceClientFactory<C, S, D>> void configureFactories(TypeLiteral<AdsServiceClientFactoryInterface<C, S, D>> adsServiceClientFactoryTypeLiteral, TypeLiteral<AdsServiceDescriptorFactoryInterface<D>> adsServiceDescriptorFactoryTypeLiteral, TypeLiteral<C> adsServiceClientTypeLiteral, TypeLiteral<D> adsServiceDescriptorTypeLiteral, Class<H> adsServiceClientFactoryHelperClass, Class<F> baseAdsServiceClientFactoryClass) {
        // One FactoryModule wires all the literals/classes together so the
        // helper and base factory share consistent generic bindings.
        install(new FactoryModule<C, D, S, H, F>(adsServiceClientFactoryTypeLiteral, adsServiceDescriptorFactoryTypeLiteral, adsServiceClientTypeLiteral, adsServiceDescriptorTypeLiteral, adsServiceClientFactoryHelperClass, baseAdsServiceClientFactoryClass));
    }
}
public class DropSpatialIndexGeneratorGeoDB { /** * Ensures that the table name is populated . */ @ Override public ValidationErrors validate ( final DropSpatialIndexStatement statement , final Database database , final SqlGeneratorChain sqlGeneratorChain ) { } }
final ValidationErrors validationErrors = new ValidationErrors ( ) ; validationErrors . checkRequiredField ( "tableName" , statement . getTableName ( ) ) ; return validationErrors ;
public class CommerceAccountOrganizationRelPersistenceImpl {
    /**
     * Returns the first commerce account organization rel in the ordered set
     * where commerceAccountId = &#63;.
     *
     * @param commerceAccountId the commerce account ID
     * @param orderByComparator the comparator to order the set by (optionally <code>null</code>)
     * @return the first matching commerce account organization rel
     * @throws NoSuchAccountOrganizationRelException if a matching commerce
     *         account organization rel could not be found
     */
    @Override
    public CommerceAccountOrganizationRel findByCommerceAccountId_First(long commerceAccountId, OrderByComparator<CommerceAccountOrganizationRel> orderByComparator) throws NoSuchAccountOrganizationRelException {
        CommerceAccountOrganizationRel commerceAccountOrganizationRel = fetchByCommerceAccountId_First(commerceAccountId, orderByComparator);
        if (commerceAccountOrganizationRel != null) {
            return commerceAccountOrganizationRel;
        }
        // No match: build the exception message (the key prefix opens the
        // brace that the trailing "}" closes).
        StringBundler msg = new StringBundler(4);
        msg.append(_NO_SUCH_ENTITY_WITH_KEY);
        msg.append("commerceAccountId=");
        msg.append(commerceAccountId);
        msg.append("}");
        throw new NoSuchAccountOrganizationRelException(msg.toString());
    }
}
public class LogBuffer {
    /**
     * Return next 64-bit unsigned long from buffer (little-endian).
     * @see mysql-5.1.60/include/my_global.h - uint8korr
     */
    public final BigInteger getUlong64() {
        final long long64 = getLong64();
        // Non-negative values map directly; for a negative signed value the
        // unsigned interpretation is 2^64 + long64, computed here as
        // BIGINT_MAX_VALUE + (1 + long64) — assumes BIGINT_MAX_VALUE is
        // 2^64 - 1 (confirm against the constant's definition).
        return (long64 >= 0) ? BigInteger.valueOf(long64) : BIGINT_MAX_VALUE.add(BigInteger.valueOf(1 + long64));
    }
}
public class ImmutableOverlappingRangeSet { /** * Returns all { @ link Range } s { @ link Range # isConnected ( Range ) } to the { @ code queryRange } for which * { @ link Range # isEmpty ( ) } is false . Does not preserve multiplicity of { @ link Range } s */ @ Override public Collection < Range < T > > rangesOverlapping ( final Range < T > queryRange ) { } }
final ImmutableSet . Builder < Range < T > > ret = ImmutableSet . builder ( ) ; for ( final Range < T > range : ranges ) { if ( range . isConnected ( queryRange ) && ! range . intersection ( queryRange ) . isEmpty ( ) ) { ret . add ( range ) ; } } return ret . build ( ) ;
public class ProbabilitiesPanel {
    /**
     * Construct the combination of evolutionary operators that will be used to
     * evolve the polygon-based images.
     *
     * @param factory A source of polygons.
     * @param canvasSize The size of the target image.
     * @param rng A source of randomness.
     * @return A complex evolutionary operator constructed from simpler operators.
     */
    public EvolutionaryOperator<List<ColouredPolygon>> createEvolutionPipeline(PolygonImageFactory factory, Dimension canvasSize, Random rng) {
        // Operators are applied in list order by the pipeline.
        List<EvolutionaryOperator<List<ColouredPolygon>>> operators = new LinkedList<EvolutionaryOperator<List<ColouredPolygon>>>();
        // Two-point crossover between candidate polygon lists.
        operators.add(new ListCrossover<ColouredPolygon>(new ConstantGenerator<Integer>(2), crossOverControl.getNumberGenerator()));
        // Whole-polygon mutations.
        operators.add(new RemovePolygonMutation(removePolygonControl.getNumberGenerator()));
        operators.add(new MovePolygonMutation(movePolygonControl.getNumberGenerator()));
        // Per-polygon vertex mutations (applied element-wise via ListOperator).
        operators.add(new ListOperator<ColouredPolygon>(new RemoveVertexMutation(canvasSize, removeVertexControl.getNumberGenerator())));
        operators.add(new ListOperator<ColouredPolygon>(new AdjustVertexMutation(canvasSize, moveVertexControl.getNumberGenerator(), new GaussianGenerator(0, 3, rng))));
        operators.add(new ListOperator<ColouredPolygon>(new AddVertexMutation(canvasSize, addVertexControl.getNumberGenerator())));
        // Colour mutation with Gaussian noise on the channels.
        operators.add(new ListOperator<ColouredPolygon>(new PolygonColourMutation(changeColourControl.getNumberGenerator(), new GaussianGenerator(0, 20, rng))));
        // Growth mutation, capped at 50 polygons.
        operators.add(new AddPolygonMutation(addPolygonControl.getNumberGenerator(), factory, 50));
        return new EvolutionPipeline<List<ColouredPolygon>>(operators);
    }
}
public class BaseContentHandler {
    /**
     * Handles a parsed value for the current key. Called after
     * {@code startNewElement(localName)}: if the current key matches one of
     * the feature type's declared elements, the value is encoded and added to
     * the current feature; if it matches the lat, lon or combined lon/lat key,
     * the corresponding point-building events are fired on the content
     * handler(s).
     *
     * @param arg0 the raw value to process; blank or null values are ignored
     */
    public void processValue(String arg0) {
        arg0 = normalizeValue(arg0);
        // Nothing to do without a value or without a feature in progress.
        if (arg0 == null || this.currentFeatureGeoJSON == null || arg0.trim().isEmpty())
            return;
        FeatureType fType = this.currentFeatureType;
        final int size = fType.getElements().size();
        // HACK for the special case when lon and lat came in an array:
        // reset both flags once a full pair has been consumed.
        if (processedLat && processedLon) {
            processedLat = false;
            processedLon = false;
        }
        // 1) Plain element values: encode and attach to the feature.
        for (String destProp : fType.getElements().keySet()) {
            if (hasToBeParsed(fType, destProp)) {
                checkValidAttribute(localFilter, arg0.toString());
                fType.getElements().get(destProp).encode(arg0.toString());
                if (writerContentHandler != null)
                    writerContentHandler.addElementToFeature(service.encode(arg0.toString()), destProp, this.currentFeatureGeoJSON);
                contentHandler.addElementToFeature(service.encode(arg0.toString()), destProp, this.currentFeature);
                return;
            }
        }
        // 2) Longitude value for a split lon/lat pair.
        if (hasToParsePosition(fType) && fType.getLon() != null && currentKey.toString().compareTo(fType.getLon()) == 0 && !processedLon) {
            double lon = Utils.formatNumber(arg0.toString(), service.getDecimalSeparator(), service.getNumberSeparator());
            // NOTE(review): startPoint() is called on writerContentHandler
            // without a null check here, unlike the guarded calls below —
            // verify writerContentHandler is always set when positions are parsed.
            if (this.currentGeometryGeoJSON == null)
                this.currentGeometryGeoJSON = writerContentHandler.startPoint();
            if (writerContentHandler != null)
                writerContentHandler.addXToPoint(new Double(lon), this.currentGeometryGeoJSON);
            contentHandler.addXToPoint(new Double(lon), this.currentPoint);
            processedLon = true;
            return;
        }
        // 3) Latitude value for a split lon/lat pair.
        if (hasToParsePosition(fType) && fType.getLat() != null && currentKey.toString().compareTo(fType.getLat()) == 0 && !processedLat) {
            double lat = Utils.formatNumber(arg0.toString(), service.getDecimalSeparator(), service.getNumberSeparator());
            if (this.currentGeometryGeoJSON == null)
                this.currentGeometryGeoJSON = writerContentHandler.startPoint();
            if (writerContentHandler != null)
                writerContentHandler.addYToPoint(new Double(lat), this.currentGeometryGeoJSON);
            contentHandler.addYToPoint(new Double(lat), this.currentPoint);
            processedLat = true;
            return;
        }
        // 4) Combined "lon<sep>lat" (or reversed) value in a single field.
        try {
            if (fType.getCombinedLonLat() != null && currentKey.toString().compareTo(fType.getCombinedLonLat()) == 0) {
                String[] latLon = arg0.toString().split(fType.getLonLatSeparator());
                int lonPos = 0;
                int latPos = 1;
                if (fType.getReverseLonLat()) {
                    lonPos = 1;
                    latPos = 0;
                }
                double lon = Utils.formatNumber(latLon[lonPos].toString(), service.getDecimalSeparator(), service.getNumberSeparator());
                double lat = Utils.formatNumber(latLon[latPos].toString(), service.getDecimalSeparator(), service.getNumberSeparator());
                if (writerContentHandler != null) {
                    writerContentHandler.addYToPoint(lat, this.currentGeometryGeoJSON);
                    writerContentHandler.addXToPoint(lon, this.currentGeometryGeoJSON);
                }
                contentHandler.addYToPoint(lat, this.currentPoint);
                contentHandler.addXToPoint(lon, this.currentPoint);
                return;
            }
        } catch (Exception e) {
            // Best-effort parsing of the combined field: log and continue.
            e.printStackTrace();
        }
        return;
    }
}
public class XBooleanLiteralImpl {
    /**
     * Sets the literal's boolean value and notifies adapters of the change.
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     * @generated
     */
    public void setIsTrue(boolean newIsTrue) {
        boolean oldIsTrue = isTrue;
        isTrue = newIsTrue;
        // Standard EMF change notification with old and new values.
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, XbasePackage.XBOOLEAN_LITERAL__IS_TRUE, oldIsTrue, isTrue));
    }
}
public class VirtualNodeSpec { /** * The backends that the virtual node is expected to send outbound traffic to . * @ param backends * The backends that the virtual node is expected to send outbound traffic to . */ public void setBackends ( java . util . Collection < Backend > backends ) { } }
if ( backends == null ) { this . backends = null ; return ; } this . backends = new java . util . ArrayList < Backend > ( backends ) ;
public class InternalXtextParser {
    /**
     * ANTLR-generated parse method for the Alternatives rule
     * (InternalXtext.g:981:1): a ConditionalBranch optionally followed by one
     * or more '|'-separated ConditionalBranches. Do not edit by hand.
     */
    public final EObject ruleAlternatives() throws RecognitionException {
        EObject current = null;
        Token otherlv_2 = null;
        EObject this_ConditionalBranch_0 = null;
        EObject lv_elements_3_0 = null;
        enterRule();
        try {
            {
                {
                    // Mandatory leading ConditionalBranch.
                    newCompositeNode(grammarAccess.getAlternativesAccess().getConditionalBranchParserRuleCall_0());
                    pushFollow(FollowSets000.FOLLOW_25);
                    this_ConditionalBranch_0 = ruleConditionalBranch();
                    state._fsp--;
                    current = this_ConditionalBranch_0;
                    afterParserOrEnumRuleCall();
                    // Optional tail: ( '|' ConditionalBranch )+ — token 30 is '|'.
                    int alt26 = 2;
                    int LA26_0 = input.LA(1);
                    if ((LA26_0 == 30)) {
                        alt26 = 1;
                    }
                    switch (alt26) {
                        case 1:
                        {
                            {
                                // Promote the single branch into an Alternatives model element.
                                current = forceCreateModelElementAndAdd(grammarAccess.getAlternativesAccess().getAlternativesElementsAction_1_0(), current);
                            }
                            int cnt25 = 0;
                            loop25: do {
                                int alt25 = 2;
                                int LA25_0 = input.LA(1);
                                if ((LA25_0 == 30)) {
                                    alt25 = 1;
                                }
                                switch (alt25) {
                                    case 1:
                                    {
                                        // Consume '|' and the following ConditionalBranch.
                                        otherlv_2 = (Token) match(input, 30, FollowSets000.FOLLOW_19);
                                        newLeafNode(otherlv_2, grammarAccess.getAlternativesAccess().getVerticalLineKeyword_1_1_0());
                                        {
                                            {
                                                newCompositeNode(grammarAccess.getAlternativesAccess().getElementsConditionalBranchParserRuleCall_1_1_1_0());
                                                pushFollow(FollowSets000.FOLLOW_25);
                                                lv_elements_3_0 = ruleConditionalBranch();
                                                state._fsp--;
                                                if (current == null) {
                                                    current = createModelElementForParent(grammarAccess.getAlternativesRule());
                                                }
                                                add(current, "elements", lv_elements_3_0, "org.eclipse.xtext.Xtext.ConditionalBranch");
                                                afterParserOrEnumRuleCall();
                                            }
                                        }
                                    }
                                    break;
                                    default:
                                        // At least one '|' branch is required once the loop is entered.
                                        if (cnt25 >= 1) break loop25;
                                        EarlyExitException eee = new EarlyExitException(25, input);
                                        throw eee;
                                }
                                cnt25++;
                            } while (true);
                        }
                        break;
                    }
                }
            }
            leaveRule();
        } catch (RecognitionException re) {
            // Standard ANTLR recovery: resync and keep skipped tokens in the node model.
            recover(input, re);
            appendSkippedTokens();
        } finally {
        }
        return current;
    }
}
public class UserGroupManager {
  /**
   * Undelete the user group with the specified ID.
   * <p>
   * Flow: resolve the group, flip its deletion marker under the write lock,
   * audit the outcome, and only then notify callbacks (outside the lock).
   *
   * @param sUserGroupID
   *        The ID of the user group to undelete. May be <code>null</code>.
   * @return {@link EChange#CHANGED} if the user group was undeleted,
   *         {@link EChange#UNCHANGED} otherwise (unknown ID or not deleted).
   */
  @Nonnull
  public EChange undeleteUserGroup (@Nullable final String sUserGroupID)
  {
    final UserGroup aUserGroup = getOfID (sUserGroupID);
    if (aUserGroup == null)
    {
      // Unknown ID - record the failed attempt and bail out
      AuditHelper.onAuditUndeleteFailure (UserGroup.OT, sUserGroupID, "no-such-id");
      return EChange.UNCHANGED;
    }

    m_aRWLock.writeLock ().lock ();
    try
    {
      // Not marked deleted (or already undeleted) -> nothing to change
      if (BusinessObjectHelper.setUndeletionNow (aUserGroup).isUnchanged ())
        return EChange.UNCHANGED;
      internalMarkItemUndeleted (aUserGroup);
    }
    finally
    {
      m_aRWLock.writeLock ().unlock ();
    }
    AuditHelper.onAuditUndeleteSuccess (UserGroup.OT, sUserGroupID);

    // Execute callback as the very last action - after lock release and audit
    m_aCallbacks.forEach (aCB -> aCB.onUserGroupUndeleted (aUserGroup));
    return EChange.CHANGED;
  }
}
public class WebSocketConnectionManager { /** * Send JSON representation of given data object to all connections of a user * @ param data the data object * @ param username the username */ public void sendJsonToUser ( Object data , String username ) { } }
sendToUser ( JSON . toJSONString ( data ) , username ) ;
public class BannerComponentTree { /** * Parses the banner components and processes them using the nodeCreators in the order they * were originally passed * @ param bannerText to parse * @ return the list of nodes representing the bannerComponents */ private List < BannerComponentNode > parseBannerComponents ( BannerText bannerText ) { } }
int length = 0 ; List < BannerComponentNode > bannerComponentNodes = new ArrayList < > ( ) ; for ( BannerComponents components : bannerText . components ( ) ) { BannerComponentNode node = null ; for ( NodeCreator nodeCreator : nodeCreators ) { if ( nodeCreator . isNodeType ( components ) ) { node = nodeCreator . setupNode ( components , bannerComponentNodes . size ( ) , length , bannerText . modifier ( ) ) ; break ; } } if ( node != null ) { bannerComponentNodes . add ( node ) ; length += components . text ( ) . length ( ) ; } } return bannerComponentNodes ;
public class JMSService { /** * Produces topic message . Uses JmsTemplate to send to local broker and forwards ( based on * demand ) to broker network . * @ param destinationName The destination name . * @ param messageData The message data . * @ param recipients Comma - delimited list of recipient ids . */ public void produceTopicMessage ( String destinationName , String messageData , String recipients ) { } }
Message msg = createObjectMessage ( messageData , "anonymous" , recipients ) ; sendMessage ( destinationName , msg ) ;
public class OAuthManager { /** * permissions */ public List < OAuthPermission > convertScopeToPermissions ( Client client , List < String > scopes ) { } }
List < OAuthPermission > list = new ArrayList < OAuthPermission > ( ) ; for ( String scope : scopes ) { if ( scope . equals ( OAuthConstants . READ_CALENDAR_SCOPE ) ) { list . add ( READ_CALENDAR_PERMISSION ) ; } else if ( scope . startsWith ( OAuthConstants . UPDATE_CALENDAR_SCOPE ) ) { String description = OAuthConstants . UPDATE_CALENDAR_DESCRIPTION ; String hourValue = scope . substring ( OAuthConstants . UPDATE_CALENDAR_SCOPE . length ( ) ) ; if ( hourValue . equals ( "24" ) ) { description += " any time of the day" ; } else { description += hourValue + " o'clock" ; } list . add ( new OAuthPermission ( scope , description ) ) ; } } if ( ! scopes . contains ( OAuthConstants . READ_CALENDAR_SCOPE ) ) { list . add ( READ_CALENDAR_PERMISSION ) ; } return list ;
public class Wiselenium { /** * Decorates a list of webElements . * @ param clazz The class of the decorated elements . Must be either WebElement or a type * annotated with Component or Frame . * @ param webElements The webElements that will be decorated . * @ return The list of decorated elements or an empty list if the type is not supported . * @ since 0.3.0 */ public static < E > List < E > decorateElements ( Class < E > clazz , List < WebElement > webElements ) { } }
List < E > elements = Lists . newArrayList ( ) ; for ( WebElement webElement : webElements ) { E element = decorateElement ( clazz , webElement ) ; if ( element != null ) elements . add ( element ) ; } return elements ;
public class CreateProgrammaticProductTemplates {
  /**
   * Runs the example: creates one programmatic product template targeting the
   * whole network's ad units, segmented by geo (US + Hong Kong).
   *
   * @param adManagerServices the services factory
   * @param session the session
   * @throws ApiException if the API request failed with one or more service errors
   * @throws RemoteException if the API request failed due to other errors
   */
  public static void runExample(AdManagerServices adManagerServices, AdManagerSession session)
      throws RemoteException {
    ProductTemplateServiceInterface productTemplateService =
        adManagerServices.get(session, ProductTemplateServiceInterface.class);
    NetworkServiceInterface networkService =
        adManagerServices.get(session, NetworkServiceInterface.class);

    // Create a programmatic product template (random suffix keeps names unique).
    ProductTemplate productTemplate = new ProductTemplate();
    productTemplate.setName(
        "Programmatic product template #" + new Random().nextInt(Integer.MAX_VALUE));
    productTemplate.setDescription(
        "This product template creates programmatic proposal line items "
            + "targeting all ad units with product segmentation on geo targeting.");

    // Name macro used to generate product names: one product per combination of
    // line item type, ad unit and location.
    productTemplate.setNameMacro("<line-item-type> - <ad-unit> - <template-name> - <location>");

    // DFP product type so the created proposal line items are trafficked in Ad Manager.
    productTemplate.setProductType(ProductType.DFP);

    // Required Marketplace information.
    ProductTemplateMarketplaceInfo marketplaceInfo = new ProductTemplateMarketplaceInfo();
    marketplaceInfo.setAdExchangeEnvironment(AdExchangeEnvironment.DISPLAY);
    productTemplate.setProductTemplateMarketplaceInfo(marketplaceInfo);

    productTemplate.setRateType(RateType.CPM);

    // Size of creatives that can be associated with the product template.
    CreativePlaceholder creativePlaceholder = new CreativePlaceholder();
    creativePlaceholder.setSize(new Size(300, 250, false));
    productTemplate.setCreativePlaceholders(new CreativePlaceholder[] {creativePlaceholder});

    // Type of proposal line item to be created from the product template.
    productTemplate.setLineItemType(LineItemType.STANDARD);

    // Root ad unit ID used to target the whole site.
    String rootAdUnitId = networkService.getCurrentNetwork().getEffectiveRootAdUnitId();

    // Ad unit targeting for the root ad unit (i.e. the whole network).
    AdUnitTargeting adUnitTargeting = new AdUnitTargeting();
    adUnitTargeting.setAdUnitId(rootAdUnitId);
    adUnitTargeting.setIncludeDescendants(true);

    // Geo targeting for the US (geo criteria ID 2840).
    Location countryLocation = new Location();
    countryLocation.setId(2840L);

    // Geo targeting for Hong Kong (geo criteria ID 2344).
    Location regionLocation = new Location();
    regionLocation.setId(2344L);

    GeoTargeting geoTargeting = new GeoTargeting();
    geoTargeting.setTargetedLocations(new Location[] {countryLocation, regionLocation});

    // Attach the targeting as product segmentation.
    ProductSegmentation productSegmentation = new ProductSegmentation();
    productSegmentation.setAdUnitSegments(new AdUnitTargeting[] {adUnitTargeting});
    productSegmentation.setGeoSegment(geoTargeting);
    productTemplate.setProductSegmentation(productSegmentation);

    // Create the product template on the server.
    ProductTemplate[] productTemplates =
        productTemplateService.createProductTemplates(new ProductTemplate[] {productTemplate});

    for (ProductTemplate createdProductTemplate : productTemplates) {
      System.out.printf(
          "A programmatic product template with ID %d and name '%s' was created.%n",
          createdProductTemplate.getId(), createdProductTemplate.getName());
    }
  }
}
public class DescribeVoicesRequestMarshaller {
    /**
     * Marshall the given parameter object.
     *
     * @param describeVoicesRequest the request to marshall; must not be null
     * @param protocolMarshaller    the target protocol marshaller
     * @throws SdkClientException if the request is null or marshalling fails
     */
    public void marshall(DescribeVoicesRequest describeVoicesRequest, ProtocolMarshaller protocolMarshaller) {
        if (describeVoicesRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Each field is written against its pre-built marshalling binding.
            protocolMarshaller.marshall(describeVoicesRequest.getLanguageCode(), LANGUAGECODE_BINDING);
            protocolMarshaller.marshall(describeVoicesRequest.getIncludeAdditionalLanguageCodes(), INCLUDEADDITIONALLANGUAGECODES_BINDING);
            protocolMarshaller.marshall(describeVoicesRequest.getNextToken(), NEXTTOKEN_BINDING);
        } catch (Exception e) {
            // Wrap any failure in the SDK's client exception, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class PathAndQuery { /** * According to RFC 3986 section 3.3 , path can contain a colon , except the first segment . * < p > Should allow the asterisk character in the path , query , or fragment components of a URL ( RFC2396 ) . * @ see < a href = " https : / / tools . ietf . org / html / rfc3986 # section - 3.3 " > RFC 3986 , section 3.3 < / a > */ private static boolean firstPathComponentContainsColon ( Bytes path ) { } }
final int length = path . length ; for ( int i = 1 ; i < length ; i ++ ) { final byte b = path . data [ i ] ; if ( b == '/' ) { break ; } if ( b == ':' ) { return true ; } } return false ;
public class DistributedIdQueue { /** * Put an item into the queue with the given Id < br > < br > * NOTE : if an upper bound was set via { @ link QueueBuilder # maxItems } , this method will * block until there is available space in the queue . * @ param item item * @ param itemId item Id * @ throws Exception errors */ public void put ( T item , String itemId ) throws Exception { } }
put ( item , itemId , 0 , null ) ;
public class AtomicSharedReference { /** * Call some function f on a threadsafe copy of the reference we are storing . * Should be used if you expect the function to take a while to run . * Saving the value of T after this call returns is COMPLETELY UNSAFE . Don ' t do it . * @ param f lambda ( T x ) * @ param < Z > Return type ; & lt ; ? extends Object & gt ; * @ return result of f * @ throws IOException if closing the local reference throws . */ public @ Nullable < Z > Z mapWithCopy ( Function < T , Z > f ) throws IOException { } }
final @ Nullable SharedReference < T > localRef = getCopy ( ) ; try { if ( localRef == null ) { return f . apply ( null ) ; } else { return f . apply ( localRef . get ( ) ) ; } } finally { if ( localRef != null ) localRef . close ( ) ; }
public class RedisConnectionException { /** * Create a new { @ link RedisConnectionException } given { @ code remoteAddress } and the { @ link Throwable cause } . * @ param remoteAddress remote address . * @ param cause the nested exception . * @ return the { @ link RedisConnectionException } . * @ since 5.1 */ public static RedisConnectionException create ( String remoteAddress , Throwable cause ) { } }
if ( remoteAddress == null ) { if ( cause instanceof RedisConnectionException ) { return new RedisConnectionException ( cause . getMessage ( ) , cause . getCause ( ) ) ; } return new RedisConnectionException ( null , cause ) ; } return new RedisConnectionException ( String . format ( "Unable to connect to %s" , remoteAddress ) , cause ) ;
public class Configurator {
    /**
     * Discovers all fields and setter methods within the protocol stack that
     * hold InetAddress, IpAddress, InetSocketAddress (and lists of such) and
     * for which a value is configured, and stores the result as:
     * protocol name -> property name -> {@link InetAddressInfo}.
     *
     * @param protocol_configs per-protocol configuration (parallel to protocols)
     * @param protocols        the instantiated protocol stack
     * @return the nested map described above
     * @throws Exception if a configured string value cannot be converted
     */
    public static Map<String, Map<String, InetAddressInfo>> createInetAddressMap(
            List<ProtocolConfiguration> protocol_configs, List<Protocol> protocols) throws Exception {
        // Map protocol -> Map<String,InetAddressInfo>, where the latter is protocol specific
        Map<String, Map<String, InetAddressInfo>> inetAddressMap = new HashMap<>();

        // protocol_configs and protocols are parallel lists - walk them together
        for (int i = 0; i < protocol_configs.size(); i++) {
            ProtocolConfiguration protocol_config = protocol_configs.get(i);
            Protocol protocol = protocols.get(i);
            String protocolName = protocol.getName();

            // regenerate the Properties which were destroyed during basic property processing
            Map<String, String> properties = new HashMap<>(protocol_config.getProperties());

            // Pass 1: @Property-annotated setter METHODS with a non-null,
            // InetAddress-related value
            Method[] methods = Util.getAllDeclaredMethodsWithAnnotations(protocol.getClass(), Property.class);
            for (int j = 0; j < methods.length; j++) {
                if (methods[j].isAnnotationPresent(Property.class) && isSetPropertyMethod(methods[j], protocol.getClass())) {
                    String propertyName = PropertyHelper.getPropertyName(methods[j]);
                    String propertyValue = properties.get(propertyName);
                    // a systemProperty attribute on the annotation overrides the configured value
                    String tmp = grabSystemProp(methods[j].getAnnotation(Property.class));
                    if (tmp != null)
                        propertyValue = tmp;
                    if (propertyValue != null && InetAddressInfo.isInetAddressRelated(methods[j])) {
                        Object converted = null;
                        try {
                            converted = PropertyHelper.getConvertedValue(protocol, methods[j], properties, propertyValue, false);
                        } catch (Exception e) {
                            throw new Exception("String value could not be converted for method " + propertyName + " in " + protocolName
                                    + " with default value " + propertyValue + ".Exception is " + e, e);
                        }
                        InetAddressInfo inetinfo = new InetAddressInfo(protocol, methods[j], properties, propertyValue, converted);
                        Map<String, InetAddressInfo> m = inetAddressMap.computeIfAbsent(protocolName, k -> new HashMap<>());
                        m.put(propertyName, inetinfo);
                    }
                }
            }

            // Pass 2: traverse the class hierarchy and collect @Property-annotated FIELDS
            for (Class<?> clazz = protocol.getClass(); clazz != null; clazz = clazz.getSuperclass()) {
                Field[] fields = clazz.getDeclaredFields();
                for (int j = 0; j < fields.length; j++) {
                    if (fields[j].isAnnotationPresent(Property.class)) {
                        String propertyName = PropertyHelper.getPropertyName(fields[j], properties);
                        String propertyValue = properties.get(propertyName);
                        // a systemProperty attribute on the annotation overrides the configured value
                        String tmp = grabSystemProp(fields[j].getAnnotation(Property.class));
                        if (tmp != null)
                            propertyValue = tmp;
                        // unlike the method pass, a null value is accepted here when the
                        // field uses a non-default converter
                        if ((propertyValue != null || !PropertyHelper.usesDefaultConverter(fields[j]))
                                && InetAddressInfo.isInetAddressRelated(fields[j])) {
                            Object converted = null;
                            try {
                                converted = PropertyHelper.getConvertedValue(protocol, fields[j], properties, propertyValue, false);
                            } catch (Exception e) {
                                // NOTE(review): message says "method" although this is the field pass
                                throw new Exception("String value could not be converted for method " + propertyName + " in " + protocolName
                                        + " with default value " + propertyValue + ".Exception is " + e, e);
                            }
                            InetAddressInfo inetinfo = new InetAddressInfo(protocol, fields[j], properties, propertyValue, converted);
                            Map<String, InetAddressInfo> m = inetAddressMap.computeIfAbsent(protocolName, k -> new HashMap<>());
                            m.put(propertyName, inetinfo);
                        }
                    }
                }
            }
        }
        return inetAddressMap;
    }
}
public class FuchsiaUtils { /** * Return the BundleCapability of a bundle exporting the package packageName . * @ param context The BundleContext * @ param packageName The package name * @ return the BundleCapability of a bundle exporting the package packageName */ private static BundleCapability getExportedPackage ( BundleContext context , String packageName ) { } }
List < BundleCapability > packages = new ArrayList < BundleCapability > ( ) ; for ( Bundle bundle : context . getBundles ( ) ) { BundleRevision bundleRevision = bundle . adapt ( BundleRevision . class ) ; for ( BundleCapability packageCapability : bundleRevision . getDeclaredCapabilities ( BundleRevision . PACKAGE_NAMESPACE ) ) { String pName = ( String ) packageCapability . getAttributes ( ) . get ( BundleRevision . PACKAGE_NAMESPACE ) ; if ( pName . equalsIgnoreCase ( packageName ) ) { packages . add ( packageCapability ) ; } } } Version max = Version . emptyVersion ; BundleCapability maxVersion = null ; for ( BundleCapability aPackage : packages ) { Version version = ( Version ) aPackage . getAttributes ( ) . get ( "version" ) ; if ( max . compareTo ( version ) <= 0 ) { max = version ; maxVersion = aPackage ; } } return maxVersion ;
public class CountSqlParser { /** * 处理WithItem * @ param withItemsList */ public void processWithItemsList ( List < WithItem > withItemsList ) { } }
if ( withItemsList != null && withItemsList . size ( ) > 0 ) { for ( WithItem item : withItemsList ) { processSelectBody ( item . getSelectBody ( ) ) ; } }
public class Nfs3 { /** * / * ( non - Javadoc ) * @ see com . emc . ecs . nfsclient . nfs . Nfs # sendMknod ( com . emc . ecs . nfsclient . nfs . NfsMknodRequest ) */ public Nfs3MknodResponse sendMknod ( NfsMknodRequest request ) throws IOException { } }
Nfs3MknodResponse response = new Nfs3MknodResponse ( ) ; _rpcWrapper . callRpcNaked ( request , response ) ; return response ;
public class JellyBuilder { /** * Captures the XML fragment generated by the given closure into dom4j DOM tree * and return the root element . * @ return null * if nothing was generated . */ public Element redirectToDom ( Closure c ) { } }
SAXContentHandler sc = new SAXContentHandler ( ) ; with ( new XMLOutput ( sc ) , c ) ; return sc . getDocument ( ) . getRootElement ( ) ;
public class AutoScalingPolicyUpdateMarshaller {
    /**
     * Marshall the given parameter object.
     *
     * @param autoScalingPolicyUpdate the object to marshall; must not be null
     * @param protocolMarshaller      the target protocol marshaller
     * @throws SdkClientException if the argument is null or marshalling fails
     */
    public void marshall(AutoScalingPolicyUpdate autoScalingPolicyUpdate, ProtocolMarshaller protocolMarshaller) {
        if (autoScalingPolicyUpdate == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Each field is written against its pre-built marshalling binding.
            protocolMarshaller.marshall(autoScalingPolicyUpdate.getPolicyName(), POLICYNAME_BINDING);
            protocolMarshaller.marshall(autoScalingPolicyUpdate.getTargetTrackingScalingPolicyConfiguration(), TARGETTRACKINGSCALINGPOLICYCONFIGURATION_BINDING);
        } catch (Exception e) {
            // Wrap any failure in the SDK's client exception, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class KeyVaultClientCustomImpl { /** * List certificates in the specified vault . * @ param vaultBaseUrl * The vault name , e . g . https : / / myvault . vault . azure . net * @ param serviceCallback * the async ServiceCallback to handle successful and failed * responses . * @ return the { @ link ServiceFuture } object */ public ServiceFuture < List < CertificateItem > > listCertificatesAsync ( final String vaultBaseUrl , final ListOperationCallback < CertificateItem > serviceCallback ) { } }
return getCertificatesAsync ( vaultBaseUrl , serviceCallback ) ;
public class DoradusClient { /** * Retrieve the map of commands keyed by service name * @ return map of commands */ public Map < String , List < String > > listCommands ( ) { } }
Map < String , List < String > > result = new HashMap < String , List < String > > ( ) ; for ( String cat : restMetadataJson . keySet ( ) ) { JsonObject commands = restMetadataJson . getJsonObject ( cat ) ; List < String > names = new ArrayList < String > ( ) ; for ( String commandName : commands . keySet ( ) ) { names . add ( commandName ) ; } result . put ( cat , names ) ; } return result ;
public class ApiOvhMe { /** * Initiate an email change procedure * REST : POST / me / changeEmail * @ param newEmail [ required ] New email to associate to your account */ public OvhTask changeEmail_POST ( String newEmail ) throws IOException { } }
String qPath = "/me/changeEmail" ; StringBuilder sb = path ( qPath ) ; HashMap < String , Object > o = new HashMap < String , Object > ( ) ; addBody ( o , "newEmail" , newEmail ) ; String resp = exec ( qPath , "POST" , sb . toString ( ) , o ) ; return convertTo ( resp , OvhTask . class ) ;
public class GetResponseUnmarshaller { /** * { @ inheritDoc } */ @ Override protected void onInt ( Integer val , String fieldName , JsonParser jp ) { } }
if ( resultStarted ) { found = true ; ClassUtil . setSilent ( entity , fieldName , val ) ; }
public class MetaDataService { /** * Create or replace * @ param entity entity * @ return is success */ public boolean createOrReplaceEntity ( Entity entity ) { } }
String entityName = entity . getName ( ) ; checkEntityIsEmpty ( entityName ) ; QueryPart < Entity > queryPart = new Query < Entity > ( "entities" ) . path ( entityName , true ) ; return httpClientManager . updateMetaData ( queryPart , put ( entity ) ) ;
public class RetryPolicy {
    /**
     * Specifies when retries should be aborted. Any failure that is assignable
     * from one of the {@code failures} will result in retries being aborted.
     *
     * @param failures the throwable types that abort retrying
     * @return this policy, for fluent chaining
     * @throws NullPointerException if {@code failures} is null
     * @throws IllegalArgumentException if {@code failures} is empty
     */
    public RetryPolicy<R> abortOn(List<Class<? extends Throwable>> failures) {
        Assert.notNull(failures, "failures");
        Assert.isTrue(!failures.isEmpty(), "failures cannot be empty");
        // The predicate matches any throwable assignable to one of the types.
        abortConditions.add(failurePredicateFor(failures));
        return this;
    }
}
public class ProcessEventCommand { /** * { @ inheritDoc } */ @ Override public void perform ( final Wave wave ) { } }
final JRebirthEvent event = wave . get ( EditorWaves . EVENT ) ; if ( event . eventType ( ) . name ( ) . startsWith ( "CREATE" ) ) { createBallModel ( event ) ; } else if ( event . eventType ( ) . name ( ) . startsWith ( "ACCESS" ) ) { accessBallModel ( event ) ; } else if ( event . eventType ( ) . name ( ) . startsWith ( "DESTROY" ) ) { destroyBallModel ( event ) ; }
public class EmailServiceImpl { /** * Instantiates and sets up a new { @ link SessionStrategy } instance and assigns a reference to it in the new setup object passed in . * @ param config The configuration to pull the class name for the class to instantiate . * @ param props The properties to configure the new { @ link SessionStrategy } with . * @ param newSetup The { @ link EmailSetup } reference to assign the new instance to . * @ return The new instance of the { @ link SessionStrategy } class requested . * @ throws ClassNotFoundException * @ throws InstantiationException * @ throws IllegalAccessException * @ throws EmailException * @ throws IllegalAccessException */ @ SuppressWarnings ( "unchecked" ) private SessionStrategy setSessionStrategy ( EmailConfiguration config , Dictionary < String , Object > props , EmailSetup newSetup ) throws ClassNotFoundException , InstantiationException , IllegalAccessException , EmailException , IllegalAccessException { } }
SessionStrategy strategy ; Class < ? > ssc = Class . forName ( config . getSessionStrategy ( ) ) ; if ( SessionStrategy . class . isAssignableFrom ( ssc ) ) { strategy = ( ( Class < SessionStrategy > ) ssc ) . newInstance ( ) ; strategy . configure ( props ) ; newSetup . sessionStrategy = strategy ; } else { throw new IllegalArgumentException ( config . getSessionStrategy ( ) + " is not an instance of " + SessionStrategy . class . getName ( ) ) ; } return strategy ;
public class FileUtils { /** * Create directories needed based on configuration . * @ param configuration * @ throws IOException */ public static void createDirectoriesOnWorker ( SubProcessConfiguration configuration ) throws IOException { } }
try { Path path = Paths . get ( configuration . getWorkerPath ( ) ) ; if ( ! path . toFile ( ) . exists ( ) ) { Files . createDirectories ( path ) ; LOG . info ( String . format ( "Created Folder %s " , path . toFile ( ) ) ) ; } } catch ( FileAlreadyExistsException ex ) { LOG . warn ( String . format ( " Tried to create folder %s which already existsed, this should not happen!" , configuration . getWorkerPath ( ) ) , ex ) ; }
public class ProposalLineItem {
    /**
     * Sets the baseRate value for this ProposalLineItem.
     *
     * @param baseRate The base rate of the {@code ProposalLineItem} in proposal
     *        currency.
     *        <span class="constraint Applicable">This attribute is applicable
     *        when: <ul><li>using programmatic guaranteed, using sales
     *        management.</li><li>not using programmatic, using sales
     *        management.</li></ul></span>
     *        <span class="constraint ReadOnly">This attribute is read-only
     *        when: <ul><li>using programmatic guaranteed, using sales
     *        management.</li><li>not using programmatic, using sales
     *        management.</li></ul></span>
     */
    public void setBaseRate(com.google.api.ads.admanager.axis.v201811.Money baseRate) {
        this.baseRate = baseRate;
    }
}