signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class ComponentImpl { /** * public Object setEL ( PageContext pc , String name , Object value ) { try { return set ( pc , name , * value ) ; } catch ( PageException e ) { return null ; } } */ @ Override public Object setEL ( PageContext pc , Collection . Key name , Object value ) { } }
try { return set ( pc , name , value ) ; } catch ( PageException e ) { return null ; }
public class MasterReplica { /** * Open a new connection to a Redis Master - Replica server / servers using the supplied { @ link RedisURI } and the supplied * { @ link RedisCodec codec } to encode / decode keys . * This { @ link MasterReplica } performs auto - discovery of nodes if the URI is a Redis Sentinel URI . Master / Replica URIs will * be treated as static topology and no additional hosts are discovered in such case . Redis Standalone Master / Replica will * discover the roles of the supplied { @ link RedisURI URIs } and issue commands to the appropriate node . * @ param redisClient the Redis client . * @ param codec Use this codec to encode / decode keys and values , must not be { @ literal null } . * @ param redisURIs the Redis server to connect to , must not be { @ literal null } . * @ param < K > Key type . * @ param < V > Value type . * @ return a new connection . */ public static < K , V > StatefulRedisMasterReplicaConnection < K , V > connect ( RedisClient redisClient , RedisCodec < K , V > codec , Iterable < RedisURI > redisURIs ) { } }
return new MasterReplicaConnectionWrapper < > ( MasterSlave . connect ( redisClient , codec , redisURIs ) ) ;
public class ClassUseMapper { /** * Map the AnnotationType to the ProgramElementDocs that use them as * type parameters . * @ param map the map the insert the information into . * @ param doc the doc whose type parameters are being checked . * @ param holder the holder that owns the type parameters . */ private < T extends PackageDoc > void mapAnnotations ( Map < String , List < T > > map , PackageDoc doc , T holder ) { } }
for ( AnnotationDesc annotation : doc . annotations ( ) ) { AnnotationTypeDoc annotationDoc = annotation . annotationType ( ) ; refList ( map , annotationDoc ) . add ( holder ) ; }
public class PostgreSqlExceptionTranslator { /** * Package private for testability * @ param pSqlException PostgreSQL exception * @ return translated validation exception */ MolgenisValidationException translateReadonlyViolation ( PSQLException pSqlException ) { } }
Matcher matcher = Pattern . compile ( "Updating read-only column \"?(.*?)\"? of table \"?(.*?)\"? with id \\[(.*?)] is not allowed" ) . matcher ( pSqlException . getServerErrorMessage ( ) . getMessage ( ) ) ; boolean matches = matcher . matches ( ) ; if ( ! matches ) { LOG . error ( ERROR_TRANSLATING_POSTGRES_EXC_MSG , pSqlException ) ; throw new RuntimeException ( ERROR_TRANSLATING_EXCEPTION_MSG , pSqlException ) ; } String colName = matcher . group ( 1 ) ; String tableName = matcher . group ( 2 ) ; String id = matcher . group ( 3 ) ; ConstraintViolation constraintViolation = new ConstraintViolation ( format ( "Updating read-only attribute '%s' of entity '%s' with id '%s' is not allowed." , tryGetAttributeName ( tableName , colName ) . orElse ( TOKEN_UNKNOWN ) , tryGetEntityTypeName ( tableName ) . orElse ( TOKEN_UNKNOWN ) , id ) ) ; return new MolgenisValidationException ( singleton ( constraintViolation ) ) ;
public class BuildAndPushMapper { /** * Create the voldemort key and value from the input key and value and map * it out for each of the responsible voldemort nodes * The output key is the md5 of the serialized key returned by makeKey ( ) . * The output value is the node _ id & partition _ id of the responsible node * followed by serialized value returned by makeValue ( ) OR if we have * setKeys flag on the serialized key and serialized value */ public void map ( byte [ ] keyBytes , byte [ ] valBytes , AbstractCollectorWrapper collector ) throws IOException { } }
// Compress key and values if required if ( keySerializerDefinition . hasCompression ( ) ) { keyBytes = keyCompressor . deflate ( keyBytes ) ; } if ( valueSerializerDefinition . hasCompression ( ) ) { valBytes = valueCompressor . deflate ( valBytes ) ; } // Get the output byte arrays ready to populate byte [ ] outputValue ; byte [ ] outputKey ; // Leave initial offset for ( a ) node id ( b ) partition id // since they are written later int offsetTillNow = 2 * ByteUtils . SIZE_OF_INT ; if ( getSaveKeys ( ) ) { // In order - 4 ( for node id ) + 4 ( partition id ) + 1 ( replica // type - primary | secondary | tertiary . . . ] + 4 ( key size ) // size ) + 4 ( value size ) + key + value outputValue = new byte [ valBytes . length + keyBytes . length + ByteUtils . SIZE_OF_BYTE + 4 * ByteUtils . SIZE_OF_INT ] ; // Write key length - leave byte for replica type offsetTillNow += ByteUtils . SIZE_OF_BYTE ; ByteUtils . writeInt ( outputValue , keyBytes . length , offsetTillNow ) ; // Write value length offsetTillNow += ByteUtils . SIZE_OF_INT ; ByteUtils . writeInt ( outputValue , valBytes . length , offsetTillNow ) ; // Write key offsetTillNow += ByteUtils . SIZE_OF_INT ; System . arraycopy ( keyBytes , 0 , outputValue , offsetTillNow , keyBytes . length ) ; // Write value offsetTillNow += keyBytes . length ; System . arraycopy ( valBytes , 0 , outputValue , offsetTillNow , valBytes . length ) ; // Generate MR key - upper 8 bytes of 16 byte md5 outputKey = ByteUtils . copy ( md5er . digest ( keyBytes ) , 0 , 2 * ByteUtils . SIZE_OF_INT ) ; } else { // In order - 4 ( for node id ) + 4 ( partition id ) + value outputValue = new byte [ valBytes . length + 2 * ByteUtils . SIZE_OF_INT ] ; // Write value System . arraycopy ( valBytes , 0 , outputValue , offsetTillNow , valBytes . length ) ; // Generate MR key - 16 byte md5 outputKey = md5er . digest ( keyBytes ) ; } // Generate partition and node list this key is destined for List < Integer > partitionList = routingStrategy . 
getPartitionList ( keyBytes ) ; Node [ ] partitionToNode = routingStrategy . getPartitionToNode ( ) ; // In buildPrimaryReplicasOnly mode , we want to push out no more than a single replica // for each key . Otherwise ( in vintage mode ) , we push out one copy per replica . int numberOfReplicasToPushTo = getBuildPrimaryReplicasOnly ( ) ? 1 : partitionList . size ( ) ; for ( int replicaType = 0 ; replicaType < numberOfReplicasToPushTo ; replicaType ++ ) { // Node id ByteUtils . writeInt ( outputValue , partitionToNode [ partitionList . get ( replicaType ) ] . getId ( ) , 0 ) ; if ( getSaveKeys ( ) ) { // Primary partition id ByteUtils . writeInt ( outputValue , partitionList . get ( 0 ) , ByteUtils . SIZE_OF_INT ) ; // Replica type ByteUtils . writeBytes ( outputValue , replicaType , 2 * ByteUtils . SIZE_OF_INT , ByteUtils . SIZE_OF_BYTE ) ; } else { // Partition id ByteUtils . writeInt ( outputValue , partitionList . get ( replicaType ) , ByteUtils . SIZE_OF_INT ) ; } collector . collect ( outputKey , outputValue ) ; } md5er . reset ( ) ;
public class VirtualABoxStatistics { /** * Returns one triple count from a particular mapping . * @ param datasourceId * The data source identifier . * @ param mappingId * The mapping identifier . * @ return The number of triples . */ public int getStatistics ( String datasourceId , String mappingId ) { } }
final HashMap < String , Integer > mappingStat = getStatistics ( datasourceId ) ; int triplesCount = mappingStat . get ( mappingId ) . intValue ( ) ; return triplesCount ;
public class ClientSideHandlerScriptRequestHandler { /** * Determines whether a response should get a 304 response and empty body , * according to etags and if - modified - since headers . * @ param request * @ param scriptEtag * @ return */ private boolean useNotModifiedHeader ( HttpServletRequest request , String scriptEtag ) { } }
long modifiedHeader = - 1 ; try { modifiedHeader = request . getDateHeader ( HEADER_IF_MODIFIED ) ; if ( modifiedHeader != - 1 ) modifiedHeader -= modifiedHeader % 1000 ; } catch ( RuntimeException ex ) { } String eTag = request . getHeader ( HEADER_IF_NONE ) ; if ( modifiedHeader == - 1 ) { return scriptEtag . equals ( eTag ) ; } else if ( null == eTag ) { return modifiedHeader <= START_TIME ; } else { return scriptEtag . equals ( eTag ) && modifiedHeader <= START_TIME ; }
public class AntClassLoader { /** * Get the manifest from the given jar , if it is indeed a jar and it has a * manifest * @ param container the File from which a manifest is required . * @ return the jar ' s manifest or null is the container is not a jar or it * has no manifest . * @ exception IOException if the manifest cannot be read . */ private Manifest getJarManifest ( File container ) throws IOException { } }
if ( container . isDirectory ( ) ) { return null ; } JarFile jarFile = ( JarFile ) jarFiles . get ( container ) ; if ( jarFile == null ) { return null ; } return jarFile . getManifest ( ) ;
public class InternalScanner { /** * Finds matches in a physical directory on a filesystem . Examines all * files within a directory - if the File object is not a directory , and ends with < i > . class < / i > * the file is loaded and tested to see if it is acceptable according to the Test . Operates * recursively to find classes within a folder structure matching the package structure . * @ param test a Test used to filter the classes that are discovered * @ param parent the package name up to this directory in the package hierarchy . E . g . if * / classes is in the classpath and we wish to examine files in / classes / org / apache then * the values of < i > parent < / i > would be < i > org / apache < / i > * @ param location a File object representing a directory * @ return List of packages to export . */ List < String > loadImplementationsInDirectory ( Test test , String parent , File location ) { } }
log . debug ( "Scanning directory " + location . getAbsolutePath ( ) + " parent: '" + parent + "'." ) ; File [ ] files = location . listFiles ( ) ; List < String > localClsssOrPkgs = new ArrayList < String > ( ) ; for ( File file : files ) { final String packageOrClass ; if ( parent == null || parent . length ( ) == 0 ) { packageOrClass = file . getName ( ) ; } else { packageOrClass = parent + "/" + file . getName ( ) ; } if ( file . isDirectory ( ) ) { localClsssOrPkgs . addAll ( loadImplementationsInDirectory ( test , packageOrClass , file ) ) ; // If the parent is empty , then assume the directory ' s jars should be searched } else if ( "" . equals ( parent ) && file . getName ( ) . endsWith ( ".jar" ) && test . matchesJar ( file . getName ( ) ) ) { localClsssOrPkgs . addAll ( loadImplementationsInJar ( test , file ) ) ; } else { String pkg = packageOrClass ; if ( pkg . endsWith ( SUFFIX_CLASS ) ) localClsssOrPkgs . add ( pkg ) ; } } return localClsssOrPkgs ;
public class CmsResourceUtil { /** * Returns the the lock for the given resource . < p > * @ return the lock the given resource */ public CmsLock getLock ( ) { } }
if ( m_lock == null ) { try { m_lock = getCms ( ) . getLock ( m_resource ) ; } catch ( Throwable e ) { m_lock = CmsLock . getNullLock ( ) ; LOG . error ( e . getLocalizedMessage ( ) , e ) ; } } return m_lock ;
public class ForkJoinTask { /** * Completes this task , and if not already aborted or cancelled , * returning the given value as the result of subsequent * invocations of { @ code join } and related operations . This method * may be used to provide results for asynchronous tasks , or to * provide alternative handling for tasks that would not otherwise * complete normally . Its use in other situations is * discouraged . This method is overridable , but overridden * versions must invoke { @ code super } implementation to maintain * guarantees . * @ param value the result value for this task */ public void complete ( V value ) { } }
try { setRawResult ( value ) ; } catch ( Throwable rex ) { setExceptionalCompletion ( rex ) ; return ; } setCompletion ( NORMAL ) ;
public class FilePath { /** * Gets the system ' s user dir ( pwd ) and convert it to the server ' s format . */ public static String getPwd ( ) { } }
String path = getUserDir ( ) ; path = path . replace ( getFileSeparatorChar ( ) , '/' ) ; if ( isWindows ( ) ) { path = convertFromWindowsPath ( path ) ; } return path ;
public class Check { /** * Checks to see if a vulnerability has been identified with a CVSS score * that is above the threshold set in the configuration . * @ param dependencies the list of dependency objects * @ throws BuildException thrown if a CVSS score is found that is higher * than the threshold set */ private void checkForFailure ( Dependency [ ] dependencies ) throws BuildException { } }
final StringBuilder ids = new StringBuilder ( ) ; for ( Dependency d : dependencies ) { for ( Vulnerability v : d . getVulnerabilities ( ) ) { if ( ( v . getCvssV2 ( ) != null && v . getCvssV2 ( ) . getScore ( ) >= failBuildOnCVSS ) || ( v . getCvssV3 ( ) != null && v . getCvssV3 ( ) . getBaseScore ( ) >= failBuildOnCVSS ) ) { if ( ids . length ( ) == 0 ) { ids . append ( v . getName ( ) ) ; } else { ids . append ( ", " ) . append ( v . getName ( ) ) ; } } } } if ( ids . length ( ) > 0 ) { final String msg ; if ( showSummary ) { msg = String . format ( "%n%nDependency-Check Failure:%n" + "One or more dependencies were identified with vulnerabilities that have a CVSS score greater than or equal to '%.1f': %s%n" + "See the dependency-check report for more details.%n%n" , failBuildOnCVSS , ids . toString ( ) ) ; } else { msg = String . format ( "%n%nDependency-Check Failure:%n" + "One or more dependencies were identified with vulnerabilities.%n%n" + "See the dependency-check report for more details.%n%n" ) ; } throw new BuildException ( msg ) ; }
public class ColumnDatapointIterator { /** * Copy this value to the output and advance to the next one . * @ param compQualifier * @ param compValue * @ return true if there is more data left in this column */ public void writeToBuffers ( ByteBufferList compQualifier , ByteBufferList compValue ) { } }
compQualifier . add ( qualifier , qualifier_offset , current_qual_length ) ; compValue . add ( value , value_offset , current_val_length ) ;
public class TextUtility { /** * 把表示数字含义的字符串转成整形 * @ param str 要转换的字符串 * @ return 如果是有意义的整数 , 则返回此整数值 。 否则 , 返回 - 1。 */ public static int cint ( String str ) { } }
if ( str != null ) try { int i = new Integer ( str ) . intValue ( ) ; return i ; } catch ( NumberFormatException e ) { } return - 1 ;
public class CmsJlanDiskInterface { /** * Converts a CIFS path to an OpenCms path by converting backslashes to slashes and translating special characters in the file name . < p > * @ param path the path to transform * @ return the OpenCms path for the given path */ protected static String getCmsPath ( String path ) { } }
String slashPath = path . replace ( '\\' , '/' ) ; // split path into components , translate each of them separately , then combine them again at the end String [ ] segments = slashPath . split ( "/" ) ; List < String > nonEmptySegments = new ArrayList < String > ( ) ; for ( String segment : segments ) { if ( segment . length ( ) > 0 ) { String translatedSegment = "*" . equals ( segment ) ? "*" : OpenCms . getResourceManager ( ) . getFileTranslator ( ) . translateResource ( segment ) ; nonEmptySegments . add ( translatedSegment ) ; } } String result = "/" + Joiner . on ( "/" ) . join ( nonEmptySegments ) ; return result ;
public class DefaultProcedureManager { /** * Create a statement like : * " update < table > set { < each - column = ? > . . . } where { < pkey - column = ? > . . . } * for a replicated table . */ private static String generateCrudReplicatedUpdate ( Table table , Constraint pkey ) { } }
StringBuilder sb = new StringBuilder ( ) ; sb . append ( "UPDATE " + table . getTypeName ( ) + " SET " ) ; generateCrudExpressionColumns ( table , sb ) ; generateCrudPKeyWhereClause ( null , pkey , sb ) ; sb . append ( ';' ) ; return sb . toString ( ) ;
public class GetObjectRequest { /** * Sets the optional progress listener for receiving updates about object * download status , and returns this updated object so that additional method * calls can be chained together . * @ param progressListener * The legacy progress listener that is used exclusively for Amazon S3 client . * @ return This updated GetObjectRequest object . * @ deprecated use { @ link # withGeneralProgressListener ( ProgressListener ) } * instead . */ @ Deprecated public GetObjectRequest withProgressListener ( com . ibm . cloud . objectstorage . services . s3 . model . ProgressListener progressListener ) { } }
setProgressListener ( progressListener ) ; return this ;
public class DeepCopy { /** * Returns a copy of the object , or null if the object cannot * be serialized . * @ param orig an < code > Object < / code > value * @ return a deep copy of that Object * @ exception NotSerializableException if an error occurs */ public static Object copy ( Object orig ) throws NotSerializableException { } }
Object obj = null ; try { // Write the object out to a byte array ByteArrayOutputStream bos = new ByteArrayOutputStream ( ) ; ObjectOutputStream out = new ObjectOutputStream ( bos ) ; out . writeObject ( orig ) ; out . flush ( ) ; out . close ( ) ; // Make an input stream from the byte array and read // a copy of the object back in . ObjectInputStream in = new ObjectInputStream ( new ByteArrayInputStream ( bos . toByteArray ( ) ) ) ; obj = in . readObject ( ) ; } catch ( NotSerializableException e ) { throw e ; } catch ( IOException e ) { e . printStackTrace ( ) ; } catch ( ClassNotFoundException cnfe ) { cnfe . printStackTrace ( ) ; } return obj ;
public class Matrix3d { /** * Extract the Euler angles from the rotation represented by < code > this < / code > matrix and store the extracted Euler angles in < code > dest < / code > . * This method assumes that < code > this < / code > matrix only represents a rotation without scaling . * Note that the returned Euler angles must be applied in the order < code > Z * Y * X < / code > to obtain the identical matrix . * This means that calling { @ link Matrix3d # rotateZYX ( double , double , double ) } using the obtained Euler angles will yield * the same rotation as the original matrix from which the Euler angles were obtained , so in the below code the matrix * < code > m2 < / code > should be identical to < code > m < / code > ( disregarding possible floating - point inaccuracies ) . * < pre > * Matrix3d m = . . . ; / / & lt ; - matrix only representing rotation * Matrix3d n = new Matrix3d ( ) ; * n . rotateZYX ( m . getEulerAnglesZYX ( new Vector3d ( ) ) ) ; * < / pre > * Reference : < a href = " http : / / nghiaho . com / ? page _ id = 846 " > http : / / nghiaho . com / < / a > * @ param dest * will hold the extracted Euler angles * @ return dest */ public Vector3d getEulerAnglesZYX ( Vector3d dest ) { } }
dest . x = ( float ) Math . atan2 ( m12 , m22 ) ; dest . y = ( float ) Math . atan2 ( - m02 , Math . sqrt ( m12 * m12 + m22 * m22 ) ) ; dest . z = ( float ) Math . atan2 ( m01 , m00 ) ; return dest ;
public class Whitelist { /** * Test if the supplied attribute is allowed by this whitelist for this tag * @ param tagName tag to consider allowing the attribute in * @ param el element under test , to confirm protocol * @ param attr attribute under test * @ return true if allowed */ protected boolean isSafeAttribute ( String tagName , Element el , Attribute attr ) { } }
TagName tag = TagName . valueOf ( tagName ) ; AttributeKey key = AttributeKey . valueOf ( attr . getKey ( ) ) ; Set < AttributeKey > okSet = attributes . get ( tag ) ; if ( okSet != null && okSet . contains ( key ) ) { if ( protocols . containsKey ( tag ) ) { Map < AttributeKey , Set < Protocol > > attrProts = protocols . get ( tag ) ; // ok if not defined protocol ; otherwise test return ! attrProts . containsKey ( key ) || testValidProtocol ( el , attr , attrProts . get ( key ) ) ; } else { // attribute found , no protocols defined , so OK return true ; } } // might be an enforced attribute ? Map < AttributeKey , AttributeValue > enforcedSet = enforcedAttributes . get ( tag ) ; if ( enforcedSet != null ) { Attributes expect = getEnforcedAttributes ( tagName ) ; String attrKey = attr . getKey ( ) ; if ( expect . hasKeyIgnoreCase ( attrKey ) ) { return expect . getIgnoreCase ( attrKey ) . equals ( attr . getValue ( ) ) ; } } // no attributes defined for tag , try : all tag return ! tagName . equals ( ":all" ) && isSafeAttribute ( ":all" , el , attr ) ;
public class CDownloadRequest { /** * Get the HTTP headers to be used for download request . * Default implementation only handles Range header . * @ return The headers */ public Headers getHttpHeaders ( ) { } }
Headers headers = new Headers ( ) ; if ( byteRange != null ) { StringBuilder rangeBuilder = new StringBuilder ( "bytes=" ) ; long start ; if ( byteRange . offset >= 0 ) { rangeBuilder . append ( byteRange . offset ) ; start = byteRange . offset ; } else { start = 1 ; } rangeBuilder . append ( "-" ) ; if ( byteRange . length > 0 ) { rangeBuilder . append ( start + byteRange . length - 1 ) ; } Header rangeHeader = new BasicHeader ( "Range" , rangeBuilder . toString ( ) ) ; headers . addHeader ( rangeHeader ) ; } return headers ;
public class NodeUtil { /** * Returns the immediately enclosing target node for a given target node , or null if none found . * @ see # getRootTarget ( Node ) for examples * @ param targetNode */ @ Nullable private static Node getEnclosingTarget ( Node targetNode ) { } }
checkState ( checkNotNull ( targetNode ) . isValidAssignmentTarget ( ) , targetNode ) ; Node parent = checkNotNull ( targetNode . getParent ( ) , targetNode ) ; boolean targetIsFirstChild = parent . getFirstChild ( ) == targetNode ; if ( parent . isDefaultValue ( ) || parent . isRest ( ) ) { // in ` ( [ something = targetNode ] = x ) ` targetNode isn ' t actually acting // as a target . checkState ( targetIsFirstChild , parent ) ; // The DEFAULT _ VALUE or REST occupies the place where the assignment target it contains would // otherwise be in the AST , so pretend it is the target for the logic below . targetNode = parent ; parent = checkNotNull ( targetNode . getParent ( ) ) ; targetIsFirstChild = targetNode == parent . getFirstChild ( ) ; } switch ( parent . getToken ( ) ) { case ARRAY_PATTERN : // e . g . ( [ targetNode ] = something ) return parent ; case OBJECT_PATTERN : // e . g . ( { . . . rest } = something ) ; return parent ; case COMPUTED_PROP : // e . g . ( { [ expression ] : targetNode } = something ) // e . g . ( { [ expression ] : targetNode = default } = something ) // make sure the effective target ( targetNode or DEFAULT _ VALUE containing it ) // isn ' t the expression part checkState ( ! targetIsFirstChild , parent ) ; // otherwise the same as STRING _ KEY so fall through case STRING_KEY : // e . g . ( { parent : targetNode } = something ) Node grandparent = checkNotNull ( parent . getParent ( ) , parent ) ; checkState ( grandparent . isObjectPattern ( ) , grandparent ) ; return grandparent ; case PARAM_LIST : // e . g . ` function foo ( targetNode ) { } ` case LET : case CONST : case VAR : // non - destructured declarations // e . g . ` let targetNode = 3 ; ` return null ; case FUNCTION : case CLASS : // e . g . ` function targetNode ( ) { } ` // e . g . ` class targetNode { } ` checkState ( targetIsFirstChild , targetNode ) ; return null ; case FOR_IN : case FOR_OF : case FOR_AWAIT_OF : // e . g . 
` for ( { length } in obj ) { } ` / / targetNode is ` { length } ` // e . g . ` for ( { length } of obj ) { } ` / / targetNode is ` { length } ` checkState ( targetIsFirstChild , targetNode ) ; return null ; case DESTRUCTURING_LHS : // destructured declarations // e . g . ` let [ a ] = 3 ` ; / / targetNode is ` [ a ] ` checkState ( targetIsFirstChild , targetNode ) ; return null ; case IMPORT : // e . g . ` import targetNode from ' . / foo / bar ' ; ` return null ; case IMPORT_SPEC : // e . g . ` import { bar as targetNode } from ' . / foo / bar ' ; ` // e . g . ` import { targetNode } from ' . / foo / bar ' ; ` / / AST will have { targetNode as targetNode } checkState ( ! targetIsFirstChild , parent ) ; return null ; case CATCH : // e . g . ` try { } catch ( foo ) { } ` return null ; default : // e . g . targetNode = something checkState ( isAssignmentOp ( parent ) && targetIsFirstChild , parent ) ; return null ; }
public class ParsedAddressGrouping { /** * Across an address prefixes are : * IPv6 : ( null ) : . . . : ( null ) : ( 1 to 16 ) : ( 0 ) : . . . : ( 0) * or IPv4 : . . . ( null ) . ( 1 to 8 ) . ( 0 ) . . . */ public static Integer getSegmentPrefixLength ( int bitsPerSegment , Integer prefixLength , int segmentIndex ) { } }
if ( prefixLength != null ) { return getPrefixedSegmentPrefixLength ( bitsPerSegment , prefixLength , segmentIndex ) ; } return null ;
public class Funcs { /** * Returns a function that adds its argument to a map , using a supplied function to determine the key . * @ param map The map to modify * @ param keyBuilder A function to determine a key for each value * @ return A new function that adds its argument to < code > map < / code > by using * < code > keyBuilder . apply ( argument ) < / code > to determine the key . */ public static < T , K > Function < T , Void > addTo ( final Map < K , T > map , final Function < ? super T , ? extends K > keyMaker ) { } }
return new Function < T , Void > ( ) { public Void apply ( T arg ) { map . put ( keyMaker . apply ( arg ) , arg ) ; return null ; } } ;
public class Assistant { /** * Create user input example . * Add a new user input example to an intent . * This operation is limited to 1000 requests per 30 minutes . For more information , see * * Rate limiting * * . * @ param createExampleOptions the { @ link CreateExampleOptions } containing the options for the call * @ return a { @ link ServiceCall } with a response type of { @ link Example } */ public ServiceCall < Example > createExample ( CreateExampleOptions createExampleOptions ) { } }
Validator . notNull ( createExampleOptions , "createExampleOptions cannot be null" ) ; String [ ] pathSegments = { "v1/workspaces" , "intents" , "examples" } ; String [ ] pathParameters = { createExampleOptions . workspaceId ( ) , createExampleOptions . intent ( ) } ; RequestBuilder builder = RequestBuilder . post ( RequestBuilder . constructHttpUrl ( getEndPoint ( ) , pathSegments , pathParameters ) ) ; builder . query ( "version" , versionDate ) ; Map < String , String > sdkHeaders = SdkCommon . getSdkHeaders ( "conversation" , "v1" , "createExample" ) ; for ( Entry < String , String > header : sdkHeaders . entrySet ( ) ) { builder . header ( header . getKey ( ) , header . getValue ( ) ) ; } builder . header ( "Accept" , "application/json" ) ; final JsonObject contentJson = new JsonObject ( ) ; contentJson . addProperty ( "text" , createExampleOptions . text ( ) ) ; if ( createExampleOptions . mentions ( ) != null ) { contentJson . add ( "mentions" , GsonSingleton . getGson ( ) . toJsonTree ( createExampleOptions . mentions ( ) ) ) ; } builder . bodyJson ( contentJson ) ; return createServiceCall ( builder . build ( ) , ResponseConverterUtils . getObject ( Example . class ) ) ;
public class ReferenceCache { /** * Create a reference factory of the given type . * @ param type type of reference factory * @ return a reference factory */ private final ReferenceFactory < K , V > toFactory ( Type type ) { } }
switch ( type ) { case Hard : return new HardReferenceFactory ( ) ; case Weak : return new WeakReferenceFactory ( ) ; case Soft : return new SoftReferenceFactory ( ) ; default : return null ; }
public class Signatures { /** * Finds the best method for the given arguments . * @ param clazz * @ param name * @ param args * @ return method * @ throws AmbiguousSignatureMatchException if multiple methods match equally */ public static Method bestMethod ( Class < ? > clazz , String name , Object [ ] args ) throws AmbiguousMethodMatchException { } }
return bestMethod ( collectMethods ( clazz , name ) , args ) ;
public class ImportApiKeysRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( ImportApiKeysRequest importApiKeysRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( importApiKeysRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( importApiKeysRequest . getBody ( ) , BODY_BINDING ) ; protocolMarshaller . marshall ( importApiKeysRequest . getFormat ( ) , FORMAT_BINDING ) ; protocolMarshaller . marshall ( importApiKeysRequest . getFailOnWarnings ( ) , FAILONWARNINGS_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class UriEscaper { /** * Escapes a string as a URI * @ param url the path to escape * @ param strict whether or not to do strict escaping * @ return the escaped string */ public static String escape ( final String url , final boolean strict ) { } }
return ( strict ? STRICT_ESCAPER : ESCAPER ) . escape ( url ) ;
public class SDMath { /** * see { @ link # eye ( String , int , int , DataType , int . . . ) } */ public SDVariable eye ( int rows , int cols , DataType dataType , int ... batchDimension ) { } }
return eye ( null , rows , cols , dataType , batchDimension ) ;
public class WRadioButtonSelectExample { /** * Make a simple editable example without a frame . */ private void makeFramelessExample ( ) { } }
add ( new WHeading ( HeadingLevel . H3 , "WRadioButtonSelect without its frame" ) ) ; add ( new ExplanatoryText ( "When a WRadioButtonSelect is frameless it loses some of its coherence, especially when its WLabel is hidden or " + "replaced by a toolTip. Using a frameless WRadioButtonSelect is useful within an existing WFieldLayout as it can provide a more " + "consistent user interface but only if it has a relatively small number of options." ) ) ; final WRadioButtonSelect select = new SelectWithSelection ( "australian_state" ) ; select . setFrameless ( true ) ; add ( new WLabel ( "Frameless with default selection" , select ) ) ; add ( select ) ;
public class JDBCConnection { /** * # ifdef JAVA4 */ public synchronized PreparedStatement prepareStatement ( String sql , int resultSetType , int resultSetConcurrency , int resultSetHoldability ) throws SQLException { } }
checkClosed ( ) ; try { return new JDBCPreparedStatement ( this , sql , resultSetType , resultSetConcurrency , resultSetHoldability , ResultConstants . RETURN_NO_GENERATED_KEYS , null , null ) ; } catch ( HsqlException e ) { throw Util . sqlException ( e ) ; }
public class PathNormalizer { /** * Removes leading and trailing separators from a path , and removes double * separators ( / / is replaced by / ) . * @ param path * the path to normalize * @ return the normalized path */ public static final String normalizePath ( String path ) { } }
String normalizedPath = path . replaceAll ( "//" , JawrConstant . URL_SEPARATOR ) ; StringTokenizer tk = new StringTokenizer ( normalizedPath , JawrConstant . URL_SEPARATOR ) ; StringBuilder sb = new StringBuilder ( ) ; while ( tk . hasMoreTokens ( ) ) { sb . append ( tk . nextToken ( ) ) ; if ( tk . hasMoreTokens ( ) ) sb . append ( JawrConstant . URL_SEPARATOR ) ; } return sb . toString ( ) ;
public class PseudoNthSpecifierChecker { /** * Add { @ code nth - last - of - type } elements . * @ see < a href = " http : / / www . w3 . org / TR / css3 - selectors / # nth - last - of - type - pseudo " > < code > : nth - last - of - type < / code > pseudo - class < / a > */ private void addNthLastOfType ( ) { } }
for ( Node node : nodes ) { int count = 1 ; Node n = DOMHelper . getNextSiblingElement ( node ) ; while ( n != null ) { if ( DOMHelper . getNodeName ( n ) . equals ( DOMHelper . getNodeName ( node ) ) ) { count ++ ; } n = DOMHelper . getNextSiblingElement ( n ) ; } if ( specifier . isMatch ( count ) ) { result . add ( node ) ; } }
public class LongIntSortedVector {

    /**
     * Sets all values in this vector to those in the other vector.
     *
     * @param other the vector to copy from
     */
    public void set(LongIntSortedVector other) {
        this.used = other.used;
        // deep-copy the backing arrays so the two vectors share no state
        this.indices = LongArrays.copyOf(other.indices);
        this.values = IntArrays.copyOf(other.values);
    }
}
public class StringIterate { /** * Transform the int code point elements to a new string using the specified function { @ code function } . * @ since 7.0 */ public static String collectCodePoint ( String string , CodePointFunction function ) { } }
int size = string . length ( ) ; StringBuilder builder = new StringBuilder ( size ) ; for ( int i = 0 ; i < size ; ) { int codePoint = string . codePointAt ( i ) ; builder . appendCodePoint ( function . valueOf ( codePoint ) ) ; i += Character . charCount ( codePoint ) ; } return builder . toString ( ) ;
public class DockerAgentUtils {

    /**
     * Retrieves from the Jenkins master and all agents the docker images which
     * have been registered for a specific build-info ID. Only images for which
     * manifests have been captured are returned.
     *
     * @param buildInfoId the build-info ID the images were registered under
     * @param listener used to report per-node collection failures
     * @return the images collected from the master and every reachable agent
     * @throws IOException
     * @throws InterruptedException
     */
    public static List<DockerImage> getDockerImagesFromAgents(final int buildInfoId, TaskListener listener) throws IOException, InterruptedException {
        List<DockerImage> dockerImages = new ArrayList<DockerImage>();
        // Collect images from the master:
        dockerImages.addAll(getAndDiscardImagesByBuildId(buildInfoId));
        // Collect images from all the agents:
        List<Node> nodes = Jenkins.getInstance().getNodes();
        for (Node node : nodes) {
            // skip agents without an open remoting channel (offline)
            if (node == null || node.getChannel() == null) {
                continue;
            }
            try {
                // run the same collection remotely inside the agent JVM
                List<DockerImage> partialDockerImages = node.getChannel().call(new MasterToSlaveCallable<List<DockerImage>, IOException>() {
                    public List<DockerImage> call() throws IOException {
                        List<DockerImage> dockerImages = new ArrayList<DockerImage>();
                        dockerImages.addAll(getAndDiscardImagesByBuildId(buildInfoId));
                        return dockerImages;
                    }
                });
                dockerImages.addAll(partialDockerImages);
            } catch (Exception e) {
                // a single unreachable agent must not abort the whole collection
                listener.getLogger().println("Could not collect docker images from Jenkins node '" + node.getDisplayName() + "' due to: " + e.getMessage());
            }
        }
        return dockerImages;
    }
}
public class X509Utils {

    /**
     * Creates a new client certificate PKCS#12 and PEM store. Any existing
     * stores are destroyed.
     *
     * @param clientMetadata a container for dynamic parameters needed for generation
     * @param caPrivateKey the CA private key used to sign the new certificate
     * @param caCert the CA certificate (issuer)
     * @param targetFolder folder receiving the .cer, .p12 and .pem files
     * @return the generated client certificate
     */
    public static X509Certificate newClientCertificate(X509Metadata clientMetadata, PrivateKey caPrivateKey, X509Certificate caCert, File targetFolder) {
        try {
            KeyPair pair = newKeyPair();
            X500Name userDN = buildDistinguishedName(clientMetadata);
            X500Name issuerDN = new X500Name(PrincipalUtil.getIssuerX509Principal(caCert).getName());
            // create a new certificate signed by the Fathom CA certificate;
            // current time millis doubles as a (non-cryptographic) serial number
            X509v3CertificateBuilder certBuilder = new JcaX509v3CertificateBuilder(issuerDN, BigInteger.valueOf(System.currentTimeMillis()), clientMetadata.notBefore, clientMetadata.notAfter, userDN, pair.getPublic());
            JcaX509ExtensionUtils extUtils = new JcaX509ExtensionUtils();
            certBuilder.addExtension(X509Extension.subjectKeyIdentifier, false, extUtils.createSubjectKeyIdentifier(pair.getPublic()));
            // BasicConstraints(false): end-entity certificate, not a CA
            certBuilder.addExtension(X509Extension.basicConstraints, false, new BasicConstraints(false));
            certBuilder.addExtension(X509Extension.authorityKeyIdentifier, false, extUtils.createAuthorityKeyIdentifier(caCert.getPublicKey()));
            certBuilder.addExtension(X509Extension.keyUsage, true, new KeyUsage(KeyUsage.keyEncipherment | KeyUsage.digitalSignature));
            if (!Strings.isNullOrEmpty(clientMetadata.emailAddress)) {
                // embed the e-mail address as a subject alternative name
                GeneralNames subjectAltName = new GeneralNames(new GeneralName(GeneralName.rfc822Name, clientMetadata.emailAddress));
                certBuilder.addExtension(X509Extension.subjectAlternativeName, false, subjectAltName);
            }
            ContentSigner signer = new JcaContentSignerBuilder(SIGNING_ALGORITHM).setProvider(BC).build(caPrivateKey);
            X509Certificate userCert = new JcaX509CertificateConverter().setProvider(BC).getCertificate(certBuilder.build(signer));
            // tag the private key with the subject key id so PKCS#12 tools can pair them
            PKCS12BagAttributeCarrier bagAttr = (PKCS12BagAttributeCarrier) pair.getPrivate();
            bagAttr.setBagAttribute(PKCSObjectIdentifiers.pkcs_9_at_localKeyId, extUtils.createSubjectKeyIdentifier(pair.getPublic()));
            // confirm the validity of the user certificate
            userCert.checkValidity();
            userCert.verify(caCert.getPublicKey());
            // NOTE(review): the result of this equals() is discarded, so it performs
            // no actual check; an assertion/throw was presumably intended — confirm.
            userCert.getIssuerDN().equals(caCert.getSubjectDN());
            // verify user certificate chain
            verifyChain(userCert, caCert);
            targetFolder.mkdirs();
            // save certificate, stamped with unique name: yyyyMMdd, then yyyyMMdd_a, _b, ...
            String date = new SimpleDateFormat("yyyyMMdd").format(new Date());
            String id = date;
            File certFile = new File(targetFolder, id + ".cer");
            int count = 0;
            while (certFile.exists()) {
                id = date + "_" + Character.toString((char) (0x61 + count));
                certFile = new File(targetFolder, id + ".cer");
                count++;
            }
            // save user private key, user certificate and CA certificate to a PKCS#12 store
            File p12File = new File(targetFolder, clientMetadata.commonName + ".p12");
            if (p12File.exists()) {
                p12File.delete();
            }
            KeyStore userStore = openKeyStore(p12File, clientMetadata.password);
            // NOTE(review): the key entry is stored with a null key password — only the
            // store-level password from saveKeyStore() protects it; confirm intended.
            userStore.setKeyEntry(MessageFormat.format("Fathom ({0}) {1} {2}", clientMetadata.serverHostname, clientMetadata.userDisplayname, id), pair.getPrivate(), null, new Certificate[] { userCert });
            userStore.setCertificateEntry(MessageFormat.format("Fathom ({0}) Certificate Authority", clientMetadata.serverHostname), caCert);
            saveKeyStore(p12File, userStore, clientMetadata.password);
            // save user private key, user certificate, and CA certificate to a PEM store
            File pemFile = new File(targetFolder, clientMetadata.commonName + ".pem");
            if (pemFile.exists()) {
                pemFile.delete();
            }
            PEMWriter pemWriter = new PEMWriter(new FileWriter(pemFile));
            // the private key is written encrypted (DES-EDE3-CBC) with the user's password
            pemWriter.writeObject(pair.getPrivate(), "DES-EDE3-CBC", clientMetadata.password.toCharArray(), new SecureRandom());
            pemWriter.writeObject(userCert);
            pemWriter.writeObject(caCert);
            pemWriter.flush();
            pemWriter.close();
            // save certificate after successfully creating the key stores
            saveCertificate(userCert, certFile);
            // update serial number in metadata object
            clientMetadata.serialNumber = userCert.getSerialNumber().toString();
            return userCert;
        } catch (Throwable t) {
            throw new RuntimeException("Failed to generate client certificate!", t);
        }
    }
}
public class JdbcUtil { /** * Safely closes resources and logs errors . * @ param stmt Statement to close */ public static void close ( Statement stmt ) { } }
if ( stmt != null ) { try { stmt . close ( ) ; } catch ( SQLException ex ) { logger . error ( "" , ex ) ; } }
public class DateOfBirthTypeImpl { /** * { @ inheritDoc } */ @ Override public void setDate ( int year , int month , int dayOfMonth ) { } }
this . setDate ( new LocalDate ( year , month , dayOfMonth ) ) ;
public class NetworkWatchersInner {

    /**
     * Get the last completed troubleshooting result on a specified resource.
     *
     * @param resourceGroupName The name of the resource group.
     * @param networkWatcherName The name of the network watcher resource.
     * @param targetResourceId The target resource ID to query the troubleshooting result.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the TroubleshootingResultInner object if successful.
     */
    public TroubleshootingResultInner beginGetTroubleshootingResult(String resourceGroupName, String networkWatcherName, String targetResourceId) {
        // block on the async call and unwrap the service response body
        return beginGetTroubleshootingResultWithServiceResponseAsync(resourceGroupName, networkWatcherName, targetResourceId).toBlocking().single().body();
    }
}
public class GroupProcessor { /** * @ see org . newdawn . slick . svg . inkscape . ElementProcessor # process ( org . newdawn . slick . svg . Loader , org . w3c . dom . Element , org . newdawn . slick . svg . Diagram , org . newdawn . slick . geom . Transform ) */ public void process ( Loader loader , Element element , Diagram diagram , Transform t ) throws ParsingException { } }
Transform transform = Util . getTransform ( element ) ; transform = new Transform ( t , transform ) ; loader . loadChildren ( element , transform ) ;
public class LogAnalyticsDataClientImpl { /** * Execute an Analytics query . * Executes an Analytics query for data . [ Here ] ( https : / / dev . loganalytics . io / documentation / Using - the - API ) is an example for using POST with an Analytics query . * @ param workspaceId ID of the workspace . This is Workspace ID from the Properties blade in the Azure portal . * @ param body The Analytics query . Learn more about the [ Analytics query syntax ] ( https : / / azure . microsoft . com / documentation / articles / app - insights - analytics - reference / ) * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the QueryResults object */ public Observable < QueryResults > queryAsync ( String workspaceId , QueryBody body ) { } }
return queryWithServiceResponseAsync ( workspaceId , body ) . map ( new Func1 < ServiceResponse < QueryResults > , QueryResults > ( ) { @ Override public QueryResults call ( ServiceResponse < QueryResults > response ) { return response . body ( ) ; } } ) ;
public class ArrayHeap {

    /**
     * On the assumption that leftChild(entry) and rightChild(entry) satisfy
     * the heap property, restores the heap property at {@code entry} by
     * percolating its element downwards. Iterative rather than recursive for
     * (marginal) speed.
     *
     * Note: swap() exchanges the entries' positions, so after a swap the same
     * {@code current} reference denotes the percolating element at its new,
     * deeper position and the loop simply continues from it.
     */
    private void heapifyDown(HeapEntry<E> entry) {
        HeapEntry<E> current = entry;
        while (true) {
            // find the smallest among current and its (up to two) children
            HeapEntry<E> smallest = current;
            HeapEntry<E> left = leftChild(current);
            if (left != null && compare(smallest, left) > 0) {
                smallest = left;
            }
            HeapEntry<E> right = rightChild(current);
            if (right != null && compare(smallest, right) > 0) {
                smallest = right;
            }
            if (smallest == current) {
                // heap property holds here; done
                break;
            }
            swap(smallest, current);
        }
    }
}
public class FeatureExpressions { /** * Decorate a boolean feature to make it more expressive . */ public static < S > Feature < S , Boolean > is ( final Feature < ? super S , Boolean > feature ) { } }
return new Feature < S , Boolean > ( ) { @ Override public Boolean of ( S object ) { return feature . of ( object ) ; } @ Override public void describeTo ( Description description ) { description . appendText ( "is " ) . appendDescriptionOf ( feature ) ; } } ;
public class ResourceUtils { /** * Resolve the given resource location to a { @ code java . net . URL } . * < p > Does not check whether the URL actually exists ; simply returns * the URL that the given location would correspond to . * @ param resourceLocation the resource location to resolve : either a * " classpath : " pseudo URL , a " file : " URL , or a plain file path * @ return a corresponding URL object * @ throws FileNotFoundException if the resource cannot be resolved to a URL */ public static URL getURL ( String resourceLocation ) throws FileNotFoundException { } }
Assert . notNull ( resourceLocation , "Resource location must not be null" ) ; if ( resourceLocation . startsWith ( CLASSPATH_URL_PREFIX ) ) { String path = resourceLocation . substring ( CLASSPATH_URL_PREFIX . length ( ) ) ; ClassLoader cl = ClassUtils . getDefaultClassLoader ( ) ; URL url = ( cl != null ? cl . getResource ( path ) : ClassLoader . getSystemResource ( path ) ) ; if ( url == null ) { String description = "class path resource [" + path + "]" ; throw new FileNotFoundException ( description + " cannot be resolved to URL because it does not exist" ) ; } return url ; } try { // try URL return new URL ( resourceLocation ) ; } catch ( MalformedURLException ex ) { // no URL - > treat as file path try { return new File ( resourceLocation ) . toURI ( ) . toURL ( ) ; } catch ( MalformedURLException ex2 ) { throw new FileNotFoundException ( "Resource location [" + resourceLocation + "] is neither a URL not a well-formed file path" ) ; } }
public class Utils { /** * Escape the given literal < tt > value < / tt > and append it to the string builder < tt > sbuf < / tt > . If * < tt > sbuf < / tt > is < tt > null < / tt > , a new StringBuilder will be returned . The argument * < tt > standardConformingStrings < / tt > defines whether the backend expects standard - conforming * string literals or allows backslash escape sequences . * @ param sbuf the string builder to append to ; or < tt > null < / tt > * @ param value the string value * @ param standardConformingStrings if standard conforming strings should be used * @ return the sbuf argument ; or a new string builder for sbuf = = null * @ throws SQLException if the string contains a < tt > \ 0 < / tt > character */ public static StringBuilder escapeLiteral ( StringBuilder sbuf , String value , boolean standardConformingStrings ) throws SQLException { } }
if ( sbuf == null ) { sbuf = new StringBuilder ( ( value . length ( ) + 10 ) / 10 * 11 ) ; // Add 10 % for escaping . } doAppendEscapedLiteral ( sbuf , value , standardConformingStrings ) ; return sbuf ;
public class AWSGlobalAcceleratorClient { /** * Update a listener . * @ param updateListenerRequest * @ return Result of the UpdateListener operation returned by the service . * @ throws InvalidArgumentException * An argument that you specified is invalid . * @ throws InvalidPortRangeException * The port numbers that you specified are not valid numbers or are not unique for this accelerator . * @ throws ListenerNotFoundException * The listener that you specified doesn ' t exist . * @ throws InternalServiceErrorException * There was an internal error for AWS Global Accelerator . * @ throws LimitExceededException * Processing your request would cause you to exceed an AWS Global Accelerator limit . * @ sample AWSGlobalAccelerator . UpdateListener * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / globalaccelerator - 2018-08-08 / UpdateListener " * target = " _ top " > AWS API Documentation < / a > */ @ Override public UpdateListenerResult updateListener ( UpdateListenerRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeUpdateListener ( request ) ;
public class OutHamp {

    /**
     * Sends a query-result message to a given address.
     *
     * @param os the destination stream
     * @param headers message headers to serialize
     * @param address destination address
     * @param qId correlation id of the query being answered
     * @param value the result payload
     * @throws IOException if the message cannot be written
     */
    public void queryResult(OutputStream os, HeadersAmp headers, String address, long qId, Object value) throws IOException {
        init(os);
        OutH3 out = _out;
        // init() may have failed to establish an output; silently drop in that case
        if (out == null) {
            return;
        }
        if (log.isLoggable(_level)) {
            log.log(_level, "hamp-query-result-w " + value + " (in " + this + ")" + "\n  {id:" + qId + " to:" + address + ", " + headers + "," + os + "}");
        }
        try {
            // wire order matters: type tag, headers, address, query id, payload
            out.writeLong(MessageTypeHamp.QUERY_RESULT.ordinal());
            writeHeaders(out, headers);
            writeToAddress(out, address);
            out.writeLong(qId);
            out.writeObject(value);
            // XXX: out.flushBuffer();
            // out.flush();
        } catch (Throwable e) {
            // log before rethrowing so the failure is visible even if swallowed upstream
            log.log(Level.WARNING, e.toString(), e);
            throw e;
        }
    }
}
public class EndpointHandler { /** * Handles GET requests by calling response updater based on uri pattern . * @ param request HTTP request * @ param response HTTP response * @ throws HttpException in case of HTTP related issue * @ throws IOException in case of IO related issue */ public void handleGet ( HttpRequest request , HttpResponse response ) throws HttpException , IOException { } }
String uri = request . getRequestLine ( ) . getUri ( ) ; LOG . debug ( "uri {}" , uri ) ; try { ResponseUpdater ru = this . findResponseUpdater ( request ) ; LOG . debug ( "request updater {}" , ru ) ; ru . update ( response ) ; } catch ( IllegalStateException t ) { LOG . error ( "Cannot handle request" , t ) ; throw new HttpException ( "Cannot handle request" , t ) ; }
public class MimeHeaders { /** * Replaces the current value of the first header entry whose name matches * the given name with the given value , adding a new header if no existing header * name matches . This method also removes all matching headers after the first one . * Note that RFC822 headers can contain only US - ASCII characters . * @ param name a < code > String < / code > with the name of the header for * which to search * @ param value a < code > String < / code > with the value that will replace the * current value of the specified header * @ exception IllegalArgumentException if there was a problem in the * mime header name or the value being set * @ see # getHeader */ public void setHeader ( String name , String value ) { } }
boolean found = false ; if ( ( name == null ) || name . equals ( "" ) ) throw new IllegalArgumentException ( "Illegal MimeHeader name" ) ; for ( int i = 0 ; i < headers . size ( ) ; i ++ ) { MimeHeader hdr = ( MimeHeader ) headers . elementAt ( i ) ; if ( hdr . getName ( ) . equalsIgnoreCase ( name ) ) { if ( ! found ) { headers . setElementAt ( new MimeHeader ( hdr . getName ( ) , value ) , i ) ; found = true ; } else headers . removeElementAt ( i -- ) ; } } if ( ! found ) addHeader ( name , value ) ;
public class Context { /** * Return the value of a name , else fail . If the name is not present , a { @ link ContextException } is thrown . * @ param name * The name to look for . * @ return The value of the name . */ public Value lookup ( ILexNameToken name ) { } }
Value v = check ( name ) ; if ( v == null ) { VdmRuntimeError . abort ( name . getLocation ( ) , 4034 , "Name '" + name + "' not in scope" , this ) ; } return v ;
public class RegisterTaskDefinitionRequestMarshaller {

    /**
     * Marshalls every field of the given request through the protocol
     * marshaller using the pre-built field bindings.
     *
     * @param registerTaskDefinitionRequest the request to marshall; must not be null
     * @param protocolMarshaller the target marshaller
     */
    public void marshall(RegisterTaskDefinitionRequest registerTaskDefinitionRequest, ProtocolMarshaller protocolMarshaller) {
        if (registerTaskDefinitionRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // one marshall call per request field, paired with its binding
            protocolMarshaller.marshall(registerTaskDefinitionRequest.getFamily(), FAMILY_BINDING);
            protocolMarshaller.marshall(registerTaskDefinitionRequest.getTaskRoleArn(), TASKROLEARN_BINDING);
            protocolMarshaller.marshall(registerTaskDefinitionRequest.getExecutionRoleArn(), EXECUTIONROLEARN_BINDING);
            protocolMarshaller.marshall(registerTaskDefinitionRequest.getNetworkMode(), NETWORKMODE_BINDING);
            protocolMarshaller.marshall(registerTaskDefinitionRequest.getContainerDefinitions(), CONTAINERDEFINITIONS_BINDING);
            protocolMarshaller.marshall(registerTaskDefinitionRequest.getVolumes(), VOLUMES_BINDING);
            protocolMarshaller.marshall(registerTaskDefinitionRequest.getPlacementConstraints(), PLACEMENTCONSTRAINTS_BINDING);
            protocolMarshaller.marshall(registerTaskDefinitionRequest.getRequiresCompatibilities(), REQUIRESCOMPATIBILITIES_BINDING);
            protocolMarshaller.marshall(registerTaskDefinitionRequest.getCpu(), CPU_BINDING);
            protocolMarshaller.marshall(registerTaskDefinitionRequest.getMemory(), MEMORY_BINDING);
            protocolMarshaller.marshall(registerTaskDefinitionRequest.getTags(), TAGS_BINDING);
            protocolMarshaller.marshall(registerTaskDefinitionRequest.getPidMode(), PIDMODE_BINDING);
            protocolMarshaller.marshall(registerTaskDefinitionRequest.getIpcMode(), IPCMODE_BINDING);
            protocolMarshaller.marshall(registerTaskDefinitionRequest.getProxyConfiguration(), PROXYCONFIGURATION_BINDING);
        } catch (Exception e) {
            // wrap any marshalling failure, preserving the cause
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class ArrayBasedStrategy { /** * - - - GET ALL ENDPOINTS - - - */ @ SuppressWarnings ( "unchecked" ) @ Override public List < T > getAllEndpoints ( ) { } }
ArrayList < T > list = new ArrayList < > ( endpoints . length ) ; for ( int i = 0 ; i < endpoints . length ; i ++ ) { list . add ( ( T ) endpoints [ i ] ) ; } return list ;
public class ConcatVectorNamespace { /** * This adds a sparse feature to a vector , setting the appropriate component of the given vector to the passed in * value . * @ param vector the vector * @ param featureName the feature whose value to set * @ param index the index of the one - hot vector to set , as a string , which we will translate into a mapping * @ param value the value we want to set this one - hot index to */ public void setSparseFeature ( ConcatVector vector , String featureName , String index , double value ) { } }
vector . setSparseComponent ( ensureFeature ( featureName ) , ensureSparseFeature ( featureName , index ) , value ) ;
public class JSQLParserAdapter {

    /**
     * To make ddal-jsqlparser work well, JSqlParser should include the feature of
     * 'support getting jdbc parameter index'. And this feature is provided on the
     * version of {@link <a href="https://github.com/JSQLParser/JSqlParser/releases/tag/jsqlparser-0.9.7">0.9.7</a>}.
     * This method is designed to check the necessary feature: it parses a probe
     * statement containing five '?' placeholders and verifies that each parsed
     * JdbcParameter reports its 1-based index.
     *
     * @throws JSQLParserException if the probe statement cannot be parsed
     * @throws IllegalStateException if parameter indexes are not reported
     */
    public static void checkJSqlParserFeature() throws JSQLParserException {
        CCJSqlParserManager parserManager = new CCJSqlParserManager();
        // probe statement with parameters in: WHERE, sub-select LIKE, sub-select >, LIMIT offset/count
        String sql = "SELECT * FROM tab_1 WHERE tab_1.col_1 = ? AND col_2 IN (SELECT DISTINCT col_2 FROM tab_2 WHERE col_3 LIKE ? AND col_4 > ?) LIMIT ?, ?";
        Select select = (Select) parserManager.parse(new StringReader(sql));
        PlainSelect selectBody = (PlainSelect) select.getSelectBody();
        AndExpression andExpression = (AndExpression) selectBody.getWhere();
        // parameter 1: top-level equality
        EqualsTo equalsTo = (EqualsTo) andExpression.getLeftExpression();
        JdbcParameter jdbcParameter = (JdbcParameter) equalsTo.getRightExpression();
        Integer index1 = jdbcParameter.getIndex();
        if (index1 != 1) {
            throw new IllegalStateException("Current version of JSQLParser doesn't support the feature of 'support " + "get jdbc parameter index'");
        }
        InExpression inExpression = (InExpression) andExpression.getRightExpression();
        SubSelect subSelect = (SubSelect) inExpression.getRightItemsList();
        PlainSelect subSelectBody = (PlainSelect) subSelect.getSelectBody();
        AndExpression subAndExpression = (AndExpression) subSelectBody.getWhere();
        // parameter 2: LIKE inside the sub-select
        LikeExpression likeExpression = (LikeExpression) subAndExpression.getLeftExpression();
        if (((JdbcParameter) likeExpression.getRightExpression()).getIndex() != 2) {
            throw new IllegalStateException("Current version of JSQLParser doesn't support the feature of 'support get jdbc parameter index'");
        }
        // parameter 3: comparison inside the sub-select
        GreaterThan greaterThan = (GreaterThan) subAndExpression.getRightExpression();
        if (((JdbcParameter) greaterThan.getRightExpression()).getIndex() != 3) {
            throw new IllegalStateException("Current version of JSQLParser doesn't support the feature of 'support get jdbc parameter index'");
        }
        // parameters 4 and 5: LIMIT offset and row count
        Expression offset = selectBody.getLimit().getOffset();
        Expression rowCount = selectBody.getLimit().getRowCount();
        if (((JdbcParameter) offset).getIndex() != 4 || ((JdbcParameter) rowCount).getIndex() != 5) {
            throw new IllegalStateException("Current version of JSQLParser doesn't support the feature of 'support get jdbc parameter index'");
        }
    }
}
public class Logging { /** * Configures the logging environment to use the first available config file in the list , printing an error if none of the * files * are suitable * @ param files */ public static void configureFiles ( Iterable < File > files ) { } }
for ( File file : files ) { if ( file != null && file . exists ( ) && file . canRead ( ) ) { setup ( file ) ; return ; } } System . out . println ( "(No suitable log config file found)" ) ;
public class BlobVault {

    /**
     * Returns string content of blob identified by specified blob handle.
     * String contents cache is used: small results are cached, and a cache hit
     * avoids reading the blob stream entirely.
     *
     * @param blobHandle blob handle
     * @param txn {@linkplain Transaction} instance
     * @return string content of blob identified by specified blob handle, or
     *         null if the blob stream is missing
     * @throws IOException if something went wrong
     */
    @Nullable
    public final String getStringContent(final long blobHandle, @NotNull final Transaction txn) throws IOException {
        String result;
        // fast path: serve from the string contents cache
        result = stringContentCache.tryKey(this, blobHandle);
        if (result == null) {
            final InputStream content = getContent(blobHandle, txn);
            if (content == null) {
                // missing blob is logged (with a synthetic exception for the stack trace)
                // but not thrown; the method returns null instead
                logger.error("Blob string not found: " + getBlobLocation(blobHandle), new FileNotFoundException());
            }
            result = content == null ? null : UTFUtil.readUTF(content);
            // cache only values small enough per configuration, and only if no
            // concurrent reader has cached this handle in the meantime
            if (result != null && result.length() <= config.getBlobStringsCacheMaxValueSize()) {
                if (stringContentCache.getObject(this, blobHandle) == null) {
                    stringContentCache.cacheObject(this, blobHandle, result);
                }
            }
        }
        return result;
    }
}
public class AmazonPinpointEmailClient { /** * Specify a custom domain to use for open and click tracking elements in email that you send using Amazon Pinpoint . * @ param putConfigurationSetTrackingOptionsRequest * A request to add a custom domain for tracking open and click events to a configuration set . * @ return Result of the PutConfigurationSetTrackingOptions operation returned by the service . * @ throws NotFoundException * The resource you attempted to access doesn ' t exist . * @ throws TooManyRequestsException * Too many requests have been made to the operation . * @ throws BadRequestException * The input you provided is invalid . * @ sample AmazonPinpointEmail . PutConfigurationSetTrackingOptions * @ see < a * href = " http : / / docs . aws . amazon . com / goto / WebAPI / pinpoint - email - 2018-07-26 / PutConfigurationSetTrackingOptions " * target = " _ top " > AWS API Documentation < / a > */ @ Override public PutConfigurationSetTrackingOptionsResult putConfigurationSetTrackingOptions ( PutConfigurationSetTrackingOptionsRequest request ) { } }
request = beforeClientExecution ( request ) ; return executePutConfigurationSetTrackingOptions ( request ) ;
public class DatePicker {

    /**
     * Clears the date picker. Some browsers require clicking on an element outside
     * of the date picker field to properly reset the calendar to today's date.
     */
    public void reset() {
        this.getElement().clear();
        // click outside the field so the browser commits the cleared value
        Grid.driver().findElement(By.tagName("body")).click();
        // reset the locally tracked calendar to "now"
        this.calendar = Calendar.getInstance();
        // re-focus the field (re-locating it, in case the first reference went stale)
        this.getElement().click();
    }
}
public class Iterables {

    /**
     * Combines multiple iterables into a single iterable. The returned iterable
     * has an iterator that traverses the elements of each iterable in
     * {@code inputs}. The input iterators are not polled until necessary.
     *
     * <p>The returned iterable's iterator supports {@code remove()} when the
     * corresponding input iterator supports it.
     *
     * @throws NullPointerException if any of the provided iterables is null
     */
    public static <T> Iterable<T> concat(Iterable<? extends T>... inputs) {
        // snapshot the varargs array and delegate to the Iterable-of-Iterables overload
        return concat(ImmutableList.copyOf(inputs));
    }
}
public class WaterMarkEventGenerator { /** * Computes the min ts across all streams . */ private long computeWaterMarkTs ( ) { } }
long ts = 0 ; // only if some data has arrived on each input stream if ( streamToTs . size ( ) >= inputStreams . size ( ) ) { ts = Long . MAX_VALUE ; for ( Map . Entry < GlobalStreamId , Long > entry : streamToTs . entrySet ( ) ) { ts = Math . min ( ts , entry . getValue ( ) ) ; } } return ts - eventTsLag ;
public class PermutationGroup { /** * Starts with an incomplete set of group generators in ` permutations ` and * expands it to include all possible combinations . * Ways to complete group : * - combinations of permutations pi x pj * - combinations with itself p ^ k */ public void completeGroup ( ) { } }
// Copy initial set to allow permutations to grow List < List < Integer > > gens = new ArrayList < List < Integer > > ( permutations ) ; // Keep HashSet version of permutations for fast lookup . Set < List < Integer > > known = new HashSet < List < Integer > > ( permutations ) ; // breadth - first search through the map of all members List < List < Integer > > currentLevel = new ArrayList < List < Integer > > ( permutations ) ; while ( currentLevel . size ( ) > 0 ) { List < List < Integer > > nextLevel = new ArrayList < List < Integer > > ( ) ; for ( List < Integer > p : currentLevel ) { for ( List < Integer > gen : gens ) { List < Integer > y = combine ( p , gen ) ; if ( ! known . contains ( y ) ) { nextLevel . add ( y ) ; // bypass addPermutation ( y ) for performance permutations . add ( y ) ; known . add ( y ) ; } } } currentLevel = nextLevel ; }
public class InternalSARLLexer {

    /**
     * $ANTLR start "RULE_RICH_TEXT_START"
     *
     * ANTLR-generated lexer rule: matches the opening of a rich string —
     * three single quotes, any number of RULE_IN_RICH_STRING parts, an
     * optional one or two trailing quotes, then the EOF sentinel '\uFFFD'.
     * Generated code: do not hand-edit the recognition logic.
     */
    public final void mRULE_RICH_TEXT_START() throws RecognitionException {
        try {
            int _type = RULE_RICH_TEXT_START;
            int _channel = DEFAULT_TOKEN_CHANNEL;
            // InternalSARL.g:16906: ( '\'\'\'' ( RULE_IN_RICH_STRING )* ( '\'' ( '\'' )? )? '\uFFFD' )
            {
                match("'''");
                // ( RULE_IN_RICH_STRING )* — loop while lookahead keeps us inside the rich string
                loop12: do {
                    int alt12 = 2;
                    int LA12_0 = input.LA(1);
                    if ((LA12_0 == '\'')) {
                        int LA12_1 = input.LA(2);
                        if ((LA12_1 == '\'')) {
                            int LA12_4 = input.LA(3);
                            if (((LA12_4 >= '\u0000' && LA12_4 <= '&') || (LA12_4 >= '(' && LA12_4 <= '\uFFFC') || (LA12_4 >= '\uFFFE' && LA12_4 <= '\uFFFF'))) {
                                alt12 = 1;
                            }
                        } else if (((LA12_1 >= '\u0000' && LA12_1 <= '&') || (LA12_1 >= '(' && LA12_1 <= '\uFFFC') || (LA12_1 >= '\uFFFE' && LA12_1 <= '\uFFFF'))) {
                            alt12 = 1;
                        }
                    } else if (((LA12_0 >= '\u0000' && LA12_0 <= '&') || (LA12_0 >= '(' && LA12_0 <= '\uFFFC') || (LA12_0 >= '\uFFFE' && LA12_0 <= '\uFFFF'))) {
                        alt12 = 1;
                    }
                    switch (alt12) {
                        case 1:
                            // RULE_IN_RICH_STRING
                            {
                                mRULE_IN_RICH_STRING();
                            }
                            break;
                        default:
                            break loop12;
                    }
                } while (true);
                // optional trailing '\'' ( '\'' )?
                int alt14 = 2;
                int LA14_0 = input.LA(1);
                if ((LA14_0 == '\'')) {
                    alt14 = 1;
                }
                switch (alt14) {
                    case 1:
                        {
                            match('\'');
                            // optional second '\''
                            int alt13 = 2;
                            int LA13_0 = input.LA(1);
                            if ((LA13_0 == '\'')) {
                                alt13 = 1;
                            }
                            switch (alt13) {
                                case 1:
                                    {
                                        match('\'');
                                    }
                                    break;
                            }
                        }
                        break;
                }
                match('\uFFFD');
            }
            state.type = _type;
            state.channel = _channel;
        } finally {
        }
    }
}
public class BaseMessageHeader { /** * Add this name / value pair to this matrix . * Note : Be careful as the matrix is updated by this method . . . If the matrix was the * properties matrix , there would be no way to know where the old tree entry was . * @ param mxString Source matrix ( or null if new ) . * @ param strName Name to add * @ param strValue Value to add * Skip if the value is null . */ public final Object [ ] [ ] addNameValue ( Object [ ] [ ] mxString , String strName , Object strValue ) { } }
if ( strName == null ) return mxString ; if ( strValue == null ) return mxString ; if ( mxString == null ) mxString = new Object [ 1 ] [ 2 ] ; else { for ( int i = 0 ; i < mxString . length ; i ++ ) { // If it is already there , replace the value . if ( strName . equalsIgnoreCase ( ( String ) mxString [ i ] [ MessageConstants . NAME ] ) ) { mxString [ i ] [ MessageConstants . VALUE ] = strValue ; return mxString ; } } Object [ ] [ ] tempString = mxString ; mxString = new Object [ tempString . length + 1 ] [ 2 ] ; for ( int i = 0 ; i < tempString . length ; i ++ ) { mxString [ i ] [ MessageConstants . NAME ] = tempString [ i ] [ MessageConstants . NAME ] ; mxString [ i ] [ MessageConstants . VALUE ] = tempString [ i ] [ MessageConstants . VALUE ] ; } } int i = mxString . length - 1 ; mxString [ i ] [ MessageConstants . NAME ] = strName ; mxString [ i ] [ MessageConstants . VALUE ] = strValue ; return mxString ;
public class TwiML {
    /**
     * Convert TwiML object to URL.
     *
     * The XML representation is percent-encoded using the
     * application/x-www-form-urlencoded scheme with UTF-8.
     *
     * @return URL string of TwiML object
     * @throws TwiMLException if cannot generate URL
     */
    public String toUrl() throws TwiMLException {
        try {
            return URLEncoder.encode(toXml(), "UTF-8");
        } catch (UnsupportedEncodingException e) {
            // NOTE(review): "UTF-8" is guaranteed by the JVM, so this branch is
            // effectively unreachable; also the original cause is dropped here —
            // prefer a TwiMLException constructor taking the cause, if one exists.
            throw new TwiMLException(e.getMessage());
        }
    }
}
public class StatisticsJDBCStorageConnection {
    /**
     * {@inheritDoc}
     *
     * Wraps the delegate call in the shared GET_NODE_DATA_SIZE statistics
     * entry so the time spent is recorded even when the call throws.
     */
    public long getNodeDataSize(String parentId) throws RepositoryException {
        Statistics s = ALL_STATISTICS.get(GET_NODE_DATA_SIZE);
        try {
            s.begin();
            // NOTE(review): parentId is not used — the delegate returns the size
            // of the whole workspace; confirm this is the intended semantics.
            return wcs.getWorkspaceDataSize();
        } finally {
            // Always close the timing window, even on exception.
            s.end();
        }
    }
}
public class WhiteboxImpl { /** * Get an array of { @ link Method } ' s that matches the method name and whose * argument types are assignable from { @ code expectedTypes } . Both * instance and static methods are taken into account . * @ param clazz The class that should contain the methods . * @ param methodName Names of the methods that will be returned . * @ param expectedTypes The methods must match * @ param exactParameterTypeMatch { @ code true } if the { @ code expectedTypes } must match * the parameter types must match exactly , { @ code false } if * the { @ code expectedTypes } are allowed to be converted * into primitive types if they are of a wrapped type and still * match . * @ return An array of Method ' s . */ public static Method [ ] getMethods ( Class < ? > clazz , String methodName , Class < ? > [ ] expectedTypes , boolean exactParameterTypeMatch ) { } }
List < Method > matchingArgumentTypes = new LinkedList < Method > ( ) ; Method [ ] methods = getMethods ( clazz , methodName ) ; for ( Method method : methods ) { final Class < ? > [ ] parameterTypes = method . getParameterTypes ( ) ; if ( checkIfParameterTypesAreSame ( method . isVarArgs ( ) , expectedTypes , parameterTypes ) || ( ! exactParameterTypeMatch && checkIfParameterTypesAreSame ( method . isVarArgs ( ) , convertParameterTypesToPrimitive ( expectedTypes ) , parameterTypes ) ) ) { matchingArgumentTypes . add ( method ) ; } } final Method [ ] methodArray = matchingArgumentTypes . toArray ( new Method [ 0 ] ) ; if ( methodArray . length == 0 ) { throw new MethodNotFoundException ( String . format ( "No methods matching the name(s) %s were found in the class hierarchy of %s." , concatenateStrings ( methodName ) , getType ( clazz ) ) ) ; } return matchingArgumentTypes . toArray ( new Method [ matchingArgumentTypes . size ( ) ] ) ;
public class ELTools { /** * Which annotations are given to an object displayed by a JSF component ? * @ param p _ component * the component * @ return null if there are no annotations , or if they cannot be accessed */ public static Annotation [ ] readAnnotations ( UIComponent p_component ) { } }
ValueExpression valueExpression = p_component . getValueExpression ( "value" ) ; if ( valueExpression != null && valueExpression . getExpressionString ( ) != null && valueExpression . getExpressionString ( ) . length ( ) > 0 ) { return readAnnotations ( valueExpression , p_component ) ; } return null ;
public class ULocale {
    /**
     * Create a tag string from the supplied parameters. The lang, script and
     * region parameters may be null references. If the lang parameter is an
     * empty string, the default value for an unknown language is written to
     * the output buffer.
     *
     * @param lang The language tag to use.
     * @param script The script tag to use.
     * @param region The region tag to use.
     * @param trailing Any trailing data to append to the new tag.
     * @return The new String.
     */
    static String createTagString(String lang, String script, String region, String trailing) {
        // Delegate to the five-argument overload with no alternate tags.
        return createTagString(lang, script, region, trailing, null);
    }
}
public class InternalSARLLexer {
    /**
     * $ANTLR start "RULE_ML_COMMENT"
     *
     * Generated lexer rule: recognizes a multi-line comment — the opening
     * '/' '*' pair, then a non-greedy run of arbitrary characters, up to the
     * first '*' '/' pair.
     *
     * NOTE: machine-generated by ANTLR from InternalSARL.g (rule at line
     * 48797) — do not hand-edit the recognition logic; regenerate instead.
     *
     * @throws RecognitionException if the input does not match the rule
     */
    public final void mRULE_ML_COMMENT() throws RecognitionException {
        try {
            int _type = RULE_ML_COMMENT;
            int _channel = DEFAULT_TOKEN_CHANNEL;
            {
                match("/*");
                // ( options { greedy=false; } : . )* — consume any character
                // until the closing pair is the next input.
                loop49: do {
                    int alt49 = 2;
                    int LA49_0 = input.LA(1);
                    if ((LA49_0 == '*')) {
                        int LA49_1 = input.LA(2);
                        if ((LA49_1 == '/')) {
                            alt49 = 2;
                        } else if (((LA49_1 >= '\u0000' && LA49_1 <= '.') || (LA49_1 >= '0' && LA49_1 <= '\uFFFF'))) {
                            alt49 = 1;
                        }
                    } else if (((LA49_0 >= '\u0000' && LA49_0 <= ')') || (LA49_0 >= '+' && LA49_0 <= '\uFFFF'))) {
                        alt49 = 1;
                    }
                    switch (alt49) {
                        case 1:
                            // any single character
                            {
                                matchAny();
                            }
                            break;
                        default:
                            break loop49;
                    }
                } while (true);
                match("*/");
            }
            state.type = _type;
            state.channel = _channel;
        } finally {
        }
    }
}
public class FullscreenVideoLayout { /** * Onclick action * Controls play button and fullscreen button . * @ param v View defined in XML */ @ Override public void onClick ( View v ) { } }
if ( v . getId ( ) == R . id . vcv_img_play ) { if ( isPlaying ( ) ) { pause ( ) ; } else { start ( ) ; } } else { setFullscreen ( ! isFullscreen ( ) ) ; }
public class ZonedDateTime {
    /**
     * Obtains an instance of {@code ZonedDateTime} from a local date-time
     * using the preferred offset if possible.
     *
     * The local date-time is resolved to a single instant on the time-line by
     * finding a valid offset from UTC/Greenwich as defined by the
     * {@link ZoneRules rules} of the zone ID. In most cases there is only one
     * valid offset. In the case of an overlap (clocks set back) there are two
     * valid offsets: the preferred offset is used when valid, otherwise the
     * earlier offset (typically "summer"). In the case of a gap (clocks jump
     * forward) the local date-time is adjusted later by the length of the gap
     * and the post-transition offset is used.
     *
     * @param localDateTime the local date-time, not null
     * @param zone the time-zone, not null
     * @param preferredOffset the zone offset, null if no preference
     * @return the zoned date-time, not null
     */
    public static ZonedDateTime ofLocal(LocalDateTime localDateTime, ZoneId zone, ZoneOffset preferredOffset) {
        Objects.requireNonNull(localDateTime, "localDateTime");
        Objects.requireNonNull(zone, "zone");
        // Fixed-offset zones need no rule lookup: the offset is the zone itself.
        if (zone instanceof ZoneOffset) {
            return new ZonedDateTime(localDateTime, (ZoneOffset) zone, zone);
        }
        ZoneRules rules = zone.getRules();
        List<ZoneOffset> validOffsets = rules.getValidOffsets(localDateTime);
        ZoneOffset offset;
        if (validOffsets.size() == 1) {
            // Normal case: exactly one valid offset for this local time.
            offset = validOffsets.get(0);
        } else if (validOffsets.size() == 0) {
            // Gap: shift the local time later by the gap length and take the
            // offset that applies after the transition.
            ZoneOffsetTransition trans = rules.getTransition(localDateTime);
            localDateTime = localDateTime.plusSeconds(trans.getDuration().getSeconds());
            offset = trans.getOffsetAfter();
        } else {
            // Overlap: honour the caller's preferred offset when it is one of
            // the valid choices, otherwise fall back to the earlier offset.
            if (preferredOffset != null && validOffsets.contains(preferredOffset)) {
                offset = preferredOffset;
            } else {
                offset = Objects.requireNonNull(validOffsets.get(0), "offset"); // protect against bad ZoneRules
            }
        }
        return new ZonedDateTime(localDateTime, offset, zone);
    }
}
public class TCAbortMessageImpl {
    /**
     * (non-Javadoc)
     *
     * Encodes this TC-Abort message as an ASN.1 private-class constructed
     * sequence: transaction id, then either the P-Abort cause or the
     * dialog-portion / user-abort-information pair. Exactly one of the two
     * alternatives must be present.
     *
     * @see org.restcomm.protocols.ss7.tcap.asn.Encodable#encode(org.mobicents.protocols.asn.AsnOutputStream)
     * @throws EncodeException on invalid state or any underlying ASN/IO failure
     */
    public void encode(AsnOutputStream aos) throws EncodeException {
        // Precondition: a 4-byte destination transaction id is mandatory.
        if (this.destinationTransactionId == null || this.destinationTransactionId.length != 4)
            throw new EncodeException("Error while encoding TCAbortMessage: destinationTransactionId is not defined or has not a length 4");
        // Precondition: exactly one of { pAbortCause } / { dp, userAbortInformation }
        // may be populated — neither none nor both.
        if (this.pAbortCause == null && (this.dp == null && this.userAbortInformation == null))
            throw new EncodeException("Error while encoding TCAbortMessage: neither PAbortCause nor DialogPortion/UserAbortInformation is defined");
        if (this.pAbortCause != null && (this.dp != null || this.userAbortInformation != null))
            throw new EncodeException("Error while encoding TCAbortMessage: both PAbortCause and DialogPortion/UserAbortInformation is defined");
        try {
            // Outer ABORT wrapper with a definite length patched in at the end.
            aos.writeTag(Tag.CLASS_PRIVATE, false, TCAbortMessage._TAG_ABORT);
            int pos = aos.StartContentDefiniteLength();
            aos.writeOctetString(Tag.CLASS_PRIVATE, TCQueryMessage._TAG_TRANSACTION_ID, this.destinationTransactionId);
            if (this.pAbortCause != null) {
                aos.writeInteger(Tag.CLASS_PRIVATE, TCAbortMessage._TAG_P_ABORT_CAUSE, this.pAbortCause.getType());
            } else {
                if (this.dp != null)
                    this.dp.encode(aos);
                if (this.userAbortInformation != null) {
                    // User abort info is wrapped in its own definite-length TLV.
                    aos.writeTag(Tag.CLASS_PRIVATE, false, TCAbortMessage._TAG_USER_ABORT_INFORMATION);
                    int pos2 = aos.StartContentDefiniteLength();
                    ((UserInformationElementImpl) this.userAbortInformation).encode(aos);
                    aos.FinalizeContent(pos2);
                } else {
                    // No user info: emit the tag with an explicit zero length.
                    aos.writeTag(Tag.CLASS_PRIVATE, false, TCAbortMessage._TAG_USER_ABORT_INFORMATION);
                    aos.writeLength(0);
                }
            }
            aos.FinalizeContent(pos);
        } catch (IOException e) {
            throw new EncodeException("IOException while encoding TCAbortMessage: " + e.getMessage(), e);
        } catch (AsnException e) {
            throw new EncodeException("AsnException while encoding TCAbortMessage: " + e.getMessage(), e);
        }
    }
}
public class DateTimeBrowser {
    /**
     * go. This method reads the file, creates the table to display, the
     * window to display it in, and displays the window.
     *
     * @param args command-line arguments; args[0] is the name of the file to
     *             read (required — no length check is performed here).
     */
    private void go(String[] args) {
        mainArgs = args;
        setDefaultTimeZone(); // let user override if needed
        // setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
        setDefaultCloseOperation(WindowConstants.DISPOSE_ON_CLOSE);
        JMenuBar menuBar = new JMenuBar();
        setJMenuBar(menuBar);
        addMenus(menuBar);
        /*
         * Add a fast close listener: hide, dispose, then terminate the JVM.
         */
        addWindowListener(new WindowAdapter() {
            public void windowClosing(WindowEvent e) {
                setVisible(false);
                dispose();
                System.exit(0);
            }
        });
        // Load current file, prime tables and JFrame.
        currFile = new LoadedFile(mainArgs[0]);
        TableView tView = getDefaultTableView();
        resetDefaults(tView);
        // Set max size at start, and display the window.
        Dimension screenMax = Toolkit.getDefaultToolkit().getScreenSize();
        setSize(screenMax);
        setVisible(true);
    }
}
public class SeleniumCommand {
    /**
     * {@inheritDoc}
     *
     * Forwards the stored command name plus the supplied arguments to the
     * underlying Selenium command processor and returns its raw result string.
     *
     * @param args the command arguments, in order
     * @return the processor's response string
     * @throws Exception whatever the command processor propagates
     */
    @Override
    public String send(String... args) throws Exception {
        return commandProcessor.doCommand(commandName, args);
    }
}
public class MappingCache {
    /**
     * Return a new or cached mapper.
     *
     * @param clazz class for the mapper
     * @param annotationSet annotation set for the mapper
     * @param includeParentFields whether inherited fields are mapped as well
     * @return mapper
     */
    @SuppressWarnings({ "unchecked" })
    public <T> Mapping<T> getMapping(Class<T> clazz, AnnotationSet<?, ?> annotationSet, boolean includeParentFields) {
        // cast is safe as this instance is the one adding to the map
        Mapping<T> mapping = (Mapping<T>) cache.get(clazz);
        if (mapping == null) {
            // multiple threads can get here but that's OK: each builds an
            // equivalent Mapping and the last put simply wins — assumes the
            // cache map tolerates concurrent access (TODO confirm its type).
            mapping = new Mapping<T>(clazz, annotationSet, includeParentFields);
            cache.put(clazz, mapping);
        }
        return mapping;
    }
}
public class FileUtil { /** * if file doesn ' t exist , it returns the same file . otherwize , it will find free * file as follows : * if given file name is test . txt , then it searches for non existing file in order : * test2 . txt , test3 . txt , test4 . txt and so on * if given file name is test ( i , e with no extension ) , then it searches for non existing file in order : * test2 , test3 , test4 and so on */ public static File findFreeFile ( File file ) { } }
if ( ! file . exists ( ) ) return file ; String parts [ ] = split ( file . getName ( ) ) ; String pattern = parts [ 1 ] == null ? parts [ 0 ] + "${i}" : parts [ 0 ] + "${i}." + parts [ 1 ] ; return findFreeFile ( file . getParentFile ( ) , pattern , true ) ;
public class YearPicker { /** * Set the selected year . * @ param year The selected year value . */ public void setYear ( int year ) { } }
if ( mAdapter . getYear ( ) == year ) return ; mAdapter . setYear ( year ) ; goTo ( year ) ;
public class LaContainerImpl {
    /**
     * Finds a descendant container by namespace. Direct children are checked
     * first; only when none matches is each child's subtree searched
     * recursively (so a shallow match always wins over a deeper one).
     *
     * @param namespace the namespace to look up; must not be null
     * @return the matching container, or null when no descendant matches
     */
    @Override
    public LaContainer findChild(String namespace) {
        // Pass 1: prefer a direct child with the requested namespace.
        for (LaContainer child : children) {
            if (namespace.equals(child.getNamespace())) {
                return child;
            }
        }
        // Pass 2: recurse depth-first into each child's subtree.
        for (LaContainer child : children) {
            final LaContainer nestedFound = child.findChild(namespace);
            if (nestedFound != null) {
                return nestedFound;
            }
        }
        return null;
    }
}
public class MapModel { /** * Move a vector layer up ( = front ) one place . Note that at any time , all raster layers will always lie behind all * vector layers . This means that position 0 for a vector layer is the first ( = back ) vector layer to be drawn AFTER * all raster layers have already been drawn . * @ param layer * The vector layer to move more to the front . * @ return Returns if the re - ordering was successful or not . * @ since 1.8.0 */ public boolean moveVectorLayerUp ( VectorLayer layer ) { } }
int position = getLayerPosition ( layer ) ; return position >= 0 && moveVectorLayer ( layer , position + 1 ) ;
public class GisModelCurveCalculator {
    /**
     * This method calculates an array of Point2D that represents an ellipse
     * arc. The angular distance between consecutive points is 1 degree.
     *
     * @param center Point2D that represents the center of the ellipse
     * @param majorAxisVector Point2D that represents the vector for the major axis
     * @param axisRatio double value that represents the (minor/major) axis ratio
     * @param initAngle double value that represents the start angle of the arc, in degrees
     * @param endAngle double value that represents the end angle of the arc, in degrees
     * @return Point2D[] array of points describing the shape of the arc;
     *         when initAngle > endAngle the arc wraps through 360/0 degrees
     */
    public static Point2D[] calculateGisModelEllipse(Point2D center, Point2D majorAxisVector,
            double axisRatio, double initAngle, double endAngle) {
        Point2D majorPoint = new Point2D.Double(center.getX() + majorAxisVector.getX(),
                center.getY() + majorAxisVector.getY());
        // Rotation of the major axis w.r.t. the x-axis.
        double orientation = Math.atan(majorAxisVector.getY() / majorAxisVector.getX());
        double semiMajor = center.distance(majorPoint);
        double semiMinor = semiMajor * axisRatio;
        double eccentricity = Math.sqrt(1 - (semiMinor * semiMinor) / (semiMajor * semiMajor));
        int isa = (int) initAngle;
        int iea = (int) endAngle;
        Point2D[] pts;
        if (initAngle <= endAngle) {
            // Simple arc: one point per whole degree, plus exact endpoints.
            int steps = iea - isa;
            pts = new Point2D[steps + 2];
            pts[0] = ellipsePoint(center, semiMinor, eccentricity, orientation, initAngle);
            for (int i = 1; i <= steps; i++) {
                pts[i] = ellipsePoint(center, semiMinor, eccentricity, orientation, isa + i);
            }
            pts[steps + 1] = ellipsePoint(center, semiMinor, eccentricity, orientation, endAngle);
        } else {
            // Wrap-around arc: initAngle..360 then 0..endAngle.
            int head = 360 - isa;
            pts = new Point2D[head + iea + 2];
            // FIX: the original computed the rotated coordinates for the first
            // point but then used the UNROTATED ones, unlike every other point
            // of the arc; the rotation is now applied consistently.
            pts[0] = ellipsePoint(center, semiMinor, eccentricity, orientation, initAngle);
            for (int i = 1; i <= head; i++) {
                pts[i] = ellipsePoint(center, semiMinor, eccentricity, orientation, isa + i);
            }
            for (int i = head + 1; i <= head + iea; i++) {
                pts[i] = ellipsePoint(center, semiMinor, eccentricity, orientation, i - head);
            }
            pts[head + iea + 1] = ellipsePoint(center, semiMinor, eccentricity, orientation, endAngle);
        }
        return pts;
    }

    /**
     * Computes one point of the (rotated) ellipse at the given polar angle,
     * in degrees, using the polar form r = b / sqrt(1 - e^2 * cos^2(theta)).
     */
    private static Point2D ellipsePoint(Point2D center, double semiMinor, double eccentricity,
            double orientation, double angleDeg) {
        double rad = Math.toRadians(angleDeg);
        double cos = Math.cos(rad);
        double r = semiMinor / Math.sqrt(1 - (eccentricity * eccentricity) * (cos * cos));
        double x = r * cos;
        double y = r * Math.sin(rad);
        // Rotate by the major-axis orientation, then translate to the center.
        double xrot = x * Math.cos(orientation) - y * Math.sin(orientation);
        double yrot = x * Math.sin(orientation) + y * Math.cos(orientation);
        return new Point2D.Double(center.getX() + xrot, center.getY() + yrot);
    }
}
public class Record {
    /**
     * Get {@link Text} value.
     *
     * @param label target label
     * @return {@link Text} value of the label, or null when the label is not
     *         present in the VALUE section.
     */
    public Text getValueText(String label) {
        // Look up the labelled entry in the VALUE section, typed as a String.
        HadoopObject o = getHadoopObject(VALUE, label, ObjectUtil.STRING, "String");
        if (o == null) {
            return null;
        }
        // The STRING type lookup presumably wraps a Text instance — the cast
        // relies on that invariant of getHadoopObject.
        return (Text) o.getObject();
    }
}
public class Session { /** * Returns true if the session is empty , * e . g . does not contain anything else than the timestamp */ public boolean isEmpty ( ) { } }
for ( String key : data . keySet ( ) ) { if ( ! TS_KEY . equals ( key ) ) { return false ; } } return true ;
public class BigtableDataClient { /** * Convenience method for asynchronously reading a single row . If the row does not exist , the * future ' s value will be null . * < p > Sample code : * < pre > { @ code * try ( BigtableDataClient bigtableDataClient = BigtableDataClient . create ( " [ PROJECT ] " , " [ INSTANCE ] " ) ) { * String tableId = " [ TABLE ] " ; * / / Build the filter expression * Filters . Filter filter = FILTERS . chain ( ) * . filter ( FILTERS . qualifier ( ) . regex ( " prefix . * " ) ) * . filter ( FILTERS . limit ( ) . cellsPerRow ( 10 ) ) ; * ApiFuture < Row > futureResult = bigtableDataClient . readRowAsync ( tableId , ByteString . copyFromUtf8 ( " key " ) , filter ) ; * ApiFutures . addCallback ( futureResult , new ApiFutureCallback < Row > ( ) { * public void onFailure ( Throwable t ) { * if ( t instanceof NotFoundException ) { * System . out . println ( " Tried to read a non - existent table " ) ; * } else { * t . printStackTrace ( ) ; * public void onSuccess ( Row row ) { * if ( result ! = null ) { * System . out . println ( " Got row : " + result ) ; * } , MoreExecutors . directExecutor ( ) ) ; * } < / pre > */ public ApiFuture < Row > readRowAsync ( String tableId , ByteString rowKey , @ Nullable Filter filter ) { } }
Query query = Query . create ( tableId ) . rowKey ( rowKey ) ; if ( filter != null ) { query = query . filter ( filter ) ; } return readRowCallable ( ) . futureCall ( query ) ;
public class PasswordUtil { /** * Decode the provided string . The string should consist of the algorithm to be used for decoding and encoded string . * For example , { xor } CDo9Hgw = . * @ param encoded _ string the string to be decoded . * @ return The decoded string , null if there is any failure during decoding , or invalid or null encoded _ string . */ public static String passwordDecode ( String encoded_string ) { } }
/* * check input : * - - encoded _ string : any string , any length , cannot be null , * may start with valid ( supported ) crypto algorithm tag */ if ( encoded_string == null ) { // don ' t accept null password return null ; } String crypto_algorithm = getCryptoAlgorithm ( encoded_string ) ; if ( crypto_algorithm == null ) { // password not encoded return encoded_string ; } // valid input . . . decode password return decode_password ( removeCryptoAlgorithmTag ( encoded_string ) , crypto_algorithm ) ;
public class MCMPHandler {
    /**
     * Process a command targeting an application.
     *
     * Reads the mandatory Context, JVMRoute and Alias fields from the request,
     * then applies the requested management action to the matching contexts.
     * Only STOP produces a textual response body; the other actions answer
     * with a plain OK (or an error).
     *
     * @param exchange the http server exchange
     * @param requestData the request data
     * @param action the mgmt action
     * @throws IOException on response I/O failure
     */
    void processAppCommand(final HttpServerExchange exchange, final RequestData requestData, final MCMPAction action) throws IOException {
        final String contextPath = requestData.getFirst(CONTEXT);
        final String jvmRoute = requestData.getFirst(JVMROUTE);
        final String aliases = requestData.getFirst(ALIAS);
        // All three fields are mandatory for app-level commands.
        if (contextPath == null || jvmRoute == null || aliases == null) {
            processError(TYPESYNTAX, SMISFLD, exchange);
            return;
        }
        final List<String> virtualHosts = Arrays.asList(aliases.split(","));
        // NOTE(review): Arrays.asList never returns null and String.split always
        // yields at least one element, so this guard looks like dead code.
        if (virtualHosts == null || virtualHosts.isEmpty()) {
            processError(TYPESYNTAX, SCONBAD, exchange);
            return;
        }
        String response = null;
        switch (action) {
            case ENABLE:
                if (!container.enableContext(contextPath, jvmRoute, virtualHosts)) {
                    processError(MCMPErrorCode.CANT_UPDATE_CONTEXT, exchange);
                    return;
                }
                break;
            case DISABLE:
                if (!container.disableContext(contextPath, jvmRoute, virtualHosts)) {
                    processError(MCMPErrorCode.CANT_UPDATE_CONTEXT, exchange);
                    return;
                }
                break;
            case STOP:
                // STOP reports back the number of requests seen by the context.
                int i = container.stopContext(contextPath, jvmRoute, virtualHosts);
                final StringBuilder builder = new StringBuilder();
                builder.append("Type=STOP-APP-RSP,JvmRoute=").append(jvmRoute);
                // NOTE(review): the fields below are concatenated without any
                // separator before "Alias="/"Context="/"Requests=" — confirm the
                // protocol peer accepts this format before changing it.
                builder.append("Alias=").append(aliases);
                builder.append("Context=").append(contextPath);
                builder.append("Requests=").append(i);
                response = builder.toString();
                break;
            case REMOVE:
                if (!container.removeContext(contextPath, jvmRoute, virtualHosts)) {
                    processError(MCMPErrorCode.CANT_UPDATE_CONTEXT, exchange);
                    return;
                }
                break;
            default: {
                processError(TYPESYNTAX, SMISFLD, exchange);
                return;
            }
        }
        if (response != null) {
            sendResponse(exchange, response);
        } else {
            processOK(exchange);
        }
    }
}
public class MethodExpressionImpl { /** * Evaluates the expression relative to the provided context , invokes the * method that was found using the supplied parameters , and returns the * result of the method invocation . * @ param context * The context of this evaluation . * @ param params * The parameters to pass to the method , or < code > null < / code > * if no parameters . * @ return the result of the method invocation ( < code > null < / code > if the * method has a < code > void < / code > return type ) . * @ throws NullPointerException * if context is < code > null < / code > or the base object is * < code > null < / code > on the last resolution . * @ throws PropertyNotFoundException * if one of the property resolutions failed because a specified * variable or property does not exist or is not readable . * @ throws MethodNotFoundException * if no suitable method can be found . * @ throws ELException * if an exception was thrown while performing property or * variable resolution . The thrown exception must be included as * the cause property of this exception , if available . If the * exception thrown is an < code > InvocationTargetException < / code > , * extract its < code > cause < / code > and pass it to the * < code > ELException < / code > constructor . * @ see javax . el . MethodExpression # invoke ( javax . el . ELContext , * java . lang . Object [ ] ) */ @ Override public Object invoke ( ELContext context , Object [ ] params ) throws PropertyNotFoundException , MethodNotFoundException , ELException { } }
EvaluationContext ctx = new EvaluationContext ( context , this . fnMapper , this . varMapper ) ; ctx . notifyBeforeEvaluation ( getExpressionString ( ) ) ; Object result = this . getNode ( ) . invoke ( ctx , this . paramTypes , params ) ; ctx . notifyAfterEvaluation ( getExpressionString ( ) ) ; return result ;
public class CmsMoveResourceTypeDialog {
    /**
     * Update the resource type.<p>
     *
     * Moves the resource type (and its explorer settings, plus optionally its
     * schema resource) from its current module to the module selected in the
     * table, persists both modules, re-initializes the affected managers, and
     * finally closes the window and reloads the UI.
     *
     * @param window the dialog window to close when done
     */
    protected void updateResourceType(Window window) {
        // Only act when the selected module differs from the type's current one.
        if (!((CmsModuleRow) m_table.getValue()).equals(new CmsModuleRow(OpenCms.getModuleManager().getModule(m_type.getModuleName())))) {
            // Work on clones so the live module registry is untouched until update.
            CmsModule newModule = ((CmsModuleRow) m_table.getValue()).getModule().clone();
            CmsModule oldModule = OpenCms.getModuleManager().getModule(m_type.getModuleName()).clone();
            m_type.setModuleName(newModule.getName());
            // Add the type to the target module's resource types.
            List<I_CmsResourceType> newTypes = Lists.newArrayList(newModule.getResourceTypes());
            newTypes.add(m_type);
            newModule.setResourceTypes(newTypes);
            // Move the matching explorer type settings from old to new module;
            // the lookup relies on CmsExplorerTypeSettings equality by name.
            List<CmsExplorerTypeSettings> oldSettings = new ArrayList<CmsExplorerTypeSettings>(oldModule.getExplorerTypes());
            CmsExplorerTypeSettings settings = new CmsExplorerTypeSettings();
            settings.setName(m_type.getTypeName());
            settings = oldSettings.get(oldSettings.indexOf(settings));
            oldSettings.remove(settings);
            List<CmsExplorerTypeSettings> newSettings = new ArrayList<CmsExplorerTypeSettings>(newModule.getExplorerTypes());
            newSettings.add(settings);
            oldModule.setExplorerTypes(oldSettings);
            newModule.setExplorerTypes(newSettings);
            // Remove the type from the source module's resource types.
            List<I_CmsResourceType> oldTypes = Lists.newArrayList(oldModule.getResourceTypes());
            oldTypes.remove(m_type);
            oldModule.setResourceTypes(oldTypes);
            if (m_schemaOK) {
                // Also move the XML schema resource between the module resource lists.
                List<String> oldResources = Lists.newArrayList(oldModule.getResources());
                oldResources.remove(m_typeXML.getSchema());
                oldModule.setResources(oldResources);
                List<String> newResources = Lists.newArrayList(newModule.getResources());
                newResources.add(m_typeXML.getSchema());
                newModule.setResources(newResources);
            }
            try {
                // Persist both modules, then refresh the managers that cache
                // resource-type and explorer-type information.
                OpenCms.getModuleManager().updateModule(A_CmsUI.getCmsObject(), oldModule);
                OpenCms.getModuleManager().updateModule(A_CmsUI.getCmsObject(), newModule);
                OpenCms.getResourceManager().initialize(A_CmsUI.getCmsObject());
                OpenCms.getWorkplaceManager().removeExplorerTypeSettings(oldModule);
                OpenCms.getWorkplaceManager().addExplorerTypeSettings(newModule);
                OpenCms.getWorkplaceManager().initialize(A_CmsUI.getCmsObject());
            } catch (CmsException e) {
                // NOTE(review): failure is only logged; the UI still closes and
                // reloads below as if the move succeeded.
                LOG.error("Unable to move resource type", e);
            }
        }
        window.close();
        A_CmsUI.get().reload();
    }
}
public class SimplifyExprVisitor {
    /**
     * collections
     *
     * Constant-folds item access on literal collections: a list literal
     * indexed by an integer constant is replaced by the addressed element (or
     * null when out of range); a map literal accessed with a key equivalent to
     * one of its literal keys is replaced by the matching value (or null when
     * no key matches).
     */
    @Override
    protected void visitItemAccessNode(ItemAccessNode node) {
        // simplify children first
        visitChildren(node);
        ExprNode baseExpr = node.getChild(0);
        ExprNode keyExpr = node.getChild(1);
        if (baseExpr instanceof ListLiteralNode && keyExpr instanceof IntegerNode) {
            ListLiteralNode listLiteral = (ListLiteralNode) baseExpr;
            long index = ((IntegerNode) keyExpr).getValue();
            // FIX: index 0 is a valid list index — the previous check used
            // `index > 0` and incorrectly folded list[0] to null.
            if (index >= 0 && index < listLiteral.numChildren()) {
                node.getParent().replaceChild(node, listLiteral.getChild((int) index));
            } else {
                // out of range
                node.getParent().replaceChild(node, new NullNode(node.getSourceLocation()));
            }
        } else if (baseExpr instanceof MapLiteralNode) {
            MapLiteralNode mapLiteral = (MapLiteralNode) baseExpr;
            // Map literal children alternate: key at i, value at i + 1.
            for (int i = 0; i < mapLiteral.numChildren(); i += 2) {
                if (ExprEquivalence.get().equivalent(keyExpr, mapLiteral.getChild(i))) {
                    node.getParent().replaceChild(node, mapLiteral.getChild(i + 1));
                    return;
                }
            }
            // no matching key
            node.getParent().replaceChild(node, new NullNode(node.getSourceLocation()));
        }
    }
}
public class CollectionInterpreter { /** * < p > executeRow . < / p > * @ param valuesRow a { @ link com . greenpepper . Example } object . * @ param headers a { @ link com . greenpepper . Example } object . * @ param rowFixtureAdapter a { @ link com . greenpepper . reflect . Fixture } object . */ private void executeRow ( Example valuesRow , Example headers , Fixture rowFixtureAdapter ) { } }
valuesRow . annotate ( Annotations . right ( ) ) ; Statistics rowStats = new Statistics ( ) ; for ( int i = 0 ; i != valuesRow . remainings ( ) ; ++ i ) { Example cell = valuesRow . at ( i ) ; if ( i < headers . remainings ( ) ) { // We can do the cast because # parseColumn returns an ExpectedColumn ExpectedColumn column = ( ExpectedColumn ) columns [ i ] ; try { chekOneCell ( rowFixtureAdapter , rowStats , cell , column ) ; } catch ( Exception e ) { cell . annotate ( exception ( e ) ) ; stats . exception ( ) ; } } else { cell . annotate ( ignored ( cell . getContent ( ) ) ) ; } } applyRowStatistic ( rowStats ) ;
public class PmcNxmlHelper { /** * Iterates recursively over all objects of this article , and tries to * extract its text * @ param article * @ return the extracted text from this article */ public static String extractText ( Article article ) { } }
StringBuilder sb = new StringBuilder ( ) ; // extract abstract List < Abstract > abstracts = article . getFront ( ) . getArticleMeta ( ) . getAbstract ( ) ; for ( Abstract abstrct : abstracts ) { for ( Sec sec : abstrct . getSec ( ) ) { processTextContent ( sec . getAddressOrAlternativesOrArray ( ) , sb , true ) ; } } sb . append ( '\n' ) ; // extract text Body body = article . getBody ( ) ; if ( body != null ) { for ( Sec sec : body . getSec ( ) ) { Title title = sec . getTitle ( ) ; if ( title != null && title . getContent ( ) != null ) { processTextContent ( title . getContent ( ) , sb , true ) ; sb . append ( '\n' ) ; } processTextContent ( sec . getAddressOrAlternativesOrArray ( ) , sb , false ) ; } } return removeNoise ( sb . toString ( ) ) ;
public class Cron4jTask { public < RESULT > OptionalThing < RESULT > syncRunningCall ( Function < TaskRunningState , RESULT > oneArgLambda ) { } }
synchronized ( runningState ) { if ( runningState . getBeginTime ( ) . isPresent ( ) ) { return OptionalThing . ofNullable ( oneArgLambda . apply ( runningState ) , ( ) -> { throw new IllegalStateException ( "Not found the result from your scope: " + jobType ) ; } ) ; } else { return OptionalThing . ofNullable ( null , ( ) -> { throw new IllegalStateException ( "Not running now: " + jobType ) ; } ) ; } }
public class ChangeListenerMap { /** * Returns the list of listeners for the specified property . * @ param name the name of the property * @ return the corresponding list of listeners */ public final synchronized L [ ] get ( String name ) { } }
return ( this . map != null ) ? this . map . get ( name ) : null ;
public class StatefulBeanO {

    /**
     * Passivate this <code>SessionBeanO</code> and its
     * associated enterprise bean: fires the PrePassivate lifecycle callback
     * (either ejbPassivate or the configured interceptors), writes the bean
     * instance out through the passivator, and transitions the bean to the
     * PASSIVATED/destroyed state. <p>
     *
     * @exception RemoteException thrown if
     *                this <code>BeanO</code> instance cannot be passivated <p>
     */
    @Override
    public final synchronized void passivate() throws RemoteException {
        final boolean isTraceOn = TraceComponent.isAnyTracingEnabled();
        if (isTraceOn && tc.isEntryEnabled()) // d144064
            Tr.entry(tc, "passivate: " + this);
        if (isTraceOn && tc.isDebugEnabled()) // d468174
        {
            Tr.debug(tc, "extended persistence context bindID = " + ivExPcContext);
        }

        // -1 marks "no PMI measurement started"; also used to cancel the
        // measurement when passivation fails.
        long pmiCookie = -1; // d177661.1

        // beans can be passivated only between Xactions and not in a tx
        if (state == TX_IN_METHOD || state == TX_METHOD_READY) {
            if (isTraceOn && tc.isEventEnabled()) // d144064
                Tr.event(tc, "State: " + StateStrs[state] +
                             " Bean cannot be passivated in a transaction");
            throw new BeanOPassivationFailureException();
        }

        // Atomically move METHOD_READY -> PASSIVATING (guards against concurrent use).
        setState(METHOD_READY, PASSIVATING);

        String lifeCycle = null; // d367572.7
        CallbackContextHelper contextHelper = null; // d399469, d630940
        try {
            if (pmiBean != null) { // lidb1117.4
                pmiCookie = pmiBean.passivationTime();
            }

            if (ivCallbackKind != CallbackKind.None) {
                // Push the contexts the callback must run under; popped via
                // contextHelper.complete(true) in the finally block below.
                contextHelper = new CallbackContextHelper(this);
                beginLifecycleCallback(LifecycleInterceptorWrapper.MID_PRE_PASSIVATE,
                                       contextHelper, CallbackContextHelper.Contexts.All);

                // Invoke the PrePassivate callback if any needs to be called.
                if (ivCallbackKind == CallbackKind.SessionBean) {
                    if (isTraceOn && // d527372
                        TEBeanLifeCycleInfo.isTraceEnabled()) // d367572.7
                    {
                        lifeCycle = "ejbPassivate";
                        TEBeanLifeCycleInfo.traceEJBCallEntry(lifeCycle); // d161864
                    }
                    sessionBean.ejbPassivate();
                } else if (ivCallbackKind == CallbackKind.InvocationContext) {
                    // Invoke the PrePassivate interceptor methods.
                    BeanMetaData bmd = home.beanMetaData;
                    InterceptorMetaData imd = bmd.ivInterceptorMetaData; // d450431
                    InterceptorProxy[] proxies = imd.ivPrePassivateInterceptors;
                    if (proxies != null) {
                        if (isTraceOn && // d527372
                            TEBeanLifeCycleInfo.isTraceEnabled()) // d367572.7
                        {
                            lifeCycle = "PrePassivate";
                            TEBeanLifeCycleInfo.traceEJBCallEntry(lifeCycle); // d161864
                        }
                        InvocationContextImpl<?> inv = getInvocationContext();
                        inv.doLifeCycle(proxies, bmd._moduleMetaData); // d450431, F743-14982
                    }
                }
            }

            // If uninstalling this bean class, do not write this bean
            // instance out to a file, as it would just be deleted. d112866
            if (!uninstalling)
                passivator.passivate(this, home.beanMetaData); // d648122
        } catch (RemoteException ex) {
            FFDCFilter.processException(ex, CLASS_NAME + ".passivate", "425", this);
            pmiCookie = -1; // lidb1117.4
            if (isTraceOn && tc.isEventEnabled()) // d144064
                Tr.event(tc, "passivate failed! ", new Object[] { this, ex });
            throw ex;
        } finally {
            if (lifeCycle != null) // d367572.7
            {
                TEBeanLifeCycleInfo.traceEJBCallExit(lifeCycle); // d367572.7
            }
            if (contextHelper != null) {
                contextHelper.complete(true);
            }
            if (pmiBean != null) {
                // Close out the PMI measurement (pmiCookie is -1 when cancelled).
                pmiBean.passivationTime(pmiCookie);
            }
            setState(PASSIVATING, PASSIVATED);
            ivTimeoutElement.passivated = true; // F61004.5

            // Finally, transition the beanO to the destroyed state, without
            // calling ejbRemove. d730409
            destroyNotRemove();
        }

        // NOTE(review): z/OS-only routing-affinity cleanup; presumably DEQ releases
        // the servant affinity for this bean's key — confirm against platform docs.
        if (isZOS && !removeServantRoutingAffinity()) // d646413.2
        {
            throw new InvalidBeanOStateException("Could not DEQ session bean with key = " +
                                                 beanId.getPrimaryKey());
        }

        if (isTraceOn && tc.isEntryEnabled()) // d144064
            Tr.exit(tc, "passivate: " + getStateName(state));
    }
}
public class Util { /** * Check whether a connection should alive or not . * @ param keepAliveConfig of the connection * @ param outboundRequestMsg of this particular transaction * @ return true if the connection should be kept alive * @ throws ConfigurationException for invalid configurations */ public static boolean isKeepAlive ( KeepAliveConfig keepAliveConfig , HttpCarbonMessage outboundRequestMsg ) throws ConfigurationException { } }
switch ( keepAliveConfig ) { case AUTO : return Float . valueOf ( ( String ) outboundRequestMsg . getProperty ( Constants . HTTP_VERSION ) ) > Constants . HTTP_1_0 ; case ALWAYS : return true ; case NEVER : return false ; default : // The execution will never reach here . In case execution reach here means it should be an invalid value // for keep - alive configurations . throw new ConfigurationException ( "Invalid keep-alive configuration value : " + keepAliveConfig . toString ( ) ) ; }
public class IntervalRegistry { /** * This method retrieves an Interval with the given name . If no one could be found it will * create a new Interval with given name - the length will be a guess . * TODO : This method should be renamed to " getOrCreateInterval " or something like that * TODO : This method MUST be synchonrized to avoid double Interval instances . . . . * @ param aName the Interval name * @ return the existing Interval or a new Interval with given name and guessed length */ public Interval getInterval ( String aName ) { } }
Interval interval = intervalsByName . get ( aName ) ; if ( interval == null ) { interval = createInterval ( aName , IntervalNameParser . guessLengthFromName ( aName ) ) ; } return interval ;
public class Console {

    /**
     * Displays the node for the given repository, workspace and path.
     *
     * @param repository the name of the repository.
     * @param workspace the name of workspace.
     * @param path the path to the node.
     * @param changeHistory store changes in browser history.
     */
    public void displayContent(String repository, String workspace, String path, boolean changeHistory) {
        // Order matters: load the node into the contents view first,
        contents.show(repository, workspace, path, changeHistory);
        // then refresh the view for the selected repository,
        displayRepository(repository);
        // make the contents view the active screen,
        display(contents);
        // and finally reflect the selected repository in the URL
        // (changeHistory is simply forwarded; presumably it controls whether a
        // browser-history entry is pushed -- confirm in changeRepositoryInURL).
        changeRepositoryInURL(repository, changeHistory);
    }
}
public class ComponentProxy { /** * Return the component node for a component that is represented * by a proxy in the tree . * @ param component the component * @ param componentChannel the component ' s channel * @ return the node representing the component in the tree */ @ SuppressWarnings ( { } }
"PMD.DataflowAnomalyAnalysis" } ) /* default */ static ComponentVertex getComponentProxy ( ComponentType component , Channel componentChannel ) { ComponentProxy componentProxy = null ; try { Field field = getManagerField ( component . getClass ( ) ) ; synchronized ( component ) { if ( ! field . isAccessible ( ) ) { // NOPMD , handle problem first field . setAccessible ( true ) ; componentProxy = ( ComponentProxy ) field . get ( component ) ; if ( componentProxy == null ) { componentProxy = new ComponentProxy ( field , component , componentChannel ) ; } field . setAccessible ( false ) ; } else { componentProxy = ( ComponentProxy ) field . get ( component ) ; if ( componentProxy == null ) { componentProxy = new ComponentProxy ( field , component , componentChannel ) ; } } } } catch ( SecurityException | IllegalAccessException e ) { throw ( RuntimeException ) ( new IllegalArgumentException ( "Cannot access component's manager attribute" ) ) . initCause ( e ) ; } return componentProxy ;