signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class ServiceloaderMojo { /** * Writes the output for the service files to disk * @ param serviceImplementations * @ throws MojoExecutionException */ private void writeServiceFiles ( Map < String , List < String > > serviceImplementations ) throws MojoExecutionException { } }
// TODO give the user an option to write them to the source folder or // any other folder ? File parentFolder = new File ( getClassFolder ( ) , "META-INF" + File . separator + "services" ) ; if ( ! parentFolder . exists ( ) ) { parentFolder . mkdirs ( ) ; } for ( Entry < String , List < String > > interfaceClassName : serviceImplementations . entrySet ( ) ) { File serviceFile = new File ( parentFolder , interfaceClassName . getKey ( ) ) ; getLog ( ) . info ( "Generating service file " + serviceFile . getAbsolutePath ( ) ) ; FileWriter writer = null ; try { writer = new FileWriter ( serviceFile ) ; for ( String implementationClassName : interfaceClassName . getValue ( ) ) { getLog ( ) . info ( " + " + implementationClassName ) ; writer . write ( implementationClassName ) ; writer . write ( '\n' ) ; } buildContext . refresh ( serviceFile ) ; } catch ( IOException e ) { throw new MojoExecutionException ( "Error creating file " + serviceFile , e ) ; } finally { if ( writer != null ) { try { writer . close ( ) ; } catch ( IOException e ) { getLog ( ) . error ( e ) ; } } } }
public class EntryMapper { /** * Creates a paxtools object that * corresponds to the psi interaction . * Note : * psi . interactionElementType - & gt ; biopax Complex , MolecularInteraction , or GeneticInteraction * psi . interactionElementType . participantList - & gt ; biopax interaction / complex participants / components */ private Entity processInteraction ( Interaction interaction , Set < String > avail , Provenance pro , boolean isComplex ) { } }
Entity bpInteraction = null ; // interaction or complex boolean isGeneticInteraction = false ; // get interaction name / short name String name = null ; String shortName = null ; if ( interaction . hasNames ( ) ) { Names names = interaction . getNames ( ) ; name = ( names . hasFullName ( ) ) ? names . getFullName ( ) : "" ; shortName = ( names . hasShortLabel ( ) ) ? names . getShortLabel ( ) : "" ; } final Set < InteractionVocabulary > interactionVocabularies = new HashSet < InteractionVocabulary > ( ) ; if ( interaction . hasInteractionTypes ( ) ) { for ( CvType interactionType : interaction . getInteractionTypes ( ) ) { // generate InteractionVocabulary and set interactionType InteractionVocabulary cv = findOrCreateControlledVocabulary ( interactionType , InteractionVocabulary . class ) ; if ( cv != null ) interactionVocabularies . add ( cv ) ; } } // using experiment descriptions , create Evidence objects // ( yet , no experimental forms / roles / entities are created here ) Set < Evidence > bpEvidences = new HashSet < Evidence > ( ) ; if ( interaction . hasExperiments ( ) ) { bpEvidences = createBiopaxEvidences ( interaction ) ; } // A hack for e . g . IntAct or BIND " gene - protein " interactions ( ChIp and EMSA experiments ) // where the interactor type should probably not be ' gene ' ( but ' dna ' or ' rna ' ) Set < String > participantTypes = new HashSet < String > ( ) ; for ( Participant p : interaction . getParticipants ( ) ) { if ( p . hasInteractor ( ) ) { String type = getName ( p . getInteractor ( ) . getInteractorType ( ) . getNames ( ) ) ; if ( type == null ) type = "protein" ; // default type ( if unspecified ) participantTypes . add ( type . toLowerCase ( ) ) ; } else if ( p . hasInteraction ( ) ) { participantTypes . add ( "complex" ) ; // hierarchical complex build up } // else ? ( impossible ! ) } // If there are both genes and physical entities present , let ' s // replace ' gene ' with ' dna ' ( esp . true for " ch - ip " , " emsa " experiments ) ; // ( this won ' t affect experimental form entities if experimentalInteractor element exists ) if ( participantTypes . size ( ) > 1 && participantTypes . contains ( "gene" ) ) { // TODO a better criteria to reliably detect whether ' gene ' interactor type actually means Dna / DnaRegion or Rna / RnaRegion , or indeed Gene ) LOG . warn ( "Interaction: " + interaction . getId ( ) + ", name(s): " + shortName + " " + name + "; has both 'gene' and physical entity type participants: " + participantTypes + "; so we'll replace 'gene' with 'dna' (a quick fix)" ) ; for ( Participant p : interaction . getParticipants ( ) ) { if ( p . hasInteractor ( ) && p . getInteractor ( ) . getInteractorType ( ) . hasNames ( ) ) { String type = getName ( p . getInteractor ( ) . getInteractorType ( ) . getNames ( ) ) ; if ( "gene" . equalsIgnoreCase ( type ) ) { p . getInteractor ( ) . getInteractorType ( ) . getNames ( ) . setShortLabel ( "dna" ) ; } } } } // interate through the psi - mi participants , create corresp . biopax entities final Set < Entity > bpParticipants = new HashSet < Entity > ( ) ; for ( Participant participant : interaction . getParticipants ( ) ) { // get paxtools physical entity participant and add to participant list // ( this also adds experimental evidence and forms ) Entity bpParticipant = createBiopaxEntity ( participant , avail , pro ) ; if ( bpParticipant != null ) { if ( ! bpParticipants . contains ( bpParticipant ) ) bpParticipants . add ( bpParticipant ) ; } } // Process interaction attributes . final Set < String > comments = new HashSet < String > ( ) ; // Set GeneticInteraction flag . // As of BioGRID v3.1.72 ( at least ) , genetic interaction code can reside // as an attribute of the Interaction via " BioGRID Evidence Code " key if ( interaction . hasAttributes ( ) ) { for ( Attribute attribute : interaction . getAttributes ( ) ) { String key = attribute . getName ( ) ; // may be reset below String value = ( attribute . hasValue ( ) ) ? attribute . getValue ( ) : "" ; if ( key . equalsIgnoreCase ( BIOGRID_EVIDENCE_CODE ) && GENETIC_INTERACTIONS . contains ( value ) ) { isGeneticInteraction = true ; // important ! } comments . add ( key + ":" + value ) ; } } // or , if all participants are ' gene ' type , make a biopax GeneticInteraction if ( participantTypes . size ( ) == 1 && participantTypes . contains ( "gene" ) ) { isGeneticInteraction = true ; } // or , check another genetic interaction flag ( criteria ) if ( ! isGeneticInteraction ) { isGeneticInteraction = isGeneticInteraction ( bpEvidences ) ; } if ( ( isComplex || forceInteractionToComplex ) && ! isGeneticInteraction ) { bpInteraction = createComplex ( bpParticipants , interaction . getImexId ( ) , interaction . getId ( ) ) ; } else if ( isGeneticInteraction ) { bpInteraction = createGeneticInteraction ( bpParticipants , interactionVocabularies , interaction . getImexId ( ) , interaction . getId ( ) ) ; } else { bpInteraction = createMolecularInteraction ( bpParticipants , interactionVocabularies , interaction . getImexId ( ) , interaction . getId ( ) ) ; } for ( String c : comments ) { bpInteraction . addComment ( c ) ; } // add evidences to the interaction / complex bpEntity for ( Evidence evidence : bpEvidences ) { bpInteraction . addEvidence ( evidence ) ; // TODO : shall we add IntAct " figure legend " comment to the evidences as well ? } addAvailabilityAndProvenance ( bpInteraction , avail , pro ) ; if ( name != null ) bpInteraction . addName ( name ) ; if ( shortName != null ) { if ( shortName . length ( ) < 51 ) bpInteraction . setDisplayName ( shortName ) ; else bpInteraction . addName ( shortName ) ; } // add xrefs Set < Xref > bpXrefs = new HashSet < Xref > ( ) ; if ( interaction . hasXref ( ) ) { bpXrefs . addAll ( getXrefs ( interaction . getXref ( ) ) ) ; } for ( Xref bpXref : bpXrefs ) { bpInteraction . addXref ( bpXref ) ; } return bpInteraction ;
public class ConfigReader { /** * read config from confingPath * @ param confingFilename like axu4j . xml */ public static void load ( String confingFilename ) { } }
try { if ( config == null ) { config = new AXUConfig ( ) ; logger . debug ( "create new AXUConfig instance" ) ; } // DEV 모드인 경우 각 태그마다 config를 요청하므로 3초에 한 번씩만 설정을 로딩하도록 한다 . long nowTime = ( new Date ( ) ) . getTime ( ) ; if ( nowTime - lastLoadTime < 3000 ) { return ; } else { lastLoadTime = nowTime ; } Serializer serializer = new Persister ( ) ; URL configUrl = config . getClass ( ) . getClassLoader ( ) . getResource ( confingFilename ) ; if ( configUrl == null ) { configUrl = ClassLoader . getSystemClassLoader ( ) . getResource ( confingFilename ) ; } File configFile = new File ( configUrl . toURI ( ) ) ; serializer . read ( config , configFile ) ; logger . info ( "load config from {}" , configFile . getAbsolutePath ( ) ) ; if ( logger . isDebugEnabled ( ) ) { logger . debug ( "axu4j.xml\n{}" , config ) ; } } catch ( Exception e ) { logger . error ( "Fail to load axu4j.xml" , e ) ; }
public class S3StorageProvider { /** * { @ inheritDoc } */ public String addContent ( String spaceId , String contentId , String contentMimeType , Map < String , String > userProperties , long contentSize , String contentChecksum , InputStream content ) { } }
log . debug ( "addContent(" + spaceId + ", " + contentId + ", " + contentMimeType + ", " + contentSize + ", " + contentChecksum + ")" ) ; // Will throw if bucket does not exist String bucketName = getBucketName ( spaceId ) ; // Wrap the content in order to be able to retrieve a checksum ChecksumInputStream wrappedContent = new ChecksumInputStream ( content , contentChecksum ) ; String contentEncoding = removeContentEncoding ( userProperties ) ; userProperties = removeCalculatedProperties ( userProperties ) ; if ( contentMimeType == null || contentMimeType . equals ( "" ) ) { contentMimeType = DEFAULT_MIMETYPE ; } ObjectMetadata objMetadata = new ObjectMetadata ( ) ; objMetadata . setContentType ( contentMimeType ) ; if ( contentSize > 0 ) { objMetadata . setContentLength ( contentSize ) ; } if ( null != contentChecksum && ! contentChecksum . isEmpty ( ) ) { String encodedChecksum = ChecksumUtil . convertToBase64Encoding ( contentChecksum ) ; objMetadata . setContentMD5 ( encodedChecksum ) ; } if ( contentEncoding != null ) { objMetadata . setContentEncoding ( contentEncoding ) ; } if ( userProperties != null ) { for ( String key : userProperties . keySet ( ) ) { String value = userProperties . get ( key ) ; if ( log . isDebugEnabled ( ) ) { log . debug ( "[" + key + "|" + value + "]" ) ; } objMetadata . addUserMetadata ( getSpaceFree ( encodeHeaderKey ( key ) ) , encodeHeaderValue ( value ) ) ; } } PutObjectRequest putRequest = new PutObjectRequest ( bucketName , contentId , wrappedContent , objMetadata ) ; putRequest . setStorageClass ( DEFAULT_STORAGE_CLASS ) ; putRequest . setCannedAcl ( CannedAccessControlList . Private ) ; // Add the object String etag ; try { PutObjectResult putResult = s3Client . putObject ( putRequest ) ; etag = putResult . getETag ( ) ; } catch ( AmazonClientException e ) { if ( e instanceof AmazonS3Exception ) { AmazonS3Exception s3Ex = ( AmazonS3Exception ) e ; String errorCode = s3Ex . getErrorCode ( ) ; Integer statusCode = s3Ex . getStatusCode ( ) ; String message = MessageFormat . format ( "exception putting object {0} into {1}: errorCode={2}," + " statusCode={3}, errorMessage={4}" , contentId , bucketName , errorCode , statusCode , e . getMessage ( ) ) ; if ( errorCode . equals ( "InvalidDigest" ) || errorCode . equals ( "BadDigest" ) ) { log . error ( message , e ) ; String err = "Checksum mismatch detected attempting to add " + "content " + contentId + " to S3 bucket " + bucketName + ". Content was not added." ; throw new ChecksumMismatchException ( err , e , NO_RETRY ) ; } else if ( errorCode . equals ( "IncompleteBody" ) ) { log . error ( message , e ) ; throw new StorageException ( "The content body was incomplete for " + contentId + " to S3 bucket " + bucketName + ". Content was not added." , e , NO_RETRY ) ; } else if ( ! statusCode . equals ( HttpStatus . SC_SERVICE_UNAVAILABLE ) && ! statusCode . equals ( HttpStatus . SC_NOT_FOUND ) ) { log . error ( message , e ) ; } else { log . warn ( message , e ) ; } } else { String err = MessageFormat . format ( "exception putting object {0} into {1}: {2}" , contentId , bucketName , e . getMessage ( ) ) ; log . error ( err , e ) ; } // Check to see if file landed successfully in S3 , despite the exception etag = doesContentExistWithExpectedChecksum ( bucketName , contentId , contentChecksum ) ; if ( null == etag ) { String err = "Could not add content " + contentId + " with type " + contentMimeType + " and size " + contentSize + " to S3 bucket " + bucketName + " due to error: " + e . getMessage ( ) ; throw new StorageException ( err , e , NO_RETRY ) ; } } // Compare checksum String providerChecksum = getETagValue ( etag ) ; String checksum = wrappedContent . getMD5 ( ) ; StorageProviderUtil . compareChecksum ( providerChecksum , spaceId , contentId , checksum ) ; return providerChecksum ;
public class BNFHeadersImpl { /** * Method to marshall a header out in binary mode into the input * buffers ( expanding them if necessary ) . * @ param inBuffers * @ param elem * @ return WsByteBuffer [ ] */ protected WsByteBuffer [ ] marshallBinaryHeader ( WsByteBuffer [ ] inBuffers , HeaderElement elem ) { } }
if ( elem . wasRemoved ( ) ) { return inBuffers ; } WsByteBuffer [ ] buffers = inBuffers ; final byte [ ] value = elem . asRawBytes ( ) ; if ( null != value ) { HeaderKeys key = elem . getKey ( ) ; if ( ! key . isUndefined ( ) ) { buffers = putInt ( GenericConstants . KNOWN_HEADER , buffers ) ; buffers = putInt ( elem . getKey ( ) . getOrdinal ( ) , buffers ) ; } else { buffers = putInt ( GenericConstants . UNKNOWN_HEADER , buffers ) ; buffers = putInt ( key . getByteArray ( ) . length , buffers ) ; buffers = putBytes ( key . getByteArray ( ) , buffers ) ; } buffers = putInt ( elem . getValueLength ( ) , buffers ) ; buffers = putBytes ( value , elem . getOffset ( ) , elem . getValueLength ( ) , buffers ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEventEnabled ( ) ) { Tr . event ( tc , "Marshalling: " + elem . getName ( ) + " [" + elem . getDebugValue ( ) + "]" ) ; } } return buffers ;
public class DistributedLayoutManager { /** * Handles pushing changes made to the passed - in node into the user ' s layout . If the node is an * ILF node then the change is recorded via directives in the PLF if such changes are allowed by * the owning fragment . If the node is a user owned node then the changes are applied directly * to the corresponding node in the PLF . */ @ Override public synchronized boolean updateNode ( IUserLayoutNodeDescription node ) throws PortalException { } }
if ( canUpdateNode ( node ) ) { String nodeId = node . getId ( ) ; IUserLayoutNodeDescription oldNode = getNode ( nodeId ) ; if ( oldNode instanceof IUserLayoutChannelDescription ) { IUserLayoutChannelDescription oldChanDesc = ( IUserLayoutChannelDescription ) oldNode ; if ( ! ( node instanceof IUserLayoutChannelDescription ) ) { throw new PortalException ( "Change channel to folder is " + "not allowed by updateNode() method! Occurred " + "in layout for " + owner . getUserName ( ) + "." ) ; } IUserLayoutChannelDescription newChanDesc = ( IUserLayoutChannelDescription ) node ; updateChannelNode ( nodeId , newChanDesc , oldChanDesc ) ; } else { // must be a folder IUserLayoutFolderDescription oldFolderDesc = ( IUserLayoutFolderDescription ) oldNode ; if ( oldFolderDesc . getId ( ) . equals ( getRootFolderId ( ) ) ) throw new PortalException ( "Update of root node is not currently allowed!" ) ; if ( node instanceof IUserLayoutFolderDescription ) { IUserLayoutFolderDescription newFolderDesc = ( IUserLayoutFolderDescription ) node ; updateFolderNode ( nodeId , newFolderDesc , oldFolderDesc ) ; } } this . updateCacheKey ( ) ; return true ; } return false ;
public class BuildUniqueIdentifierHelper { /** * Get a project according to its full name . * @ param fullName The full name of the project . * @ return The project which answers the full name . */ private static AbstractProject < ? , ? > getProject ( String fullName ) { } }
Item item = Hudson . getInstance ( ) . getItemByFullName ( fullName ) ; if ( item != null && item instanceof AbstractProject ) { return ( AbstractProject < ? , ? > ) item ; } return null ;
public class JarHandler { /** * Build and return the externalized string representation of url . * @ return String the externalized string representation of url * @ param url * a URL */ @ Override protected String toExternalForm ( URL url ) { } }
StringBuilder sb = new StringBuilder ( ) ; sb . append ( "jar:" ) ; sb . append ( url . getFile ( ) ) ; String ref = url . getRef ( ) ; if ( ref != null ) { sb . append ( ref ) ; } return sb . toString ( ) ;
public class SubscriptionItemStream { /** * Mark this itemstream as awaiting deletion */ public void markAsToBeDeleted ( ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "markAsToBeDeleted" ) ; toBeDeleted = true ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "markAsToBeDeleted" ) ;
public class OpenCmsCore { /** * Writes the XML configuration for the provided configuration class . < p > * @ param clazz the configuration class to write the XML for */ protected void writeConfiguration ( Class < ? > clazz ) { } }
// exception handling is provided here to ensure identical log messages try { m_configurationManager . writeConfiguration ( clazz ) ; } catch ( IOException e ) { CmsLog . getLog ( CmsConfigurationManager . class ) . error ( Messages . get ( ) . getBundle ( ) . key ( Messages . LOG_ERROR_WRITING_CONFIG_1 , clazz . getName ( ) ) , e ) ; } catch ( CmsConfigurationException e ) { CmsLog . getLog ( CmsConfigurationManager . class ) . error ( Messages . get ( ) . getBundle ( ) . key ( Messages . LOG_ERROR_WRITING_CONFIG_1 , clazz . getName ( ) ) , e ) ; }
public class JSONSAXHandler { /** * Method to flush out anything remaining in the buffers . */ public void flushBuffer ( ) throws IOException { } }
if ( logger . isLoggable ( Level . FINER ) ) logger . entering ( className , "flushBuffer()" ) ; if ( this . osWriter != null ) { this . osWriter . flush ( ) ; } if ( logger . isLoggable ( Level . FINER ) ) logger . exiting ( className , "flushBuffer()" ) ;
public class StreamHelper { /** * Get the content of the passed Spring resource as one big string in the passed * character set . * @ param aISP * The resource to read . May not be < code > null < / code > . * @ param aCharset * The character set to use . May not be < code > null < / code > . * @ return < code > null < / code > if the resolved input stream is < code > null < / code > , * the content otherwise . */ @ Nullable @ ReturnsMutableCopy public static ICommonsList < String > readStreamLines ( @ Nullable final IHasInputStream aISP , @ Nonnull final Charset aCharset ) { } }
return readStreamLines ( aISP , aCharset , 0 , CGlobal . ILLEGAL_UINT ) ;
public class RecurlyClient { /** * Redeem a { @ link Coupon } on an account . * @ param couponCode redeemed coupon id * @ return the { @ link Coupon } object */ public Redemption redeemCoupon ( final String couponCode , final Redemption redemption ) { } }
return doPOST ( Coupon . COUPON_RESOURCE + "/" + couponCode + Redemption . REDEEM_RESOURCE , redemption , Redemption . class ) ;
public class CmsResourceTypeStatResultList { /** * Adds a result to the list . < p > * @ param result to be added */ public void addResult ( CmsResourceTypeStatResult result ) { } }
if ( ! m_results . contains ( result ) ) { m_results . add ( result ) ; m_updated = false ; } else { m_results . remove ( result ) ; m_results . add ( result ) ; m_updated = true ; }
public class AcroFields { /** * Sets different values in a list selection . * No appearance is generated yet ; nor does the code check if multiple select is allowed . * @ paramnamethe name of the field * @ paramvaluean array with values that need to be selected * @ returntrue only if the field value was changed * @ since 2.1.4 */ public boolean setListSelection ( String name , String [ ] value ) throws IOException , DocumentException { } }
Item item = getFieldItem ( name ) ; if ( item == null ) return false ; PdfName type = item . getMerged ( 0 ) . getAsName ( PdfName . FT ) ; if ( ! PdfName . CH . equals ( type ) ) { return false ; } String [ ] options = getListOptionExport ( name ) ; PdfArray array = new PdfArray ( ) ; for ( int i = 0 ; i < value . length ; i ++ ) { for ( int j = 0 ; j < options . length ; j ++ ) { if ( options [ j ] . equals ( value [ i ] ) ) { array . add ( new PdfNumber ( j ) ) ; } } } item . writeToAll ( PdfName . I , array , Item . WRITE_MERGED | Item . WRITE_VALUE ) ; item . writeToAll ( PdfName . V , null , Item . WRITE_MERGED | Item . WRITE_VALUE ) ; item . writeToAll ( PdfName . AP , null , Item . WRITE_MERGED | Item . WRITE_WIDGET ) ; item . markUsed ( this , Item . WRITE_VALUE | Item . WRITE_WIDGET ) ; return true ;
public class Goro { /** * Gives access to Goro instance that is provided by a service . * @ param binder Goro service binder * @ return Goro instance provided by the service */ public static Goro from ( final IBinder binder ) { } }
if ( binder instanceof GoroService . GoroBinder ) { return ( ( GoroService . GoroBinder ) binder ) . goro ( ) ; } throw new IllegalArgumentException ( "Cannot get Goro from " + binder ) ;
public class ModelsEngine { /** * The Gamma function . * @ param x * @ return the calculated gamma function . */ public static double gamma ( double x ) { } }
double tmp = ( x - 0.5 ) * log ( x + 4.5 ) - ( x + 4.5 ) ; double ser = 1.0 + 76.18009173 / ( x + 0 ) - 86.50532033 / ( x + 1 ) + 24.01409822 / ( x + 2 ) - 1.231739516 / ( x + 3 ) + 0.00120858003 / ( x + 4 ) - 0.00000536382 / ( x + 5 ) ; double gamma = exp ( tmp + log ( ser * sqrt ( 2 * PI ) ) ) ; return gamma ;
public class LogManager { /** * Runs through the log removing segments until the size of the log is at least * logRetentionSize bytes in size * @ throws IOException */ private int cleanupSegmentsToMaintainSize ( final Log log ) throws IOException { } }
if ( logRetentionSize < 0 || log . size ( ) < logRetentionSize ) return 0 ; List < LogSegment > toBeDeleted = log . markDeletedWhile ( new LogSegmentFilter ( ) { long diff = log . size ( ) - logRetentionSize ; public boolean filter ( LogSegment segment ) { diff -= segment . size ( ) ; return diff >= 0 ; } } ) ; return deleteSegments ( log , toBeDeleted ) ;
public class ChemModel { /** * { @ inheritDoc } */ @ Override public boolean isEmpty ( ) { } }
if ( setOfMolecules != null && ! setOfMolecules . isEmpty ( ) ) return false ; if ( setOfReactions != null && ! setOfReactions . isEmpty ( ) ) return false ; if ( ringSet != null && ! ringSet . isEmpty ( ) ) return false ; if ( crystal != null && ! crystal . isEmpty ( ) ) return false ; return true ;
public class GraphFunctionParser { /** * Parse the weight and orientation ( s ) from two strings , given in arbitrary * order . * @ param arg1 Weight or orientation * @ param arg2 Weight or orientation */ public void parseWeightAndOrientation ( String arg1 , String arg2 ) { } }
if ( ( arg1 == null && arg2 == null ) || ( isWeightString ( arg1 ) && arg2 == null ) || ( arg1 == null && isWeightString ( arg2 ) ) ) { // Disable default orientations ( D and WD ) . throw new IllegalArgumentException ( "You must specify the orientation." ) ; } if ( isWeightString ( arg1 ) && isWeightString ( arg2 ) ) { throw new IllegalArgumentException ( "Cannot specify the weight column twice." ) ; } if ( isOrientationString ( arg1 ) && isOrientationString ( arg2 ) ) { throw new IllegalArgumentException ( "Cannot specify the orientation twice." ) ; } if ( isWeightString ( arg1 ) || isOrientationString ( arg2 ) ) { setWeightAndOrientation ( arg1 , arg2 ) ; } if ( isOrientationString ( arg1 ) || isWeightString ( arg2 ) ) { setWeightAndOrientation ( arg2 , arg1 ) ; }
public class Model { /** * Deletes immediate children . */ private void deleteOne2ManyChildrenShallow ( OneToManyAssociation association ) { } }
String targetTable = metaModelOf ( association . getTargetClass ( ) ) . getTableName ( ) ; new DB ( metaModelLocal . getDbName ( ) ) . exec ( "DELETE FROM " + targetTable + " WHERE " + association . getFkName ( ) + " = ?" , getId ( ) ) ;
public class SSOClient { /** * 使用 < code > client _ id < / code > 和 < code > client _ secret < / code > 通过 < code > grant _ type = client _ credentials < / code > 的方式 * 获取access token 。 * 返回SSO颁发给当前应用的access token , 代表当前应用的身份 。 * @ see < a href = " http : / / openid . net / specs / openid - connect - core - 1_0 . html # ClientAuthentication " > ClientAuthentication < / a > * @ since 3.0.1 */ public AccessToken obtainAccessTokenByClientCredentials ( ) throws TokenExpiredException { } }
String key = "obtainAccessTokenByClientCredentials:" + config . getClientId ( ) ; AccessToken accessToken = getAccessTokenFromCache ( key ) ; if ( accessToken != null ) { return accessToken ; } accessToken = tp ( ) . obtainAccessTokenByClientCredentials ( ) ; cp ( ) . put ( key , accessToken , accessToken . getExpires ( ) ) ; return accessToken ;
public class WsByteBufferUtils { /** * Convert a buffer into a byte array . A null or empty buffer will return a * null * byte [ ] . * @ param buff * @ return byte [ ] */ public static final byte [ ] asByteArray ( WsByteBuffer buff ) { } }
if ( null == buff ) return null ; int size = buff . limit ( ) ; if ( 0 == size ) { return null ; } byte [ ] output = new byte [ size ] ; int position = buff . position ( ) ; buff . position ( 0 ) ; buff . get ( output ) ; buff . position ( position ) ; return output ;
public class AwsSecurityFindingFilters { /** * The source domain of network - related information about a finding . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setNetworkSourceDomain ( java . util . Collection ) } or { @ link # withNetworkSourceDomain ( java . util . Collection ) } * if you want to override the existing values . * @ param networkSourceDomain * The source domain of network - related information about a finding . * @ return Returns a reference to this object so that method calls can be chained together . */ public AwsSecurityFindingFilters withNetworkSourceDomain ( StringFilter ... networkSourceDomain ) { } }
if ( this . networkSourceDomain == null ) { setNetworkSourceDomain ( new java . util . ArrayList < StringFilter > ( networkSourceDomain . length ) ) ; } for ( StringFilter ele : networkSourceDomain ) { this . networkSourceDomain . add ( ele ) ; } return this ;
public class CmsSecurityManager { /** * Returns all resources of organizational units for which the current user has * the given role role . < p > * @ param dbc the current database context * @ param role the role to check * @ return a list of { @ link org . opencms . file . CmsResource } objects * @ throws CmsException if something goes wrong */ protected List < CmsResource > getManageableResources ( CmsDbContext dbc , CmsRole role ) throws CmsException { } }
CmsOrganizationalUnit ou = m_driverManager . readOrganizationalUnit ( dbc , role . getOuFqn ( ) ) ; if ( hasRole ( dbc , dbc . currentUser ( ) , role ) ) { return m_driverManager . getResourcesForOrganizationalUnit ( dbc , ou ) ; } List < CmsResource > resources = new ArrayList < CmsResource > ( ) ; Iterator < CmsOrganizationalUnit > it = m_driverManager . getOrganizationalUnits ( dbc , ou , false ) . iterator ( ) ; while ( it . hasNext ( ) ) { CmsOrganizationalUnit orgUnit = it . next ( ) ; resources . addAll ( getManageableResources ( dbc , role . forOrgUnit ( orgUnit . getName ( ) ) ) ) ; } return resources ;
public class UserCoreDao { /** * Query the SQL for a single result typed object * @ param < T > * result value type * @ param sql * sql statement * @ param args * arguments * @ param column * column index * @ return result , null if no result * @ since 3.1.0 */ public < T > T querySingleTypedResult ( String sql , String [ ] args , int column ) { } }
return db . querySingleTypedResult ( sql , args , column ) ;
public class RowExtractors { /** * Create an extractor that extracts the name from the node at the given position in the row . * @ param indexInRow the index of the node in the rows ; must be valid * @ param cache the cache containing the nodes ; may not be null * @ param types the type system ; may not be null * @ return the path extractor ; never null */ public static ExtractFromRow extractLocalName ( final int indexInRow , final NodeCache cache , TypeSystem types ) { } }
final TypeFactory < String > type = types . getStringFactory ( ) ; final boolean trace = NodeSequence . LOGGER . isTraceEnabled ( ) ; return new ExtractFromRow ( ) { @ Override public TypeFactory < String > getType ( ) { return type ; } @ Override public Object getValueInRow ( RowAccessor row ) { CachedNode node = row . getNode ( indexInRow ) ; String name = node . getName ( cache ) . getLocalName ( ) ; if ( trace ) NodeSequence . LOGGER . trace ( "Extracting name from {0}: {1}" , node . getPath ( cache ) , name ) ; return name ; } @ Override public String toString ( ) { return "(extract-local-name)" ; } } ;
public class Smb2ChangeNotifyRequest { /** * { @ inheritDoc } * @ see jcifs . internal . smb2 . ServerMessageBlock2 # writeBytesWireFormat ( byte [ ] , int ) */ @ Override protected int writeBytesWireFormat ( byte [ ] dst , int dstIndex ) { } }
int start = dstIndex ; SMBUtil . writeInt2 ( 32 , dst , dstIndex ) ; SMBUtil . writeInt2 ( this . notifyFlags , dst , dstIndex + 2 ) ; dstIndex += 4 ; SMBUtil . writeInt4 ( this . outputBufferLength , dst , dstIndex ) ; dstIndex += 4 ; System . arraycopy ( this . fileId , 0 , dst , dstIndex , 16 ) ; dstIndex += 16 ; SMBUtil . writeInt4 ( this . completionFilter , dst , dstIndex ) ; dstIndex += 4 ; dstIndex += 4 ; // Reserved return dstIndex - start ;
public class RepositoryBrowser { /** * Returns all the registered { @ link RepositoryBrowser } descriptors . */ public static DescriptorExtensionList < RepositoryBrowser < ? > , Descriptor < RepositoryBrowser < ? > > > all ( ) { } }
return ( DescriptorExtensionList ) Jenkins . getInstance ( ) . getDescriptorList ( RepositoryBrowser . class ) ;
public class DTMDefaultBaseIterators { /** * Get an iterator that can navigate over an XPath Axis , predicated by * the extended type ID . * Returns an iterator that must be initialized * with a start node ( using iterator . setStartNode ( ) ) . * @ param axis One of Axes . ANCESTORORSELF , etc . * @ param type An extended type ID . * @ return A DTMAxisIterator , or null if the given axis isn ' t supported . */ public DTMAxisIterator getTypedAxisIterator ( int axis , int type ) { } }
DTMAxisIterator iterator = null ; /* This causes an error when using patterns for elements that do not exist in the DOM ( translet types which do not correspond to a DOM type are mapped to the DOM . ELEMENT type ) . */ // if ( type = = NO _ TYPE ) { // return ( EMPTYITERATOR ) ; // else if ( type = = ELEMENT ) { // iterator = new FilterIterator ( getAxisIterator ( axis ) , // getElementFilter ( ) ) ; // else { switch ( axis ) { case Axis . SELF : iterator = new TypedSingletonIterator ( type ) ; break ; case Axis . CHILD : iterator = new TypedChildrenIterator ( type ) ; break ; case Axis . PARENT : return ( new ParentIterator ( ) . setNodeType ( type ) ) ; case Axis . ANCESTOR : return ( new TypedAncestorIterator ( type ) ) ; case Axis . ANCESTORORSELF : return ( ( new TypedAncestorIterator ( type ) ) . includeSelf ( ) ) ; case Axis . ATTRIBUTE : return ( new TypedAttributeIterator ( type ) ) ; case Axis . DESCENDANT : iterator = new TypedDescendantIterator ( type ) ; break ; case Axis . DESCENDANTORSELF : iterator = ( new TypedDescendantIterator ( type ) ) . includeSelf ( ) ; break ; case Axis . FOLLOWING : iterator = new TypedFollowingIterator ( type ) ; break ; case Axis . PRECEDING : iterator = new TypedPrecedingIterator ( type ) ; break ; case Axis . FOLLOWINGSIBLING : iterator = new TypedFollowingSiblingIterator ( type ) ; break ; case Axis . PRECEDINGSIBLING : iterator = new TypedPrecedingSiblingIterator ( type ) ; break ; case Axis . NAMESPACE : iterator = new TypedNamespaceIterator ( type ) ; break ; case Axis . ROOT : iterator = new TypedRootIterator ( type ) ; break ; default : throw new DTMException ( XMLMessages . createXMLMessage ( XMLErrorResources . ER_TYPED_ITERATOR_AXIS_NOT_IMPLEMENTED , new Object [ ] { Axis . getNames ( axis ) } ) ) ; // " Error : typed iterator for axis " // + Axis . names [ axis ] + " not implemented " ) ; } } return ( iterator ) ;
public class ServletSendErrorTask { /** * { @ inheritDoc } */ public void perform ( TaskRequest req , TaskResponse res ) { } }
HttpServletResponse resp = ( HttpServletResponse ) response . evaluate ( req , res ) ; Integer sc = ( Integer ) statusCode . evaluate ( req , res ) ; String msg = ( String ) message . evaluate ( req , res ) ; try { resp . sendError ( sc , msg ) ; } catch ( Throwable t ) { throw new RuntimeException ( "Error setting status code " + sc + " with message " + msg , t ) ; }
public class XDMClientParentSbb { /** * ( non - Javadoc ) * @ see XDMClientParent # subscribeFailed ( int , * XDMClientChildSbbLocalObject , * java . net . URI ) */ @ Override public void subscribeFailed ( int responseCode , XDMClientChildSbbLocalObject child , String notifier ) { } }
tracer . severe ( "Failed to subscribe, response = " + responseCode ) ;
public class SegmentHelper { /** * The method sends a WireCommand to iterate over table entries . * @ param tableName Qualified table name . * @ param suggestedEntryCount Suggested number of { @ link TableKey } s to be returned by the SegmentStore . * @ param state Last known state of the iterator . * @ param delegationToken The token to be presented to the segmentstore . * @ param clientRequestId Request id . * @ return A CompletableFuture that will return the next set of { @ link TableKey } s returned from the SegmentStore . */ public CompletableFuture < TableSegment . IteratorItem < TableEntry < byte [ ] , byte [ ] > > > readTableEntries ( final String tableName , final int suggestedEntryCount , final IteratorState state , final String delegationToken , final long clientRequestId ) { } }
final Controller . NodeUri uri = getTableUri ( tableName ) ; final WireCommandType type = WireCommandType . READ_TABLE_ENTRIES ; final long requestId = ( clientRequestId == RequestTag . NON_EXISTENT_ID ) ? idGenerator . get ( ) : clientRequestId ; final IteratorState token = ( state == null ) ? IteratorState . EMPTY : state ; final CompletableFuture < TableSegment . IteratorItem < TableEntry < byte [ ] , byte [ ] > > > result = new CompletableFuture < > ( ) ; final FailingReplyProcessor replyProcessor = new FailingReplyProcessor ( ) { @ Override public void connectionDropped ( ) { log . warn ( requestId , "readTableEntries {} Connection dropped" , tableName ) ; result . completeExceptionally ( new WireCommandFailedException ( type , WireCommandFailedException . Reason . ConnectionDropped ) ) ; } @ Override public void wrongHost ( WireCommands . WrongHost wrongHost ) { log . warn ( requestId , "readTableEntries {} wrong host" , tableName ) ; result . completeExceptionally ( new WireCommandFailedException ( type , WireCommandFailedException . Reason . UnknownHost ) ) ; } @ Override public void noSuchSegment ( WireCommands . NoSuchSegment noSuchSegment ) { log . warn ( requestId , "readTableEntries {} NoSuchSegment" , tableName ) ; result . completeExceptionally ( new WireCommandFailedException ( type , WireCommandFailedException . Reason . SegmentDoesNotExist ) ) ; } @ Override public void tableEntriesRead ( WireCommands . TableEntriesRead tableEntriesRead ) { log . debug ( requestId , "readTableEntries {} successful." , tableName ) ; final IteratorState state = IteratorState . fromBytes ( tableEntriesRead . getContinuationToken ( ) ) ; final List < TableEntry < byte [ ] , byte [ ] > > entries = tableEntriesRead . getEntries ( ) . getEntries ( ) . stream ( ) . map ( e -> { WireCommands . TableKey k = e . getKey ( ) ; TableKey < byte [ ] > tableKey = new TableKeyImpl < > ( getArray ( k . getData ( ) ) , new KeyVersionImpl ( k . getKeyVersion ( ) ) ) ; return new TableEntryImpl < > ( tableKey , getArray ( e . getValue ( ) . getData ( ) ) ) ; } ) . collect ( Collectors . toList ( ) ) ; result . complete ( new TableSegment . IteratorItem < > ( state , entries ) ) ; } @ Override public void processingFailure ( Exception error ) { log . error ( requestId , "readTableEntries {} failed" , tableName , error ) ; handleError ( error , result , type ) ; } @ Override public void authTokenCheckFailed ( WireCommands . AuthTokenCheckFailed authTokenCheckFailed ) { result . completeExceptionally ( new WireCommandFailedException ( new AuthenticationException ( authTokenCheckFailed . toString ( ) ) , type , WireCommandFailedException . Reason . AuthFailed ) ) ; } } ; WireCommands . ReadTableEntries cmd = new WireCommands . ReadTableEntries ( requestId , tableName , delegationToken , suggestedEntryCount , token . toBytes ( ) ) ; sendRequestAsync ( cmd , replyProcessor , result , ModelHelper . encode ( uri ) ) ; return result ;
public class MessageTransport { /** * Add this field in the Record ' s field sequence . */ public BaseField setupField ( int iFieldSeq ) { } }
BaseField field = null ; // if ( iFieldSeq = = 0) // field = new CounterField ( this , ID , Constants . DEFAULT _ FIELD _ LENGTH , null , null ) ; // field . setHidden ( true ) ; // if ( iFieldSeq = = 1) // field = new RecordChangedField ( this , LAST _ CHANGED , Constants . DEFAULT _ FIELD _ LENGTH , null , null ) ; // field . setHidden ( true ) ; // if ( iFieldSeq = = 2) // field = new BooleanField ( this , DELETED , Constants . DEFAULT _ FIELD _ LENGTH , null , new Boolean ( false ) ) ; // field . setHidden ( true ) ; if ( iFieldSeq == 3 ) field = new StringField ( this , DESCRIPTION , 30 , null , null ) ; if ( iFieldSeq == 4 ) field = new StringField ( this , CODE , 10 , null , null ) ; if ( iFieldSeq == 5 ) field = new PasswordPropertiesField ( this , PROPERTIES , Constants . DEFAULT_FIELD_LENGTH , null , null ) ; if ( iFieldSeq == 6 ) field = new MessageTransportTypeField ( this , MESSAGE_TRANSPORT_TYPE , Constants . DEFAULT_FIELD_LENGTH , null , null ) ; if ( field == null ) field = super . setupField ( iFieldSeq ) ; return field ;
public class NetworkMonitor { /** * Creates a new network monitor using the supplied options . * @ throws KNXException on problems on monitor creation */ private void createMonitor ( LinkListener l ) throws KNXException { } }
final KNXMediumSettings medium = ( KNXMediumSettings ) options . get ( "medium" ) ; if ( options . containsKey ( "serial" ) ) { final String p = ( String ) options . get ( "serial" ) ; try { m = new KNXNetworkMonitorFT12 ( Integer . parseInt ( p ) , medium ) ; } catch ( final NumberFormatException e ) { m = new KNXNetworkMonitorFT12 ( p , medium ) ; } } else { // create local and remote socket address for monitor link final InetSocketAddress local = createLocalSocket ( ( InetAddress ) options . get ( "localhost" ) , ( Integer ) options . get ( "localport" ) ) ; final InetSocketAddress host = new InetSocketAddress ( ( InetAddress ) options . get ( "host" ) , ( ( Integer ) options . get ( "port" ) ) . intValue ( ) ) ; // create the monitor link , based on the KNXnet / IP protocol // specify whether network address translation shall be used , // and tell the physical medium of the KNX network m = new KNXNetworkMonitorIP ( local , host , options . containsKey ( "nat" ) , medium ) ; } // add the log writer for monitor log events LogManager . getManager ( ) . addWriter ( m . getName ( ) , w ) ; // on console we want to have all possible information , so enable // decoding of a received raw frame by the monitor link m . setDecodeRawFrames ( true ) ; // listen to monitor link events m . addMonitorListener ( l ) ; // we always need a link closed notification ( even with user supplied listener ) m . addMonitorListener ( new LinkListener ( ) { public void indication ( FrameEvent e ) { } public void linkClosed ( CloseEvent e ) { System . out . println ( "network monitor exit, " + e . getReason ( ) ) ; synchronized ( NetworkMonitor . this ) { NetworkMonitor . this . notify ( ) ; } } } ) ;
public class A_CmsListResourceCollector { /** * Sets the resources parameter . < p > * @ param resources the list of resource names to use */ protected void setResourcesParam ( List < String > resources ) { } }
m_collectorParameter += I_CmsListResourceCollector . SEP_PARAM + I_CmsListResourceCollector . PARAM_RESOURCES + I_CmsListResourceCollector . SEP_KEYVAL ; if ( resources == null ) { // search anywhere m_collectorParameter += "/" ; } else { m_collectorParameter += CmsStringUtil . collectionAsString ( resources , "#" ) ; }
public class BasicFunctionsRuntime { /** * Returns the smallest ( closest to negative infinity ) integer value that is greater than or equal * to the argument . */ public static long ceil ( SoyValue arg ) { } }
if ( arg instanceof IntegerData ) { return ( ( IntegerData ) arg ) . longValue ( ) ; } else { return ( long ) Math . ceil ( arg . floatValue ( ) ) ; }
public class StopMojo { /** * { @ inheritDoc } */ @ SuppressWarnings ( "rawtypes" ) public void doExecute ( ) throws MojoExecutionException , MojoFailureException { } }
Map pluginContext = session . getPluginContext ( pluginDescriptor , project ) ; FileSystemServer mrm = ( FileSystemServer ) pluginContext . get ( StartMojo . getFileSystemServerKey ( getMojoExecution ( ) ) ) ; if ( mrm == null ) { getLog ( ) . info ( "Mock Repository Manager was not started" ) ; return ; } String url = mrm . getUrl ( ) ; getLog ( ) . info ( "Stopping Mock Repository Manager on " + url ) ; mrm . finish ( ) ; try { mrm . waitForFinished ( ) ; getLog ( ) . info ( "Mock Repository Manager " + url + " is stopped." ) ; pluginContext . remove ( FileSystemServer . class . getName ( ) ) ; } catch ( InterruptedException e ) { throw new MojoExecutionException ( e . getMessage ( ) , e ) ; }
public class Blacklist { /** * Adds a new blacklisted ID . * @ param id ID of the blacklisted molecule * @ param score the ubiquity score * @ param context context of ubiquity */ public void addEntry ( String id , int score , RelType context ) { } }
this . score . put ( id , score ) ; this . context . put ( id , context ) ;
public class StringUtils { /** * Extracts the filename from a path . * @ param filePath * the string to parse * @ return the path containing only the filename itself */ public static String getFilenameFromString ( String filePath ) { } }
String newPath = filePath ; int forwardInd = filePath . lastIndexOf ( "/" ) ; int backInd = filePath . lastIndexOf ( "\\" ) ; if ( forwardInd > backInd ) { newPath = filePath . substring ( forwardInd + 1 ) ; } else { newPath = filePath . substring ( backInd + 1 ) ; } // still original if no occurance of " / " or " \ " return newPath ;
public class OnlineUpdateUASparser { /** * Loads the data file from user - agent - string . info * @ throws IOException */ private void loadDataFromInternet ( ) throws IOException { } }
URL url = new URL ( DATA_RETRIVE_URL ) ; InputStream is = url . openStream ( ) ; try { PHPFileParser fp = new PHPFileParser ( is ) ; createInternalDataStructre ( fp . getSections ( ) ) ; } finally { is . close ( ) ; }
public class MultiCameraToEquirectangular { /** * Provides recent images from all the cameras ( should be time and lighting synchronized ) and renders them * into an equirectangular image . The images must be in the same order that the cameras were added . * @ param cameraImages List of camera images */ public void render ( List < T > cameraImages ) { } }
if ( cameraImages . size ( ) != cameras . size ( ) ) throw new IllegalArgumentException ( "Input camera image count doesn't equal the expected number" ) ; // avoid divide by zero errors by initializing it to a small non - zero value GImageMiscOps . fill ( weightImage , 1e-4 ) ; GImageMiscOps . fill ( averageImage , 0 ) ; for ( int i = 0 ; i < cameras . size ( ) ; i ++ ) { Camera c = cameras . get ( i ) ; T cameraImage = cameraImages . get ( i ) ; distort . setModel ( c . equiToCamera ) ; distort . apply ( cameraImage , cameraRendered ) ; // / sum up the total weight for each pixel PixelMath . add ( weightImage , c . mask , weightImage ) ; // apply the weight for this image to the rendered image GPixelMath . multiply ( c . mask , cameraRendered , workImage ) ; // add the result to the average image GPixelMath . add ( workImage , averageImage , averageImage ) ; } // comput the final output by dividing GPixelMath . divide ( averageImage , weightImage , averageImage ) ;
public class HazelcastSlidingWindowRequestRateLimiter { @ Override public boolean resetLimit ( String key ) { } }
IMap < Object , Object > map = hz . getMap ( key ) ; if ( map == null || map . isEmpty ( ) ) { return false ; } map . clear ( ) ; map . destroy ( ) ; return true ;
public class Reflections { /** * Copy paste from Guice MoreTypes */ @ SuppressWarnings ( "unckecked" ) public static Class < ? > getRawType ( Type type ) { } }
if ( type instanceof Class < ? > ) { // type is a normal class . return ( Class < ? > ) type ; } else if ( type instanceof ParameterizedType ) { ParameterizedType parameterizedType = ( ParameterizedType ) type ; // I ' m not exactly sure why getRawType ( ) returns Type instead of Class . // Neal isn ' t either but suspects some pathological case related // to nested classes exists . Type rawType = parameterizedType . getRawType ( ) ; checkArgument ( rawType instanceof Class , "Expected a Class, but <%s> is of type %s" , type , type . getClass ( ) . getName ( ) ) ; // noinspection ConstantConditions return ( Class < ? > ) rawType ; } else if ( type instanceof GenericArrayType ) { Type componentType = ( ( GenericArrayType ) type ) . getGenericComponentType ( ) ; return Array . newInstance ( getRawType ( componentType ) , 0 ) . getClass ( ) ; } else if ( type instanceof TypeVariable ) { // we could use the variable ' s bounds , but that ' ll won ' t work if there are multiple . // having a raw type that ' s more general than necessary is okay return Object . class ; } else { throw new IllegalArgumentException ( "Expected a Class, ParameterizedType, or " + "GenericArrayType, but <" + type + "> is of type " + type . getClass ( ) . getName ( ) ) ; }
public class CmsDomUtil { /** * Creates an iFrame element with the given name attribute . < p > * @ param name the name attribute value * @ return the iFrame element */ public static com . google . gwt . dom . client . Element createIFrameElement ( String name ) { } }
return getDOMImpl ( ) . createIFrameElement ( Document . get ( ) , name ) ;
public class ShareError { /** * List of accounts impacted by the error . * @ param accounts * List of accounts impacted by the error . */ public void setAccounts ( java . util . Collection < String > accounts ) { } }
if ( accounts == null ) { this . accounts = null ; return ; } this . accounts = new java . util . ArrayList < String > ( accounts ) ;
public class GUIObjectDetails { /** * DO NOT tamper with this method */ public String returnArg ( String key ) { } }
SeLionElement element = HtmlSeLionElementSet . getInstance ( ) . findMatch ( key ) ; if ( element == null ) { return key ; } if ( ! element . isUIElement ( ) ) { return key ; } return key . substring ( 0 , key . indexOf ( element . getElementClass ( ) ) ) ;
public class MediumPennTemplateAcceptor { /** * A package - private method that checks whether the path matches any of the * predefined templates . This method is provided so other template classes * have access to the accept logic used by this class . * @ param path a dependency path * @ return { @ code true } if the path matches a template */ static boolean acceptsInternal ( DependencyPath path ) { } }
// First check whether the minimum template acceptor would allow this // path if ( MinimumPennTemplateAcceptor . acceptsInternal ( path ) ) return true ; // Filter out paths that can ' t match the template due to length if ( path . length ( ) > 3 ) return false ; int pathLength = path . length ( ) ; // The medium set of templates contains " null " matches which are wild // cards against any part of speech . We handle these by generating // three possible pattern instances that represent the provided path , // two of which include the wildcard " null " , one each end . If any of // these patterns are found in the medium set , the path is valid . StringBuilder nullStart = new StringBuilder ( pathLength * 16 ) ; StringBuilder nullEnd = new StringBuilder ( pathLength * 16 ) ; StringBuilder noNulls = new StringBuilder ( pathLength * 16 ) ; // Iterate over each pair in the path and create the pattern string that // represents this path . The pattern string is pos : rel : pos [ , . . . ] . DependencyTreeNode first = path . first ( ) ; for ( int i = 1 ; i < pathLength ; ++ i ) { DependencyTreeNode second = path . getNode ( i ) ; // Check that the nodes weren ' t filtered out . If so reject the path // even if the part of speech and relation text may have matched a // template . if ( first . word ( ) . equals ( IteratorFactory . EMPTY_TOKEN ) ) return false ; // Get the relation between the two nodes String rel = path . getRelation ( i - 1 ) ; String firstPos = first . pos ( ) ; String secPos = second . pos ( ) ; // Check whether each POS has a class category to which it should be // mapped . These classes are necessary to handle the singificant // number of variations for a general category of POS ' s , e . g . verb // - > VBZ , VBJ , etc . , which were not present when the MINIPAR tags // were designed by Padó and Lapata . String class1 = POS_TAG_TO_CLASS . get ( firstPos ) ; String class2 = POS_TAG_TO_CLASS . get ( secPos ) ; if ( class1 != null ) firstPos = class1 ; if ( class2 != null ) secPos = class2 ; // Similarly , in order to handle the lex - mod relation , we check // whether the relation , e . g . PMOD , can be mapped to the general // lexical modifier class . String relClass = REL_TO_CLASS . get ( rel ) ; if ( relClass != null ) rel = relClass ; // Create the three relation patterns by checking the current index // compared to the path length . nullStart . append ( ( i == 1 ) ? "(null)" : firstPos ) ; nullStart . append ( ":" ) . append ( rel ) . append ( ":" ) . append ( secPos ) ; nullEnd . append ( firstPos ) . append ( ":" ) . append ( rel ) . append ( ":" ) ; nullEnd . append ( ( i + 1 == pathLength ) ? "(null)" : secPos ) ; noNulls . append ( firstPos ) . append ( ":" ) . append ( rel ) . append ( ":" ) . append ( secPos ) ; // Check whether more elements existing , and if so , add the ' , ' if ( i + 1 < pathLength ) { nullStart . append ( "," ) ; nullEnd . append ( "," ) ; noNulls . append ( "," ) ; } // Last , shift over the node first = second ; } // Extra case for the last token in the path if ( first . word ( ) . equals ( IteratorFactory . EMPTY_TOKEN ) ) return false ; boolean match = MEDIUM_TEMPLATES . contains ( noNulls . toString ( ) ) || MEDIUM_TEMPLATES . contains ( nullStart . toString ( ) ) || MEDIUM_TEMPLATES . contains ( nullEnd . toString ( ) ) ; return match ;
public class ProductSegmentation { /** * Sets the browserLanguageSegment value for this ProductSegmentation . * @ param browserLanguageSegment * The browser language segmentation . { @ link BrowserLanguageTargeting # isTargeted } * must be { @ code true } . * < p > This attribute is optional . */ public void setBrowserLanguageSegment ( com . google . api . ads . admanager . axis . v201902 . BrowserLanguageTargeting browserLanguageSegment ) { } }
this . browserLanguageSegment = browserLanguageSegment ;
public class Classes { /** * Get file resource , that is , resource with < em > file < / em > protocol . Try to load resource throwing exception if not * found . If resource protocol is < em > file < / em > returns it as { @ link java . io . File } otherwise throws unsupported * operation . * @ param name resource name . * @ return resource file . * @ throws NoSuchBeingException if named resource can ' t be loaded . * @ throws UnsupportedOperationException if named resource protocol is not < em > file < / em > . */ public static File getResourceAsFile ( String name ) { } }
URL resourceURL = getResource ( name ) ; if ( resourceURL == null ) { throw new NoSuchBeingException ( "Resource |%s| not found." , name ) ; } String protocol = resourceURL . getProtocol ( ) ; if ( "file" . equals ( protocol ) ) try { return new File ( resourceURL . toURI ( ) ) ; } catch ( URISyntaxException e ) { throw new BugError ( "Invalid syntax on URL returned by getResource." ) ; } throw new UnsupportedOperationException ( "Can't get a file for a resource using protocol" + protocol ) ;
public class Ifc4PackageImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ Override public EClass getIfcElectricMotorType ( ) { } }
if ( ifcElectricMotorTypeEClass == null ) { ifcElectricMotorTypeEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( Ifc4Package . eNS_URI ) . getEClassifiers ( ) . get ( 218 ) ; } return ifcElectricMotorTypeEClass ;
public class CmsFavoriteEntry { /** * Converts this object to JSON . * @ return the JSON representation * @ throws JSONException if JSON operations fail */ public JSONObject toJson ( ) throws JSONException { } }
JSONObject result = new JSONObject ( ) ; if ( m_detailId != null ) { result . put ( JSON_DETAIL , "" + m_detailId ) ; } if ( m_siteRoot != null ) { result . put ( JSON_SITEROOT , m_siteRoot ) ; } if ( m_structureId != null ) { result . put ( JSON_STRUCTUREID , "" + m_structureId ) ; } if ( m_projectId != null ) { result . put ( JSON_PROJECT , "" + m_projectId ) ; } if ( m_type != null ) { result . put ( JSON_TYPE , "" + m_type . getJsonId ( ) ) ; } return result ;
public class PatternsImpl { /** * Adds a batch of patterns to the specified application . * @ param appId The application ID . * @ param versionId The version ID . * @ param patterns A JSON array containing patterns . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the List & lt ; PatternRuleInfo & gt ; object */ public Observable < List < PatternRuleInfo > > batchAddPatternsAsync ( UUID appId , String versionId , List < PatternRuleCreateObject > patterns ) { } }
return batchAddPatternsWithServiceResponseAsync ( appId , versionId , patterns ) . map ( new Func1 < ServiceResponse < List < PatternRuleInfo > > , List < PatternRuleInfo > > ( ) { @ Override public List < PatternRuleInfo > call ( ServiceResponse < List < PatternRuleInfo > > response ) { return response . body ( ) ; } } ) ;
public class ArquillianInterceptor { /** * / * ( non - Javadoc ) * @ see org . spockframework . runtime . extension . AbstractMethodInterceptor # interceptSetupSpecMethod ( org . spockframework . runtime . extension . IMethodInvocation ) */ @ Override public void interceptSetupSpecMethod ( IMethodInvocation invocation ) throws Throwable { } }
Class < ? > specClass = invocation . getSpec ( ) . getReflection ( ) ; log . fine ( "beforeClass " + specClass . getName ( ) ) ; getTestRunner ( ) . beforeClass ( specClass , new InvocationExecutor ( invocation ) ) ;
public class MetaFieldUtil { /** * Returns a new MetaFieldInfo array of all Fields annotated with MetaField * in the runtime object type . If the object is null , this method will * return null . NOTE : This method recursively searches the object . * @ param obj The runtime instance of the object to search . It must be an * instance of the type . If its a subclass of the type , this method * will only return MetaFields of the type passed in . * @ param stringForNullValues If a field is null , this is the string to swap * in as the String value vs . " null " showing up . * @ param ignoreAnnotatedName Whether to ignore the name of the MetaField * and always return the field name instead . If false , this method will * use the annotated name if it exists , otherwise it ' ll use the field name . * @ return The MetaFieldInfo array */ public static MetaFieldInfo [ ] toMetaFieldInfoArray ( Object obj , String stringForNullValues , boolean ignoreAnnotatedName ) { } }
return toMetaFieldInfoArray ( ( obj != null ? obj . getClass ( ) : null ) , obj , stringForNullValues , ignoreAnnotatedName , true ) ;
public class AdminRelatedqueryAction { @ Execute public HtmlResponse details ( final int crudMode , final String id ) { } }
verifyCrudMode ( crudMode , CrudMode . DETAILS ) ; saveToken ( ) ; return asDetailsHtml ( ) . useForm ( EditForm . class , op -> { op . setup ( form -> { relatedQueryService . getRelatedQuery ( id ) . ifPresent ( entity -> { copyBeanToBean ( entity , form , copyOp -> { copyOp . excludeNull ( ) ; copyOp . exclude ( Constants . QUERIES ) ; } ) ; form . queries = stream ( entity . getQueries ( ) ) . get ( stream -> stream . filter ( StringUtil :: isNotBlank ) . collect ( Collectors . joining ( "\n" ) ) ) ; form . crudMode = crudMode ; } ) . orElse ( ( ) -> { throwValidationError ( messages -> messages . addErrorsCrudCouldNotFindCrudTable ( GLOBAL , id ) , ( ) -> asListHtml ( ) ) ; } ) ; } ) ; } ) ;
public class MoreSwingUtilities { /** * Implementation of { @ link SwingUtilities # convertMouseEvent ( * Component , MouseEvent , Component ) } that properly sets the * button of the returned event . * ( See http : / / bugs . sun . com / bugdatabase / view _ bug . do ? bug _ id = 7181403 ) * @ param source The source component * @ param sourceEvent The source event * @ param destination The destination component * @ return The resulting mouse event */ public static MouseEvent convertMouseEvent ( Component source , MouseEvent sourceEvent , Component destination ) { } }
Point p = SwingUtilities . convertPoint ( source , sourceEvent . getPoint ( ) , destination ) ; Component newSource = source ; if ( destination != null ) { newSource = destination ; } MouseEvent newEvent = null ; if ( sourceEvent instanceof MouseWheelEvent ) { MouseWheelEvent sourceWheelEvent = ( MouseWheelEvent ) sourceEvent ; newEvent = new MouseWheelEvent ( newSource , sourceWheelEvent . getID ( ) , sourceWheelEvent . getWhen ( ) , sourceWheelEvent . getModifiersEx ( ) , p . x , p . y , sourceWheelEvent . getXOnScreen ( ) , sourceWheelEvent . getYOnScreen ( ) , sourceWheelEvent . getClickCount ( ) , sourceWheelEvent . isPopupTrigger ( ) , sourceWheelEvent . getScrollType ( ) , sourceWheelEvent . getScrollAmount ( ) , sourceWheelEvent . getWheelRotation ( ) ) ; } else if ( sourceEvent instanceof MenuDragMouseEvent ) { MenuDragMouseEvent sourceMenuDragEvent = ( MenuDragMouseEvent ) sourceEvent ; newEvent = new MenuDragMouseEvent ( newSource , sourceMenuDragEvent . getID ( ) , sourceMenuDragEvent . getWhen ( ) , sourceMenuDragEvent . getModifiersEx ( ) , p . x , p . y , sourceMenuDragEvent . getXOnScreen ( ) , sourceMenuDragEvent . getYOnScreen ( ) , sourceMenuDragEvent . getClickCount ( ) , sourceMenuDragEvent . isPopupTrigger ( ) , sourceMenuDragEvent . getPath ( ) , sourceMenuDragEvent . getMenuSelectionManager ( ) ) ; } else { newEvent = new MouseEvent ( newSource , sourceEvent . getID ( ) , sourceEvent . getWhen ( ) , sourceEvent . getModifiersEx ( ) , p . x , p . y , sourceEvent . getXOnScreen ( ) , sourceEvent . getYOnScreen ( ) , sourceEvent . getClickCount ( ) , sourceEvent . isPopupTrigger ( ) , sourceEvent . getButton ( ) ) ; } return newEvent ;
public class PutRecordsRequest { /** * The records associated with the request . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setRecords ( java . util . Collection ) } or { @ link # withRecords ( java . util . Collection ) } if you want to override * the existing values . * @ param records * The records associated with the request . * @ return Returns a reference to this object so that method calls can be chained together . */ public PutRecordsRequest withRecords ( PutRecordsRequestEntry ... records ) { } }
if ( this . records == null ) { setRecords ( new com . amazonaws . internal . SdkInternalList < PutRecordsRequestEntry > ( records . length ) ) ; } for ( PutRecordsRequestEntry ele : records ) { this . records . add ( ele ) ; } return this ;
public class PipelineInterpreter { /** * the initialStreamIds are not mutated , but are begin passed for efficiency , as they are being used later in # process ( ) */ private ImmutableSet < Pipeline > selectPipelines ( InterpreterListener interpreterListener , Set < Tuple2 < String , String > > processingBlacklist , Message message , Set < String > initialStreamIds , ImmutableSetMultimap < String , Pipeline > streamConnection ) { } }
final String msgId = message . getId ( ) ; // if a message - stream combination has already been processed ( is in the set ) , skip that execution final Set < String > streamsIds = initialStreamIds . stream ( ) . filter ( streamId -> ! processingBlacklist . contains ( tuple ( msgId , streamId ) ) ) . filter ( streamConnection :: containsKey ) . collect ( Collectors . toSet ( ) ) ; final ImmutableSet < Pipeline > pipelinesToRun = streamsIds . stream ( ) . flatMap ( streamId -> streamConnection . get ( streamId ) . stream ( ) ) . collect ( ImmutableSet . toImmutableSet ( ) ) ; interpreterListener . processStreams ( message , pipelinesToRun , streamsIds ) ; log . debug ( "[{}] running pipelines {} for streams {}" , msgId , pipelinesToRun , streamsIds ) ; return pipelinesToRun ;
public class JsonPropertyExpander { /** * Populates the collection property of the entity with field values . * @ param entity the entity * @ param keySet the set of entity properties * @ param property the collection property * @ param field the Java field * @ param node the current node * @ param map the map of field values * @ throws ODataException If unable to fill collection properties */ public void fillCollectionProperty ( Object entity , Set < String > keySet , StructuralProperty property , Field field , String node , Map < String , Object > map ) throws ODataException { } }
for ( String target : keySet ) { if ( node . equalsIgnoreCase ( target ) ) { Iterable subValues = ( Iterable ) map . get ( target ) ; List < Object > valueList = new ArrayList < > ( ) ; for ( Object subValue : subValues ) { Object value = getFieldValueByType ( property . getElementTypeName ( ) , subValue , map , true ) ; if ( value != null ) { valueList . add ( value ) ; } } setFieldValue ( field , entity , valueList ) ; break ; } }
public class JobStepsInner { /** * Gets the specified version of a job step . * @ param resourceGroupName The name of the resource group that contains the resource . You can obtain this value from the Azure Resource Manager API or the portal . * @ param serverName The name of the server . * @ param jobAgentName The name of the job agent . * @ param jobName The name of the job . * @ param jobVersion The version of the job to get . * @ param stepName The name of the job step . * @ param serviceCallback the async ServiceCallback to handle successful and failed responses . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the { @ link ServiceFuture } object */ public ServiceFuture < JobStepInner > getByVersionAsync ( String resourceGroupName , String serverName , String jobAgentName , String jobName , int jobVersion , String stepName , final ServiceCallback < JobStepInner > serviceCallback ) { } }
return ServiceFuture . fromResponse ( getByVersionWithServiceResponseAsync ( resourceGroupName , serverName , jobAgentName , jobName , jobVersion , stepName ) , serviceCallback ) ;
public class IterableSubject { /** * Checks ( with a side - effect failure ) that the subject does not contain the supplied item . */ public final void doesNotContain ( @ NullableDecl Object element ) { } }
if ( Iterables . contains ( actual ( ) , element ) ) { failWithActual ( "expected not to contain" , element ) ; }
public class MutablePeriod { /** * Adds to each field of this period . * @ param years amount of years to add to this period , which must be zero if unsupported * @ param months amount of months to add to this period , which must be zero if unsupported * @ param weeks amount of weeks to add to this period , which must be zero if unsupported * @ param days amount of days to add to this period , which must be zero if unsupported * @ param hours amount of hours to add to this period , which must be zero if unsupported * @ param minutes amount of minutes to add to this period , which must be zero if unsupported * @ param seconds amount of seconds to add to this period , which must be zero if unsupported * @ param millis amount of milliseconds to add to this period , which must be zero if unsupported * @ throws IllegalArgumentException if the period being added contains a field * not supported by this period * @ throws ArithmeticException if the addition exceeds the capacity of the period */ public void add ( int years , int months , int weeks , int days , int hours , int minutes , int seconds , int millis ) { } }
setPeriod ( FieldUtils . safeAdd ( getYears ( ) , years ) , FieldUtils . safeAdd ( getMonths ( ) , months ) , FieldUtils . safeAdd ( getWeeks ( ) , weeks ) , FieldUtils . safeAdd ( getDays ( ) , days ) , FieldUtils . safeAdd ( getHours ( ) , hours ) , FieldUtils . safeAdd ( getMinutes ( ) , minutes ) , FieldUtils . safeAdd ( getSeconds ( ) , seconds ) , FieldUtils . safeAdd ( getMillis ( ) , millis ) ) ;
public class WrappedByteBuffer { /** * Skips the specified number of bytes from the current position . * @ param size the number of bytes to skip * @ return the buffer */ public WrappedByteBuffer skip ( int size ) { } }
_autoExpand ( size ) ; _buf . position ( _buf . position ( ) + size ) ; return this ;
public class ComicChatOverlay { /** * Create a tail to the specified rectangular area from the speaker point . */ protected Shape getTail ( int type , Rectangle r , Point speaker ) { } }
// emotes don ' t actually have tails if ( ChatLogic . modeOf ( type ) == ChatLogic . EMOTE ) { return new Area ( ) ; // empty shape } int midx = r . x + ( r . width / 2 ) ; int midy = r . y + ( r . height / 2 ) ; // we actually want to start about SPEAKER _ DISTANCE away from the // speaker int xx = speaker . x - midx ; int yy = speaker . y - midy ; float dist = ( float ) Math . sqrt ( xx * xx + yy * yy ) ; float perc = ( dist - SPEAKER_DISTANCE ) / dist ; if ( ChatLogic . modeOf ( type ) == ChatLogic . THINK ) { int steps = Math . max ( ( int ) ( dist / SPEAKER_DISTANCE ) , 2 ) ; float step = perc / steps ; Area a = new Area ( ) ; for ( int ii = 0 ; ii < steps ; ii ++ , perc -= step ) { int radius = Math . min ( SPEAKER_DISTANCE / 2 - 1 , ii + 2 ) ; a . add ( new Area ( new Ellipse2D . Float ( ( int ) ( ( 1 - perc ) * midx + perc * speaker . x ) + perc * radius , ( int ) ( ( 1 - perc ) * midy + perc * speaker . y ) + perc * radius , radius * 2 , radius * 2 ) ) ) ; } return a ; } // ELSE draw a triangular tail shape Polygon p = new Polygon ( ) ; p . addPoint ( ( int ) ( ( 1 - perc ) * midx + perc * speaker . x ) , ( int ) ( ( 1 - perc ) * midy + perc * speaker . y ) ) ; if ( Math . abs ( speaker . x - midx ) > Math . abs ( speaker . y - midy ) ) { int x ; if ( midx > speaker . x ) { x = r . x + PAD ; } else { x = r . x + r . width - PAD ; } p . addPoint ( x , midy - ( TAIL_WIDTH / 2 ) ) ; p . addPoint ( x , midy + ( TAIL_WIDTH / 2 ) ) ; } else { int y ; if ( midy > speaker . y ) { y = r . y + PAD ; } else { y = r . y + r . height - PAD ; } p . addPoint ( midx - ( TAIL_WIDTH / 2 ) , y ) ; p . addPoint ( midx + ( TAIL_WIDTH / 2 ) , y ) ; } return p ;
public class Auth { /** * This method is called via a global method defined in AuthImpl . register ( ) */ @ SuppressWarnings ( "unused" ) void finish ( String hash ) { } }
TokenInfo info = new TokenInfo ( ) ; String error = null ; String errorDesc = "" ; String errorUri = "" ; // Iterate over keys and values in the string hash value to find relevant // information like the access token or an error message . The string will be // in the form of : # key1 = val1 & key2 = val2 & key3 = val3 ( etc . ) int idx = 1 ; while ( idx < hash . length ( ) - 1 ) { // Grab the next key ( between start and ' = ' ) int nextEq = hash . indexOf ( '=' , idx ) ; if ( nextEq < 0 ) { break ; } String key = hash . substring ( idx , nextEq ) ; // Grab the next value ( between ' = ' and ' & ' ) int nextAmp = hash . indexOf ( '&' , nextEq ) ; nextAmp = nextAmp < 0 ? hash . length ( ) : nextAmp ; String val = hash . substring ( nextEq + 1 , nextAmp ) ; // Start looking from here from now on . idx = nextAmp + 1 ; // Store relevant values to be used later . if ( key . equals ( "access_token" ) ) { info . setAccessToken ( val ) ; } else if ( key . equals ( "token_type" ) ) { info . setTokenType ( val ) ; } else if ( key . equals ( "expires_in" ) ) { // expires _ in is seconds , convert to milliseconds and add to now Double expiresIn = Double . valueOf ( val ) * 1000 ; info . setExpires ( String . valueOf ( clock . now ( ) + expiresIn ) ) ; } else if ( key . equals ( "error" ) ) { error = val ; } else if ( key . equals ( "error_description" ) ) { errorDesc = " (" + val + ")" ; } else if ( key . equals ( "error_uri" ) ) { errorUri = "; see: " + val ; } } if ( error != null ) { lastCallback . onFailure ( new RuntimeException ( "Error from provider: " + error + errorDesc + errorUri ) ) ; } else if ( info . getAccessToken ( ) == null ) { lastCallback . onFailure ( new RuntimeException ( "Could not find access_token in hash " + hash ) ) ; } else { setToken ( lastRequest , info ) ; lastCallback . onSuccess ( info ) ; }
public class QrCodePositionPatternDetector { /** * Detects position patterns inside the image and forms a graph . * @ param gray Gray scale input image * @ param binary Thresholed version of gray image . */ public void process ( T gray , GrayU8 binary ) { } }
configureContourDetector ( gray ) ; recycleData ( ) ; positionPatterns . reset ( ) ; interpolate . setImage ( gray ) ; // detect squares squareDetector . process ( gray , binary ) ; long time0 = System . nanoTime ( ) ; squaresToPositionList ( ) ; long time1 = System . nanoTime ( ) ; // Create graph of neighboring squares createPositionPatternGraph ( ) ; // long time2 = System . nanoTime ( ) ; / / doesn ' t take very long double milli = ( time1 - time0 ) * 1e-6 ; milliGraph . update ( milli ) ; if ( profiler ) { DetectPolygonFromContour < T > detectorPoly = squareDetector . getDetector ( ) ; System . out . printf ( " contour %5.1f shapes %5.1f adjust_bias %5.2f PosPat %6.2f" , detectorPoly . getMilliContour ( ) , detectorPoly . getMilliShapes ( ) , squareDetector . getMilliAdjustBias ( ) , milliGraph . getAverage ( ) ) ; }
public class ClassWriter { /** * where */ void writeStackMapType ( Type t ) { } }
if ( t == null ) { if ( debugstackmap ) System . out . print ( "empty" ) ; databuf . appendByte ( 0 ) ; } else switch ( t . getTag ( ) ) { case BYTE : case CHAR : case SHORT : case INT : case BOOLEAN : if ( debugstackmap ) System . out . print ( "int" ) ; databuf . appendByte ( 1 ) ; break ; case FLOAT : if ( debugstackmap ) System . out . print ( "float" ) ; databuf . appendByte ( 2 ) ; break ; case DOUBLE : if ( debugstackmap ) System . out . print ( "double" ) ; databuf . appendByte ( 3 ) ; break ; case LONG : if ( debugstackmap ) System . out . print ( "long" ) ; databuf . appendByte ( 4 ) ; break ; case BOT : // null if ( debugstackmap ) System . out . print ( "null" ) ; databuf . appendByte ( 5 ) ; break ; case CLASS : case ARRAY : if ( debugstackmap ) System . out . print ( "object(" + t + ")" ) ; databuf . appendByte ( 7 ) ; databuf . appendChar ( pool . put ( t ) ) ; break ; case TYPEVAR : if ( debugstackmap ) System . out . print ( "object(" + types . erasure ( t ) . tsym + ")" ) ; databuf . appendByte ( 7 ) ; databuf . appendChar ( pool . put ( types . erasure ( t ) . tsym ) ) ; break ; case UNINITIALIZED_THIS : if ( debugstackmap ) System . out . print ( "uninit_this" ) ; databuf . appendByte ( 6 ) ; break ; case UNINITIALIZED_OBJECT : { UninitializedType uninitType = ( UninitializedType ) t ; databuf . appendByte ( 8 ) ; if ( debugstackmap ) System . out . print ( "uninit_object@" + uninitType . offset ) ; databuf . appendChar ( uninitType . offset ) ; } break ; default : throw new AssertionError ( ) ; }
public class TitlePaneButtonForegroundPainter { /** * Paint the enabled state of the button foreground . * @ param g the Graphics2D context to paint with . * @ param c the button to paint . * @ param width the width to paint . * @ param height the height to paint . */ public void paintEnabled ( Graphics2D g , JComponent c , int width , int height ) { } }
paint ( g , c , width , height , enabledBorder , enabledCorner , enabledInterior ) ;
public class BrowsersDataProvider { /** * Get unique browsers available in a selenium grid . * @ param context context * @ param testConstructor testConstructor * @ return an iterator * @ throws Exception exception */ @ DataProvider ( parallel = true ) public static Iterator < String [ ] > availableUniqueBrowsers ( ITestContext context , Constructor < ? > testConstructor ) throws Exception { } }
Map < String , String > map = new HashMap < String , String > ( ) ; List < String > browsers = gridBrowsers ( map ) ; HashSet < String > hs = new HashSet < String > ( ) ; hs . addAll ( browsers ) ; browsers . clear ( ) ; browsers . addAll ( hs ) ; return buildIterator ( browsers ) ;
public class DescribeServiceErrorsResult { /** * An array of < code > ServiceError < / code > objects that describe the specified service errors . * @ param serviceErrors * An array of < code > ServiceError < / code > objects that describe the specified service errors . */ public void setServiceErrors ( java . util . Collection < ServiceError > serviceErrors ) { } }
if ( serviceErrors == null ) { this . serviceErrors = null ; return ; } this . serviceErrors = new com . amazonaws . internal . SdkInternalList < ServiceError > ( serviceErrors ) ;
public class SerializationUtils { /** * Decode and deserialize object t . * @ param < T > the type parameter * @ param object the object * @ param cipher the cipher * @ param type the type * @ return the t */ @ SneakyThrows public static < T extends Serializable > T decodeAndDeserializeObject ( final byte [ ] object , final CipherExecutor cipher , final Class < T > type ) { } }
return decodeAndDeserializeObject ( object , cipher , type , ArrayUtils . EMPTY_OBJECT_ARRAY ) ;
public class DecimalFormat { /** * Sets the < tt > Currency Usage < / tt > object used to display currency . * This takes effect immediately , if this format is a * currency format . * @ param newUsage new currency context object to use . */ public void setCurrencyUsage ( CurrencyUsage newUsage ) { } }
if ( newUsage == null ) { throw new NullPointerException ( "return value is null at method AAA" ) ; } currencyUsage = newUsage ; Currency theCurrency = this . getCurrency ( ) ; // We set rounding / digit based on currency context if ( theCurrency != null ) { setRoundingIncrement ( theCurrency . getRoundingIncrement ( currencyUsage ) ) ; int d = theCurrency . getDefaultFractionDigits ( currencyUsage ) ; setMinimumFractionDigits ( d ) ; _setMaximumFractionDigits ( d ) ; }
public class WhiteboxImpl { /** * Sets the field . * @ param object the object * @ param value the value * @ param foundField the found field */ private static void setField ( Object object , Object value , Field foundField ) { } }
foundField . setAccessible ( true ) ; try { int fieldModifiersMask = foundField . getModifiers ( ) ; removeFinalModifierIfPresent ( foundField ) ; foundField . set ( object , value ) ; restoreModifiersToFieldIfChanged ( fieldModifiersMask , foundField ) ; } catch ( IllegalAccessException e ) { throw new RuntimeException ( "Internal error: Failed to set field in method setInternalState." , e ) ; }
public class ExpressionArithmetic { /** * A VoltDB extension to use X ' . . ' as a numeric value */ private void voltConvertBinaryLiteralOperandsToBigint ( ) { } }
// Strange that CONCAT is an arithmetic operator . // You could imagine using it for VARBINARY , so // definitely don ' t convert its operands to BIGINT ! assert ( opType != OpTypes . CONCAT ) ; for ( int i = 0 ; i < nodes . length ; ++ i ) { Expression e = nodes [ i ] ; ExpressionValue . voltMutateToBigintType ( e , this , i ) ; }
public class HttpRequest { /** * Set the request content . * @ param content the request content * @ return this HttpRequest */ public HttpRequest withBody ( String content ) { } }
final byte [ ] bodyBytes = content . getBytes ( StandardCharsets . UTF_8 ) ; return withBody ( bodyBytes ) ;
public class CmsDisplayTypeSelectWidget { /** * Updates the select options from the given entity . < p > * @ param entity a top - level content entity */ public void update ( CmsEntity entity ) { } }
String filterType = NO_FILTER ; if ( m_matchTypes ) { List < Object > values = CmsEntity . getValuesForPath ( entity , m_valuePath ) ; if ( values . size ( ) > 1 ) { String firstValue = ( String ) values . get ( 0 ) ; CmsPair < String , String > val = m_options . get ( firstValue ) ; if ( val != null ) { filterType = val . getSecond ( ) ; } } } if ( ! filterType . equals ( m_filterType ) ) { boolean noFilter = NO_FILTER . equals ( filterType ) ; Map < String , String > items = new LinkedHashMap < String , String > ( ) ; // add empty option items . put ( "" , m_emptyLabel ) ; for ( Entry < String , CmsPair < String , String > > optEntry : m_options . entrySet ( ) ) { if ( noFilter || filterType . equals ( optEntry . getValue ( ) . getSecond ( ) ) ) { items . put ( optEntry . getKey ( ) , optEntry . getValue ( ) . getFirst ( ) ) ; } } replaceItems ( items ) ; } m_filterType = filterType ;
public class Journal { /** * setter for impactFactor - sets The impact factor of the journal at the time of publication , O * @ generated * @ param v value to set into the feature */ public void setImpactFactor ( String v ) { } }
if ( Journal_Type . featOkTst && ( ( Journal_Type ) jcasType ) . casFeat_impactFactor == null ) jcasType . jcas . throwFeatMissing ( "impactFactor" , "de.julielab.jules.types.Journal" ) ; jcasType . ll_cas . ll_setStringValue ( addr , ( ( Journal_Type ) jcasType ) . casFeatCode_impactFactor , v ) ;
public class UriUtils { /** * Creates a new URL string from the specified base URL and parameters . * @ param baseUrl The base URL excluding parameters . * @ param params The parameters . * @ return The full URL string . */ public static String newUrl ( final String baseUrl , final Collection < Pair < String , String > > params ) { } }
final StringBuilder sb = new StringBuilder ( ) ; sb . append ( baseUrl ) ; sb . append ( getUrlParameters ( params , true ) ) ; return sb . toString ( ) ;
public class GVRTransform { /** * Modify the tranform ' s current rotation in quaternion terms . * @ param w * ' W ' component of the quaternion . * @ param x * ' X ' component of the quaternion . * @ param y * ' Y ' component of the quaternion . * @ param z * ' Z ' component of the quaternion . */ public void rotate ( float w , float x , float y , float z ) { } }
NativeTransform . rotate ( getNative ( ) , w , x , y , z ) ;
public class GroupLayout { /** * Creates a { @ link JPanel } that is configured with an { @ link * HGroupLayout } with the specified on - axis policy , justification and * off - axis policy ( default configuration otherwise ) . */ public static JPanel makeHBox ( Policy policy , Justification justification , Policy offAxisPolicy ) { } }
return new JPanel ( new HGroupLayout ( policy , offAxisPolicy , DEFAULT_GAP , justification ) ) ;
public class LongTermRetentionBackupsInner { /** * Lists all long term retention backups for a database . * @ param locationName The location of the database * @ param longTermRetentionServerName the String value * @ param longTermRetentionDatabaseName the String value * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the PagedList & lt ; LongTermRetentionBackupInner & gt ; object */ public Observable < ServiceResponse < Page < LongTermRetentionBackupInner > > > listByDatabaseWithServiceResponseAsync ( final String locationName , final String longTermRetentionServerName , final String longTermRetentionDatabaseName ) { } }
return listByDatabaseSinglePageAsync ( locationName , longTermRetentionServerName , longTermRetentionDatabaseName ) . concatMap ( new Func1 < ServiceResponse < Page < LongTermRetentionBackupInner > > , Observable < ServiceResponse < Page < LongTermRetentionBackupInner > > > > ( ) { @ Override public Observable < ServiceResponse < Page < LongTermRetentionBackupInner > > > call ( ServiceResponse < Page < LongTermRetentionBackupInner > > page ) { String nextPageLink = page . body ( ) . nextPageLink ( ) ; if ( nextPageLink == null ) { return Observable . just ( page ) ; } return Observable . just ( page ) . concatWith ( listByDatabaseNextWithServiceResponseAsync ( nextPageLink ) ) ; } } ) ;
public class SU { /** * Returns a { @ link VirtualChannel } that ' s connected to the privilege - escalated environment . * @ param listener * What this method is doing ( such as what process it ' s invoking ) will be sent here . * @ return * Never null . This may represent a channel to a separate JVM , or just { @ link LocalChannel } . * Close this channel and the SU environment will be shut down . */ public static VirtualChannel start ( final TaskListener listener , final String rootUsername , final String rootPassword ) throws IOException , InterruptedException { } }
if ( File . pathSeparatorChar == ';' ) // on Windows return newLocalChannel ( ) ; // TODO : perhaps use RunAs to run as an Administrator ? String os = Util . fixNull ( System . getProperty ( "os.name" ) ) ; if ( os . equals ( "Linux" ) ) return new UnixSu ( ) { protected String sudoExe ( ) { return "sudo" ; } protected Process sudoWithPass ( ArgumentListBuilder args ) throws IOException { args . prepend ( sudoExe ( ) , "-S" ) ; listener . getLogger ( ) . println ( "$ " + Util . join ( args . toList ( ) , " " ) ) ; ProcessBuilder pb = new ProcessBuilder ( args . toCommandArray ( ) ) ; Process p = pb . start ( ) ; // TODO : use - p to detect prompt // TODO : detect if the password didn ' t work PrintStream ps = new PrintStream ( p . getOutputStream ( ) ) ; ps . println ( rootPassword ) ; ps . println ( rootPassword ) ; ps . println ( rootPassword ) ; return p ; } } . start ( listener , rootPassword ) ; if ( os . equals ( "SunOS" ) ) return new UnixSu ( ) { protected String sudoExe ( ) { return "/usr/bin/pfexec" ; } protected Process sudoWithPass ( ArgumentListBuilder args ) throws IOException { listener . getLogger ( ) . println ( "Running with embedded_su" ) ; ProcessBuilder pb = new ProcessBuilder ( args . prepend ( sudoExe ( ) ) . toCommandArray ( ) ) ; return EmbeddedSu . startWithSu ( rootUsername , rootPassword , pb ) ; } // in solaris , pfexec never asks for a password , so username = = null means // we won ' t be using password . this helps disambiguate empty password } . start ( listener , rootUsername == null ? null : rootPassword ) ; // TODO : Mac ? // unsupported platform , take a chance return newLocalChannel ( ) ;
public class GImageBandMath { /** * Computes the minimum for each pixel across specified bands in the { @ link Planar } image . * @ param input Planar image * @ param output Gray scale image containing minimum pixel values * @ param startBand First band to be considered * @ param lastBand Last band ( inclusive ) to be considered */ public static < T extends ImageGray < T > > void minimum ( Planar < T > input , T output , int startBand , int lastBand ) { } }
if ( GrayU8 . class == input . getBandType ( ) ) { ImageBandMath . minimum ( ( Planar < GrayU8 > ) input , ( GrayU8 ) output , startBand , lastBand ) ; } else if ( GrayU16 . class == input . getBandType ( ) ) { ImageBandMath . minimum ( ( Planar < GrayU16 > ) input , ( GrayU16 ) output , startBand , lastBand ) ; } else if ( GrayS16 . class == input . getBandType ( ) ) { ImageBandMath . minimum ( ( Planar < GrayS16 > ) input , ( GrayS16 ) output , startBand , lastBand ) ; } else if ( GrayS32 . class == input . getBandType ( ) ) { ImageBandMath . minimum ( ( Planar < GrayS32 > ) input , ( GrayS32 ) output , startBand , lastBand ) ; } else if ( GrayS64 . class == input . getBandType ( ) ) { ImageBandMath . minimum ( ( Planar < GrayS64 > ) input , ( GrayS64 ) output , startBand , lastBand ) ; } else if ( GrayF32 . class == input . getBandType ( ) ) { ImageBandMath . minimum ( ( Planar < GrayF32 > ) input , ( GrayF32 ) output , startBand , lastBand ) ; } else if ( GrayF64 . class == input . getBandType ( ) ) { ImageBandMath . minimum ( ( Planar < GrayF64 > ) input , ( GrayF64 ) output , startBand , lastBand ) ; } else { throw new IllegalArgumentException ( "Unknown image Type: " + input . getBandType ( ) . getSimpleName ( ) ) ; }
public class TextUtils { /** * Converts String array with waypoint _ names values * to a string ready for API consumption . * @ param waypointNames a string representing approaches to each coordinate . * @ return a formatted string . * @ since 3.3.0 */ public static String formatWaypointNames ( String [ ] waypointNames ) { } }
for ( int i = 0 ; i < waypointNames . length ; i ++ ) { if ( waypointNames [ i ] == null ) { waypointNames [ i ] = "" ; } } return TextUtils . join ( ";" , waypointNames ) ;
public class ProcessExecutorImpl { /** * / / / / / other */ private void cancelTasksOfProcessInstance ( ProcessInstance procInst ) throws NamingException , JMSException , SQLException , ServiceLocatorException , MdwException { } }
List < ProcessInstance > processInstanceList = edao . getChildProcessInstances ( procInst . getId ( ) ) ; List < Long > procInstIds = new ArrayList < Long > ( ) ; procInstIds . add ( procInst . getId ( ) ) ; for ( ProcessInstance pi : processInstanceList ) { Process pidef = getProcessDefinition ( pi ) ; if ( pidef . isEmbeddedProcess ( ) ) procInstIds . add ( pi . getId ( ) ) ; } TaskServices taskServices = ServiceLocator . getTaskServices ( ) ; for ( Long procInstId : procInstIds ) { taskServices . cancelTaskInstancesForProcess ( procInstId ) ; }
public class SortArgs { /** * Retrieve external keys during sort . { @ literal GET } supports { @ code # } and { @ code * } wildcards . * @ param pattern must not be { @ literal null } . * @ return { @ code this } { @ link SortArgs } . */ public SortArgs get ( String pattern ) { } }
LettuceAssert . notNull ( pattern , "Pattern must not be null" ) ; if ( get == null ) { get = new ArrayList < > ( ) ; } get . add ( pattern ) ; return this ;
public class InternalInputStreamManager { /** * This method creates a Stream if this is the first * inbound message which has been sent to remote Mes * @ param jsMsg * @ throws SIResourceException * @ throws SICoreException */ public void processMessage ( JsMessage jsMsg ) throws SIResourceException { } }
if ( tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "processMessage" , new Object [ ] { jsMsg } ) ; int priority = jsMsg . getPriority ( ) . intValue ( ) ; Reliability reliability = jsMsg . getReliability ( ) ; SIBUuid12 streamID = jsMsg . getGuaranteedStreamUUID ( ) ; StreamSet streamSet = getStreamSet ( streamID , true ) ; InternalInputStream internalInputStream = null ; synchronized ( streamSet ) { internalInputStream = ( InternalInputStream ) streamSet . getStream ( priority , reliability ) ; // This may be the first message which has required this stream if ( internalInputStream == null && ( reliability . compareTo ( Reliability . BEST_EFFORT_NONPERSISTENT ) > 0 ) ) { internalInputStream = createStream ( streamSet , priority , reliability ) ; } } if ( tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "processMessage" ) ;
public class JSDocInfo { /** * Declares a parameter . Parameters are described using the { @ code @ param } * annotation . * @ param jsType the parameter ' s type , it may be { @ code null } when the * { @ code @ param } annotation did not specify a type . * @ param parameter the parameter ' s name */ boolean declareParam ( JSTypeExpression jsType , String parameter ) { } }
lazyInitInfo ( ) ; if ( info . parameters == null ) { info . parameters = new LinkedHashMap < > ( ) ; } if ( ! info . parameters . containsKey ( parameter ) ) { info . parameters . put ( parameter , jsType ) ; return true ; } else { return false ; }
public class WriteClass { /** * Write the includes * @ param codeType Target method types */ public void writeIncludes ( CodeType codeType ) { } }
ClassInfo recClassInfo2 = null ; ClassFields recClassFields = null ; try { ClassInfo recClassInfo = ( ClassInfo ) this . getMainRecord ( ) ; String classType = recClassInfo . getField ( ClassInfo . CLASS_TYPE ) . toString ( ) ; String strPackage = this . getPackage ( codeType ) ; m_StreamOut . writeit ( "package " + strPackage + ";\n\n" ) ; m_IncludeNameList . addName ( strPackage ) ; // Don ' t include this ! ! ! if ( "interface" . equalsIgnoreCase ( classType ) ) codeType = CodeType . INTERFACE ; if ( codeType == CodeType . THICK ) { // m _ StreamOut . writeit ( " import java . awt . * ; \ n " ) ; / / j Temp m_StreamOut . writeit ( "import java.util.*;\n\n" ) ; // j Temp m_IncludeNameList . addPackage ( DBConstants . ROOT_PACKAGE + "base.db" ) ; // Don ' t include this ! ! ! m_IncludeNameList . addPackage ( DBConstants . ROOT_PACKAGE + "thin.base.util" ) ; // Don ' t include this ! ! ! m_IncludeNameList . addPackage ( DBConstants . ROOT_PACKAGE + "thin.base.db" ) ; // Don ' t include this ! ! ! m_IncludeNameList . addPackage ( DBConstants . ROOT_PACKAGE + "base.db.event" ) ; // Don ' t include this ! ! ! m_IncludeNameList . addPackage ( DBConstants . ROOT_PACKAGE + "base.db.filter" ) ; // Don ' t include this ! ! ! m_IncludeNameList . addPackage ( DBConstants . ROOT_PACKAGE + "base.field" ) ; // Don ' t include this ! ! ! m_IncludeNameList . addPackage ( DBConstants . ROOT_PACKAGE + "base.field.convert" ) ; // Don ' t include this ! ! ! m_IncludeNameList . addPackage ( DBConstants . ROOT_PACKAGE + "base.field.event" ) ; // Don ' t include this ! ! ! if ( ( "Screen" . equalsIgnoreCase ( classType ) ) || ( "Report" . equalsIgnoreCase ( classType ) ) ) { m_IncludeNameList . addPackage ( DBConstants . ROOT_PACKAGE + "base.screen.model" ) ; // Don ' t include this ! ! ! m_IncludeNameList . addPackage ( DBConstants . ROOT_PACKAGE + "base.screen.model.util" ) ; // Don ' t include this ! ! ! } m_IncludeNameList . addPackage ( DBConstants . ROOT_PACKAGE + "base.model" ) ; // Don ' t include this ! ! ! m_IncludeNameList . addPackage ( DBConstants . ROOT_PACKAGE + "base.util" ) ; // Don ' t include this ! ! ! m_IncludeNameList . addPackage ( DBConstants . ROOT_PACKAGE + "model" ) ; // Don ' t include this ! ! ! m_IncludeNameList . addPackage ( DBConstants . ROOT_PACKAGE + "model.db" ) ; // Don ' t include this ! ! ! m_IncludeNameList . addPackage ( DBConstants . ROOT_PACKAGE + "model.screen" ) ; // Don ' t include this ! ! ! } // Now write the include files for any base classes not in this file or fields with class defs not in file recClassInfo2 = new ClassInfo ( this ) ; recClassFields = new ClassFields ( this ) ; recClassFields . setKeyArea ( ClassFields . CLASS_INFO_CLASS_NAME_KEY ) ; SubFileFilter fileBehavior2 = new SubFileFilter ( recClassInfo2 . getField ( ClassInfo . CLASS_NAME ) , ClassFields . CLASS_INFO_CLASS_NAME , null , null , null , null ) ; recClassFields . addListener ( fileBehavior2 ) ; // Only read through the class fields String strFileName = recClassInfo . getField ( ClassInfo . CLASS_SOURCE_FILE ) . toString ( ) ; recClassInfo2 . setKeyArea ( ClassInfo . CLASS_SOURCE_FILE_KEY ) ; StringSubFileFilter fileBehavior = new StringSubFileFilter ( strFileName , ClassInfo . CLASS_SOURCE_FILE , null , null , null , null ) ; recClassInfo2 . addListener ( fileBehavior ) ; // Only select records which match m _ strFileName recClassInfo2 . setKeyArea ( ClassInfo . CLASS_SOURCE_FILE_KEY ) ; recClassInfo2 . close ( ) ; while ( recClassInfo2 . hasNext ( ) ) { recClassInfo2 . next ( ) ; if ( ( codeType == CodeType . THIN ) && ( ! recClassInfo2 . isARecord ( false ) ) ) continue ; // if ( ( codeType = = CodeType . INTERFACE ) & & ( ! recClassInfo2 . isARecord ( false ) ) ) // continue ; String strBaseRecordClass = recClassInfo2 . getField ( ClassInfo . BASE_CLASS_NAME ) . getString ( ) ; if ( codeType == CodeType . THICK ) m_IncludeNameList . addInclude ( strBaseRecordClass , null ) ; // Include the base class if it isn ' t in this file recClassFields . close ( ) ; while ( recClassFields . hasNext ( ) ) { recClassFields . next ( ) ; if ( ! ( ( IncludeScopeField ) recClassFields . getField ( ClassFields . INCLUDE_SCOPE ) ) . includeThis ( codeType , false ) ) continue ; String strFieldName = recClassFields . getField ( ClassFields . CLASS_FIELD_NAME ) . getString ( ) ; String strFieldClass = recClassFields . getField ( ClassFields . CLASS_FIELD_CLASS ) . getString ( ) ; String strReference = "" ; String strClassFieldType = recClassFields . getField ( ClassFields . CLASS_FIELDS_TYPE ) . toString ( ) ; if ( strClassFieldType . equalsIgnoreCase ( ClassFieldsTypeField . INCLUDE_PACKAGE ) ) { if ( strFieldClass . length ( ) > 0 ) if ( strFieldClass . charAt ( 0 ) == '.' ) { ClassProject classProject = ( ClassProject ) ( ( ReferenceField ) recClassInfo2 . getField ( ClassInfo . CLASS_PROJECT_ID ) ) . getReference ( ) ; if ( ( classProject != null ) && ( ( classProject . getEditMode ( ) == DBConstants . EDIT_CURRENT ) || ( classProject . getEditMode ( ) == DBConstants . EDIT_IN_PROGRESS ) ) ) { CodeType codeType2 = CodeType . THICK ; if ( strFieldClass . startsWith ( ".thin" ) ) codeType2 = CodeType . THIN ; if ( strFieldClass . startsWith ( ".res" ) ) codeType2 = CodeType . RESOURCE_CODE ; strFieldClass = classProject . getFullPackage ( CodeType . THICK , strFieldClass ) ; if ( codeType2 != CodeType . THICK ) { int end = strFieldClass . indexOf ( codeType2 == CodeType . THIN ? ".thin" : ".res" ) ; int start = strFieldClass . indexOf ( '.' ) ; if ( start != - 1 ) start = strFieldClass . indexOf ( '.' , start + 1 ) ; if ( start != - 1 ) strFieldClass = strFieldClass . substring ( 0 , start ) + strFieldClass . substring ( end ) ; } } else strFieldClass = DBConstants . ROOT_PACKAGE + strFieldClass . substring ( 1 ) ; } m_IncludeNameList . addPackage ( strFieldClass ) ; } else if ( ( strClassFieldType . equalsIgnoreCase ( ClassFieldsTypeField . INCLUDE_CLASS_PACKAGE ) ) || ( strClassFieldType . equalsIgnoreCase ( ClassFieldsTypeField . INCLUDE_MODEL_PACKAGE ) ) // For now || ( strClassFieldType . equalsIgnoreCase ( ClassFieldsTypeField . INCLUDE_RES_PACKAGE ) ) // For now || ( strClassFieldType . equalsIgnoreCase ( ClassFieldsTypeField . INCLUDE_THIN_PACKAGE ) ) // For now || ( strClassFieldType . equalsIgnoreCase ( ClassFieldsTypeField . CLASS_FIELD ) ) // For now || ( strClassFieldType . equalsIgnoreCase ( ClassFieldsTypeField . INCLUDE_CLASS ) ) ) { CodeType codeType3 = CodeType . THICK ; if ( strClassFieldType . equalsIgnoreCase ( ClassFieldsTypeField . INCLUDE_MODEL_PACKAGE ) ) codeType3 = CodeType . INTERFACE ; if ( strClassFieldType . equalsIgnoreCase ( ClassFieldsTypeField . INCLUDE_RES_PACKAGE ) ) // For now codeType3 = CodeType . RESOURCE_CODE ; if ( strClassFieldType . equalsIgnoreCase ( ClassFieldsTypeField . INCLUDE_THIN_PACKAGE ) ) // For now codeType3 = CodeType . THIN ; m_IncludeNameList . addInclude ( strFieldClass , codeType3 ) ; strReference = "Y" ; } if ( recClassFields . getField ( ClassFields . CLASS_FIELD_PROTECT ) . getString ( ) . equalsIgnoreCase ( "S" ) ) // Static , initialize now { if ( strReference . length ( ) == 0 ) strReference = "0" ; if ( strReference . charAt ( 0 ) == 'Y' ) strReference = "null" ; else { strReference = recClassFields . getField ( ClassFields . CLASS_FIELD_INITIAL ) . getString ( ) ; if ( strReference . length ( ) == 0 ) strReference = "0" ; } if ( ! strReference . equals ( "(none)" ) ) m_StreamOut . writeit ( strFieldClass + " " + strFieldName + " = " + strReference + ";\n" ) ; } } recClassFields . close ( ) ; } // End of write record method ( s ) loop recClassInfo2 . removeListener ( fileBehavior , true ) ; recClassInfo2 . close ( ) ; } catch ( DBException ex ) { ex . printStackTrace ( ) ; } finally { if ( recClassInfo2 != null ) recClassInfo2 . free ( ) ; if ( recClassFields != null ) recClassFields . free ( ) ; }
public class DFSEvaluatorLogOverwriteReaderWriter { /** * Closes the FileSystem . * @ throws Exception */ @ Override public synchronized void close ( ) throws Exception { } }
if ( this . fileSystem != null && ! this . fsClosed ) { this . fileSystem . close ( ) ; this . fsClosed = true ; }
public class SharedTorrent { /** * Bit field availability handler . * Handle updates in piece availability from a peer ' s BITFIELD message . * When this happens , we need to mark in all the pieces the peer has that * they can be reached through this peer , thus augmenting the global * availability of pieces . * @ param peer The peer we got the update from . * @ param availablePieces The pieces availability bit field of the peer . */ @ Override public void handleBitfieldAvailability ( SharingPeer peer , BitSet availablePieces ) { } }
// Determine if the peer is interesting for us or not , and notify it . BitSet interesting = ( BitSet ) availablePieces . clone ( ) ; synchronized ( this ) { interesting . andNot ( this . completedPieces ) ; interesting . andNot ( this . requestedPieces ) ; } // Record the peer has all the pieces it told us it had . for ( int i = availablePieces . nextSetBit ( 0 ) ; i >= 0 ; i = availablePieces . nextSetBit ( i + 1 ) ) { this . pieces [ i ] . seenAt ( peer ) ; } if ( interesting . cardinality ( ) == 0 ) { peer . notInteresting ( ) ; } else { peer . interesting ( ) ; } logger . debug ( "Peer {} contributes {} piece(s), total pieces count: {}." , new Object [ ] { peer , availablePieces . cardinality ( ) , myTorrentMetadata . getPiecesCount ( ) } ) ;
public class HTMLUtil { /** * Restrict HTML from the specified string except for the specified * regular expressions . */ public static String restrictHTML ( String src , String [ ] regexes ) { } }
if ( StringUtil . isBlank ( src ) ) { return src ; } ArrayList < String > list = new ArrayList < String > ( ) ; list . add ( src ) ; for ( String regexe : regexes ) { Pattern p = Pattern . compile ( regexe , Pattern . CASE_INSENSITIVE ) ; for ( int jj = 0 ; jj < list . size ( ) ; jj += 2 ) { String piece = list . get ( jj ) ; Matcher m = p . matcher ( piece ) ; if ( m . find ( ) ) { list . set ( jj , piece . substring ( 0 , m . start ( ) ) ) ; list . add ( jj + 1 , piece . substring ( m . start ( ) , m . end ( ) ) ) ; list . add ( jj + 2 , piece . substring ( m . end ( ) ) ) ; } } } // now , the even elements of list contain untrusted text , the // odd elements contain stuff that matched a regex StringBuilder buf = new StringBuilder ( ) ; for ( int jj = 0 , nn = list . size ( ) ; jj < nn ; jj ++ ) { String s = list . get ( jj ) ; if ( jj % 2 == 0 ) { s = s . replace ( "<" , "&lt;" ) ; s = s . replace ( ">" , "&gt;" ) ; } buf . append ( s ) ; } return buf . toString ( ) ;
public class ApiBase { /** * Gets the Dto instance from API . * @ param < T > Type on behalf of which the request is being called . * @ param classOfT Type on behalf of which the request is being called . * @ param methodKey Relevant method key . * @ param entityId Entity identifier . * @ param secondEntityId Entity identifier for the second entity . * @ return The Dto instance returned from API . * @ throws Exception */ protected < T extends Dto > T getObject ( Class < T > classOfT , String methodKey , String entityId , String secondEntityId ) throws Exception { } }
String urlMethod = String . format ( this . getRequestUrl ( methodKey ) , entityId , secondEntityId ) ; RestTool rest = new RestTool ( this . root , true ) ; T response = rest . request ( classOfT , null , urlMethod , this . getRequestType ( methodKey ) ) ; return response ;
public class SntpConnector { /** * / * [ deutsch ] * < p > Liefert die aktuelle Zeit in Mikrosekunden seit dem Beginn der * UNIX - Epoche , n & auml ; mlich [ 1970-01-01T00:00:00,00000Z ] . < / p > * < p > Es handelt sich immer um eine Zeitangabe ohne UTC - Schaltsekunden . < / p > * @ return count of microseconds since UNIX - epoch without leap seconds * @ since 2.1 */ public long currentTimeInMicros ( ) { } }
if ( ! this . isRunning ( ) ) { Moment m = this . currentTime ( ) ; return ( m . getPosixTime ( ) * MIO + m . getNanosecond ( ) / 1000 ) ; } long micros = SystemClock . MONOTONIC . currentTimeInMicros ( ) ; return ( micros + this . getLastOffset ( micros ) ) ;
public class JPAEnabledManager { /** * Perform a select , without a transaction * @ param aCallable * The callable * @ return The return of the callable or < code > null < / code > upon success * @ param < T > * The return type of the callable */ @ Nonnull public static final < T > JPAExecutionResult < T > doSelectStatic ( @ Nonnull final Callable < T > aCallable ) { } }
ValueEnforcer . notNull ( aCallable , "Callable" ) ; final StopWatch aSW = StopWatch . createdStarted ( ) ; try { // Call callback final T ret = aCallable . call ( ) ; s_aStatsCounterSuccess . increment ( ) ; s_aStatsTimerExecutionSuccess . addTime ( aSW . stopAndGetMillis ( ) ) ; return JPAExecutionResult . createSuccess ( ret ) ; } catch ( final Exception ex ) { s_aStatsCounterError . increment ( ) ; s_aStatsTimerExecutionError . addTime ( aSW . stopAndGetMillis ( ) ) ; _invokeCustomExceptionCallback ( ex ) ; return JPAExecutionResult . < T > createFailure ( ex ) ; } finally { if ( isDefaultExecutionWarnTimeEnabled ( ) ) if ( aSW . getMillis ( ) > getDefaultExecutionWarnTime ( ) ) onExecutionTimeExceeded ( "Execution of select took too long: " + aCallable . toString ( ) , aSW . getMillis ( ) ) ; }
public class UnixUserGroupInformation { /** * Store the given < code > ugi < / code > as a comma separated string in * < code > conf < / code > as a property < code > attr < / code > * The String starts with the user name followed by the default group names , * and other group names . * @ param conf configuration * @ param attr property name * @ param ugi a UnixUserGroupInformation */ public static void saveToConf ( Configuration conf , String attr , UnixUserGroupInformation ugi ) { } }
conf . set ( attr , ugi . toString ( ) ) ;
public class AbstractInputParser { /** * buildTokens . * @ param commandPrefix a char . * @ param tokens a { @ link java . util . List } object . * @ param depth a { @ link java . util . List } object . * @ param line a { @ link java . util . List } object . * @ param value a { @ link java . lang . String } object . */ protected void buildTokens ( final char commandPrefix , final List < Token > tokens , final List < NodeOc > depth , final List < NodeOc > line , final String value ) { } }
for ( int n = 0 ; n < line . size ( ) ; n ++ ) { if ( depth . size ( ) <= n ) { /* * This is a new command */ depth . add ( line . get ( n ) ) ; /* * an empty keyword means a positional parameter */ if ( line . get ( n ) . key . length ( ) > 0 ) { tokens . add ( new Token ( commandPrefix , dashed ( commandPrefix , line . get ( n ) . key ) , 0 , 0 , false ) ) ; } /* * if the depth so far is shorter than the namespace then it is * necessary to start a group */ if ( n < line . size ( ) - 1 ) tokens . add ( new Token ( commandPrefix , OPEN_GROUP , 0 , 0 , false ) ) ; } else if ( depth . get ( n ) . key . equalsIgnoreCase ( line . get ( n ) . key ) ) { /* * This is a subsequent command for the same namespace at this * level . Check the occurrence to see if a new multiple group ) ( * should be started . If the occurrence is the same then the * subsequent namespace will make it unique and this specific * node in the namespace can be safely ignored . In essence we * are within a sub - structure and only change instance * variables . */ if ( depth . get ( n ) . occurrance == line . get ( n ) . occurrance ) continue ; /* * A new occurrence of the same namespace node . So we close off * deeper groups . */ for ( int d = depth . size ( ) - 1 ; d > n ; d -- ) { depth . remove ( d ) ; tokens . add ( new Token ( commandPrefix , CLOSE_GROUP , 0 , 0 , false ) ) ; } depth . get ( n ) . occurrance = line . get ( n ) . occurrance ; if ( n < line . size ( ) - 1 ) tokens . add ( new Token ( commandPrefix , OPEN_GROUP , 0 , 0 , false ) ) ; continue ; } else { for ( int d = depth . size ( ) - 1 ; d > n ; d -- ) { depth . remove ( d ) ; tokens . add ( new Token ( commandPrefix , CLOSE_GROUP , 0 , 0 , false ) ) ; } depth . get ( n ) . key = line . get ( n ) . key ; depth . get ( n ) . occurrance = line . get ( n ) . occurrance ; /* * an empty keyword means a positional parameter */ if ( line . get ( n ) . key . length ( ) > 0 ) tokens . add ( new Token ( commandPrefix , dashed ( commandPrefix , line . get ( n ) . key ) , 0 , 0 , false ) ) ; if ( n < line . size ( ) - 1 ) tokens . add ( new Token ( commandPrefix , OPEN_GROUP , 0 , 0 , false ) ) ; continue ; } } /* * booleans have no value */ if ( value != null && value . length ( ) > 0 ) { tokens . add ( new Token ( commandPrefix , value , 0 , 0 , true ) ) ; }
public class ClusterState { /** * Attempts to leave the cluster . */ private void leave ( CompletableFuture < Void > future ) { } }
// Set a timer to retry the attempt to leave the cluster . leaveTimeout = context . getThreadContext ( ) . schedule ( context . getElectionTimeout ( ) , ( ) -> { leave ( future ) ; } ) ; // Attempt to leave the cluster by submitting a LeaveRequest directly to the server state . // Non - leader states should forward the request to the leader if there is one . Leader states // will log , replicate , and commit the reconfiguration . context . getServerState ( ) . leave ( LeaveRequest . builder ( ) . withMember ( member ( ) ) . build ( ) ) . whenComplete ( ( response , error ) -> { // Cancel the leave timer . cancelLeaveTimer ( ) ; if ( error == null && response . status ( ) == Response . Status . OK ) { Configuration configuration = new Configuration ( response . index ( ) , response . term ( ) , response . timestamp ( ) , response . members ( ) ) ; // Configure the cluster and commit the configuration as we know the successful response // indicates commitment . configure ( configuration ) . commit ( ) ; future . complete ( null ) ; } else { // Reset the leave timer . leaveTimeout = context . getThreadContext ( ) . schedule ( context . getElectionTimeout ( ) , ( ) -> { leave ( future ) ; } ) ; } } ) ;
public class ServerSecurityAlertPoliciesInner { /** * Creates or updates a threat detection policy . * @ param resourceGroupName The name of the resource group that contains the resource . You can obtain this value from the Azure Resource Manager API or the portal . * @ param serverName The name of the server . * @ param parameters The server security alert policy . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable for the request */ public Observable < ServerSecurityAlertPolicyInner > createOrUpdateAsync ( String resourceGroupName , String serverName , ServerSecurityAlertPolicyInner parameters ) { } }
return createOrUpdateWithServiceResponseAsync ( resourceGroupName , serverName , parameters ) . map ( new Func1 < ServiceResponse < ServerSecurityAlertPolicyInner > , ServerSecurityAlertPolicyInner > ( ) { @ Override public ServerSecurityAlertPolicyInner call ( ServiceResponse < ServerSecurityAlertPolicyInner > response ) { return response . body ( ) ; } } ) ;