signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class JstlCoreTLV {
    /**
     * Validates a page against the JSTL core tag library: sets the library
     * type to {@code TYPE_CORE} and delegates the actual validation to the
     * superclass.
     *
     * @param prefix the tag library prefix used in the page
     * @param uri the tag library URI
     * @param page the page data to validate
     * @return the validation messages produced by the superclass validator
     */
    @Override
    public ValidationMessage[] validate(String prefix, String uri, PageData page) {
        return super.validate(TYPE_CORE, prefix, uri, page);
    }
}
public class ScheduleService {
    /**
     * Initializes the datasource used to persist incremental-import status
     * (the last collected value/timestamp). Only runs when incremental import
     * is enabled. When no dedicated status DB is configured, an embedded
     * SQLite database named "&lt;dataDbName&gt;_config" is created under
     * {@code statusStorePath}; otherwise the configured status DB is used,
     * starting a dedicated connection pool only if it differs from the data DB.
     * Finally determines the last-value type and pre-builds the status-table
     * CRUD statements.
     *
     * @throws ESDataImportException if the status connection pool cannot be started
     */
    private void initDatasource() {
        if (this.isIncreamentImport()) {
            if (this.esjdbc.getStatusDbConfig() == null) {
                // No dedicated status DB configured: fall back to a local SQLite
                // database derived from the data DB name.
                statusDbname = this.esjdbc.getDbConfig().getDbName() + "_config";
                String dbJNDIName = this.esjdbc.getDbConfig().getDbName() + "_config";
                try {
                    // DDL for the status table: tracks the last collected time,
                    // value and value type, keyed by ID.
                    createStatusTableSQL = new StringBuilder().append("create table ").append(statusTableName).append(" (ID number(10),lasttime number(10),lastvalue number(10),lastvaluetype number(1),PRIMARY KEY (ID))").toString();
                    File dbpath = new File(statusStorePath);
                    logger.info("initDatasource dbpath:" + dbpath.getCanonicalPath());
                    SQLUtil.startPool(statusDbname, "org.sqlite.JDBC", "jdbc:sqlite://" + dbpath.getCanonicalPath(), "root", "root", null, // presumably read-only flag (was commented as "false") — confirm against SQLUtil.startPool
                            null, // presumably tx isolation (was commented as "READ_UNCOMMITTED") — confirm
                            "select 1", dbJNDIName, 10, 10, 20, true, false, null, false, false);
                } catch (Exception e) {
                    throw new ESDataImportException(e);
                }
            } else {
                DBConfig statusDBConfig = esjdbc.getStatusDbConfig();
                statusDbname = esjdbc.getStatusDbConfig().getDbName();
                if (!statusDbname.equals(esjdbc.getDbConfig().getDbName())) {
                    // Status DB differs from the data DB: start a dedicated pool for it.
                    String dbJNDIName = statusDbname + "_config";
                    try {
                        SQLUtil.startPool(statusDbname, statusDBConfig.getDbDriver(), statusDBConfig.getDbUrl(), statusDBConfig.getDbUser(), statusDBConfig.getDbPassword(), null, // presumably read-only flag — confirm against SQLUtil.startPool
                                null, // presumably tx isolation — confirm
                                statusDBConfig.getValidateSQL(), dbJNDIName, 10, 10, 20, true, false, null, false, false);
                    } catch (Exception e) {
                        throw new ESDataImportException(e);
                    }
                }
                // Prefer user-supplied DDL; otherwise generate DDL suited to the
                // pool's database type, then substitute the table-name placeholder.
                createStatusTableSQL = statusDBConfig.getStatusTableDML();
                if (createStatusTableSQL == null) {
                    createStatusTableSQL = statusDBConfig.getCreateStatusTableSQL(SQLUtil.getPool(statusDbname).getDBType());
                }
                createStatusTableSQL = createStatusTableSQL.replace("$statusTableName", statusTableName);
            }
            // Determine how the "last value" is typed: an explicit date column
            // wins, then a numeric column, then a configured type, defaulting
            // to NUMBER.
            if (esjdbc.getDateLastValueColumn() != null) {
                this.lastValueType = ImportIncreamentConfig.TIMESTAMP_TYPE;
            } else if (esjdbc.getNumberLastValueColumn() != null) {
                this.lastValueType = ImportIncreamentConfig.NUMBER_TYPE;
            } else if (esjdbc.getLastValueType() != null) {
                this.lastValueType = esjdbc.getLastValueType();
            } else {
                this.lastValueType = ImportIncreamentConfig.NUMBER_TYPE;
            }
            // Pre-build the statements used to probe, read and persist the status row.
            existSQL = new StringBuilder().append("select 1 from ").append(statusTableName).toString();
            selectSQL = new StringBuilder().append("select id,lasttime,lastvalue,lastvaluetype from ").append(statusTableName).append(" where id=?").toString();
            updateSQL = new StringBuilder().append("update ").append(statusTableName).append(" set lasttime = ?,lastvalue = ? ,lastvaluetype= ? where id=?").toString();
            insertSQL = new StringBuilder().append("insert into ").append(statusTableName).append(" (id,lasttime,lastvalue,lastvaluetype) values(?,?,?,?)").toString();
        }
    }
}
public class RuleBasedTokenizer { /** * De - tokenize wrongly tokenized URLs .
* @ param line
* the sentence
* @ return the sentence containing the correct URL */
private String detokenizeURLs ( String line ) { } } | final Matcher linkMatcher = wrongLink . matcher ( line ) ; final StringBuffer sb = new StringBuffer ( ) ; while ( linkMatcher . find ( ) ) { linkMatcher . appendReplacement ( sb , linkMatcher . group ( ) . replaceAll ( "\\s" , "" ) ) ; } linkMatcher . appendTail ( sb ) ; line = sb . toString ( ) ; return line ; |
public class JoltUtils {
    /**
     * Converts a standard json path to a human readable, SimpleTraversr
     * compatible path: string segments are kept as-is, integer segments are
     * rendered as {@code [n]}, and segments are joined with dots.
     *
     * @param paths the path array of objects
     * @return string representation of the path, human readable and SimpleTraversr friendly
     * @throws UnsupportedOperationException if a segment is neither a String nor an Integer
     */
    public static String toSimpleTraversrPath(Object[] paths) {
        StringBuilder result = new StringBuilder();
        for (int index = 0; index < paths.length; index++) {
            if (index > 0) {
                result.append('.');
            }
            Object segment = paths[index];
            if (segment instanceof Integer) {
                result.append('[').append(((Integer) segment).intValue()).append(']');
            } else if (segment instanceof String) {
                result.append(segment.toString());
            } else {
                throw new UnsupportedOperationException("Only Strings and Integers are supported as path element");
            }
        }
        return result.toString();
    }
}
public class ConnectionFactoryValidator {
    /**
     * Validate a connection factory that implements javax.sql.DataSource by
     * opening a connection and recording database/driver metadata into the
     * result map. Optional metadata (catalog, schema, isValid) is probed
     * defensively: drivers that do not support it are skipped silently.
     *
     * @param ds data source instance.
     * @param user user name, if any, that is specified in the header of the validation request.
     * @param password password, if any, that is specified in the header of the validation request.
     * @param result validation result to which this method appends info.
     * @throws SQLException if an error occurs.
     */
    private void validateDataSource(DataSource ds, String user, @Sensitive String password, LinkedHashMap<String, Object> result) throws SQLException {
        // Use the caller-supplied credentials when present, otherwise the
        // datasource's own configured credentials.
        java.sql.Connection con = user == null ? ds.getConnection() : ds.getConnection(user, password);
        try {
            DatabaseMetaData metadata = con.getMetaData();
            result.put("databaseProductName", metadata.getDatabaseProductName());
            result.put("databaseProductVersion", metadata.getDatabaseProductVersion());
            result.put("driverName", metadata.getDriverName());
            result.put("driverVersion", metadata.getDriverVersion());
            // Catalog/schema are optional JDBC features; only record non-empty values.
            try {
                String catalog = con.getCatalog();
                if (catalog != null && catalog.length() > 0)
                    result.put("catalog", catalog);
            } catch (SQLFeatureNotSupportedException ignore) {
                // driver does not support catalogs — omit from result
            }
            try {
                String schema = con.getSchema();
                if (schema != null && schema.length() > 0)
                    result.put("schema", schema);
            } catch (SQLFeatureNotSupportedException ignore) {
                // driver does not support schemas — omit from result
            }
            String userName = metadata.getUserName();
            if (userName != null && userName.length() > 0)
                result.put("user", userName);
            // Probe connection liveness; record a failure entry rather than throwing.
            try {
                boolean isValid = con.isValid(120); // TODO better ideas for timeout value?
                if (!isValid)
                    result.put("failure", "FALSE returned by JDBC driver's Connection.isValid operation");
            } catch (SQLFeatureNotSupportedException x) {
                // isValid unsupported — liveness simply not reported
            }
        } finally {
            con.close();
        }
    }
}
public class H2InboundLink { /** * Increment the connection window limit but the given amount
* @ param int amount to increment connection window
* @ throws FlowControlException */
public void incrementConnectionWindowUpdateLimit ( int x ) throws FlowControlException { } } | if ( ! checkIfGoAwaySendingOrClosing ( ) ) { writeQ . incrementConnectionWindowUpdateLimit ( x ) ; H2StreamProcessor stream ; for ( Integer i : streamTable . keySet ( ) ) { stream = streamTable . get ( i ) ; if ( stream != null ) { stream . connectionWindowSizeUpdated ( ) ; } } } |
public class CacheOptimizerStore { /** * Retrieve the cache for a project in a version
* @ param projectName The project name
* @ param projectVersion The project version
* @ return The cache found . If none available , a new one is created */
private Map < String , String > getCache ( String projectName , String projectVersion ) { } } | if ( ! caches . containsKey ( getCacheKey ( projectName , projectVersion ) ) ) { caches . put ( getCacheKey ( projectName , projectVersion ) , loadOrCreateCache ( projectName , projectVersion ) ) ; } return caches . get ( getCacheKey ( projectName , projectVersion ) ) ; |
public class ServletUtil {
    /**
     * Convenience overload of {@code getContextAttribute(ServletConfig, ...)}
     * that resolves the servlet config from the given servlet before delegating.
     *
     * @param servlet the servlet whose config supplies the servlet context
     * @param name the attribute name to look up
     * @param cls the expected attribute type
     * @param opts lookup options
     * @param defaultValue value returned when the attribute is absent
     * @return the context attribute, or {@code defaultValue} if not found
     */
    public static final <T> T getContextAttribute(Servlet servlet, String name, Class<T> cls, Set<GetOpts> opts, T defaultValue) {
        return getContextAttribute(servlet.getServletConfig(), name, cls, opts, defaultValue);
    }
}
public class ReadRequest { /** * check a read request for validity concerning NSSP
* @ param request
* Tomcat servlet request
* @ param readResponse
* read response object
* @ return read request object < br >
* < b > null < / b > if the read request is invalid */
public static ReadRequest checkRequest ( final HttpServletRequest request , final ReadResponse readResponse ) { } } | final Node user = checkUserIdentifier ( request , readResponse ) ; if ( user != null ) { final Node poster = checkPosterIdentifier ( request , readResponse ) ; if ( poster != null ) { final int numItems = checkNumItems ( request , readResponse ) ; if ( numItems != 0 ) { final Boolean ownUpdates = checkOwnUpdates ( request , readResponse ) ; if ( ownUpdates != null ) { return new ReadRequest ( user , poster , numItems , ownUpdates ) ; } } } } return null ; |
public class ShiroRule { /** * Method description */
private void tearDownShiro ( ) { } } | try { SecurityManager securityManager = SecurityUtils . getSecurityManager ( ) ; LifecycleUtils . destroy ( securityManager ) ; ThreadContext . unbindSecurityManager ( ) ; ThreadContext . unbindSubject ( ) ; ThreadContext . remove ( ) ; } catch ( UnavailableSecurityManagerException e ) { // we don ' t care about this when cleaning up the test environment
// ( for example , maybe the subclass is a unit test and it didn ' t
// need a SecurityManager instance because it was using only mock Subject instances )
} SecurityUtils . setSecurityManager ( null ) ; |
public class AmazonElasticLoadBalancingClient { /** * Generates a stickiness policy with sticky session lifetimes controlled by the lifetime of the browser
* ( user - agent ) or a specified expiration period . This policy can be associated only with HTTP / HTTPS listeners .
* When a load balancer implements this policy , the load balancer uses a special cookie to track the instance for
* each request . When the load balancer receives a request , it first checks to see if this cookie is present in the
* request . If so , the load balancer sends the request to the application server specified in the cookie . If not ,
* the load balancer sends the request to a server that is chosen based on the existing load - balancing algorithm .
* A cookie is inserted into the response for binding subsequent requests from the same user to that server . The
* validity of the cookie is based on the cookie expiration time , which is specified in the policy configuration .
* For more information , see < a href =
* " http : / / docs . aws . amazon . com / elasticloadbalancing / latest / classic / elb - sticky - sessions . html # enable - sticky - sessions - duration "
* > Duration - Based Session Stickiness < / a > in the < i > Classic Load Balancers Guide < / i > .
* @ param createLBCookieStickinessPolicyRequest
* Contains the parameters for CreateLBCookieStickinessPolicy .
* @ return Result of the CreateLBCookieStickinessPolicy operation returned by the service .
* @ throws LoadBalancerNotFoundException
* The specified load balancer does not exist .
* @ throws DuplicatePolicyNameException
* A policy with the specified name already exists for this load balancer .
* @ throws TooManyPoliciesException
* The quota for the number of policies for this load balancer has been reached .
* @ throws InvalidConfigurationRequestException
* The requested configuration change is not valid .
* @ sample AmazonElasticLoadBalancing . CreateLBCookieStickinessPolicy
* @ see < a
* href = " http : / / docs . aws . amazon . com / goto / WebAPI / elasticloadbalancing - 2012-06-01 / CreateLBCookieStickinessPolicy "
* target = " _ top " > AWS API Documentation < / a > */
@ Override public CreateLBCookieStickinessPolicyResult createLBCookieStickinessPolicy ( CreateLBCookieStickinessPolicyRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeCreateLBCookieStickinessPolicy ( request ) ; |
public class JadeComponentProvider { /** * Set the ResourceLoader to use for resource locations . This will
* typically be a ResourcePatternResolver implementation .
* Default is PathMatchingResourcePatternResolver , also capable of
* resource pattern resolving through the ResourcePatternResolver
* interface .
* @ see org . springframework . core . io . support . ResourcePatternResolver
* @ see org . springframework . core . io . support . PathMatchingResourcePatternResolver */
public void setResourceLoader ( ResourceLoader resourceLoader ) { } } | this . resourcePatternResolver = ResourcePatternUtils . getResourcePatternResolver ( resourceLoader ) ; this . metadataReaderFactory = new CachingMetadataReaderFactory ( resourceLoader ) ; |
public class GosuClassTransformer {
    /**
     * Adds a bridge constructor so Java can use the Gosu generic class without
     * explicitly passing in type arguments. The generated constructor forwards
     * to the given constructor ({@code dfs}) with default type arguments: it
     * clones the symbol, synthesizes a {@code this(...)} call that passes the
     * original parameters through, compiles that body, and registers the
     * resulting {@code <init>} method on the IR class.
     */
    private void compileJavaInteropBridgeConstructor(DynamicFunctionSymbol dfs) {
        // Clone the original constructor symbol; the clone gets a fresh body below.
        DynamicFunctionSymbol copy = new DynamicFunctionSymbol(dfs);
        copy.setValue(null);
        copy.setInitializer(null);
        // Synthetic declaration statement for the bridge constructor.
        ConstructorStatement fs = new ConstructorStatement(true);
        fs.setDynamicFunctionSymbol(copy);
        fs.setSynthetic(true);
        // Build the forwarding call: this(<original args>).
        MethodCallExpression expr = new MethodCallExpression();
        expr.setType(JavaTypes.pVOID());
        List<ISymbol> args = dfs.getArgs();
        Expression[] exprArgs = new Expression[args.size()];
        for (int i = 0; i < args.size(); i++) {
            // Each argument is forwarded as an identifier referencing the
            // corresponding parameter symbol.
            ISymbol arg = args.get(i);
            Identifier id = new Identifier();
            id.setSymbol(arg, dfs.getSymbolTable());
            id.setType(arg.getType());
            exprArgs[i] = id;
        }
        expr.setArgs(exprArgs);
        expr.setFunctionSymbol(new ThisConstructorFunctionSymbol(dfs, true));
        // Wrap the call in a synthetic statement and install it as the clone's body.
        MethodCallStatement stmt = new MethodCallStatement();
        stmt.setMethodCall(expr);
        stmt.setSynthetic(true);
        copy.setValue(stmt);
        copy.setDeclFunctionStmt(fs);
        int iModifiers = getModifiers(copy);
        // IR parameter symbols; enum constructors get their implicit
        // name/ordinal parameters prepended first.
        List<IRSymbol> parameters = new ArrayList<>();
        maybeGetEnumSuperConstructorSymbols(parameters);
        for (ISymbol param : copy.getArgs()) {
            parameters.add(makeParamSymbol(copy, param));
        }
        setUpFunctionContext(copy, true, parameters);
        // Compile the synthetic body to IR.
        FunctionStatementTransformer funcStmtCompiler = new FunctionStatementTransformer(copy, _context);
        IRStatement methodBody = funcStmtCompiler.compile();
        // Carry over a compile-time annotation default value, if one exists.
        IExpression annotationDefault = copy.getAnnotationDefault();
        Object[] annotationDefaultValue = null;
        if (annotationDefault != null) {
            annotationDefaultValue = new Object[] { CompileTimeExpressionParser.convertValueToInfoFriendlyValue(annotationDefault.evaluate(), getGosuClass().getTypeInfo()) };
        }
        // Emit the constructor ("<init>") onto the IR class, including annotations.
        IRMethodStatement methodStatement = new IRMethodStatement(methodBody, "<init>", iModifiers, copy.isInternal(), IRTypeConstants.pVOID(), copy.getReturnType(), parameters, copy.getArgTypes(), copy.getType(), annotationDefaultValue);
        methodStatement.setAnnotations(getIRAnnotations(makeAnnotationInfos(copy.getModifierInfo().getAnnotations(), getGosuClass().getTypeInfo())));
        _irClass.addMethod(methodStatement);
    }
}
public class FadeTransitionFactory { @ Override public Animator getInAnimator ( View target , SlideShowView parent , int fromSlide , int toSlide ) { } } | target . setAlpha ( 0 ) ; target . setScaleX ( 1 ) ; target . setScaleY ( 1 ) ; target . setTranslationX ( 0 ) ; target . setTranslationY ( 0 ) ; target . setRotationX ( 0 ) ; target . setRotationY ( 0 ) ; ObjectAnimator animator = ObjectAnimator . ofFloat ( target , View . ALPHA , 1 ) ; animator . setDuration ( getDuration ( ) ) ; animator . setInterpolator ( getInterpolator ( ) ) ; return animator ; |
public class AWSOrganizationsClient { /** * Returns a list of the AWS services that you enabled to integrate with your organization . After a service on this
* list creates the resources that it requires for the integration , it can perform operations on your organization
* and its accounts .
* For more information about integrating other services with AWS Organizations , including the list of services that
* currently work with Organizations , see < a
* href = " http : / / docs . aws . amazon . com / organizations / latest / userguide / orgs _ integrate _ services . html " > Integrating AWS
* Organizations with Other AWS Services < / a > in the < i > AWS Organizations User Guide < / i > .
* This operation can be called only from the organization ' s master account .
* @ param listAWSServiceAccessForOrganizationRequest
* @ return Result of the ListAWSServiceAccessForOrganization operation returned by the service .
* @ throws AccessDeniedException
* You don ' t have permissions to perform the requested operation . The user or role that is making the
* request must have at least one IAM permissions policy attached that grants the required permissions . For
* more information , see < a href = " https : / / docs . aws . amazon . com / IAM / latest / UserGuide / access . html " > Access
* Management < / a > in the < i > IAM User Guide < / i > .
* @ throws AWSOrganizationsNotInUseException
* Your account isn ' t a member of an organization . To make this request , you must use the credentials of an
* account that belongs to an organization .
* @ throws ConstraintViolationException
* Performing this operation violates a minimum or maximum value limit . For example , attempting to remove
* the last service control policy ( SCP ) from an OU or root , inviting or creating too many accounts to the
* organization , or attaching too many policies to an account , OU , or root . This exception includes a reason
* that contains additional information about the violated limit . < / p >
* Some of the reasons in the following list might not be applicable to this specific API or operation :
* < ul >
* < li >
* ACCOUNT _ NUMBER _ LIMIT _ EXCEEDED : You attempted to exceed the limit on the number of accounts in an
* organization . If you need more accounts , contact < a
* href = " https : / / console . aws . amazon . com / support / home # / " > AWS Support < / a > to request an increase in your
* limit .
* Or the number of invitations that you tried to send would cause you to exceed the limit of accounts in
* your organization . Send fewer invitations or contact AWS Support to request an increase in the number of
* accounts .
* < note >
* Deleted and closed accounts still count toward your limit .
* < / note > < important >
* If you get receive this exception when running a command immediately after creating the organization ,
* wait one hour and try again . If after an hour it continues to fail with this error , contact < a
* href = " https : / / console . aws . amazon . com / support / home # / " > AWS Support < / a > .
* < / important > < / li >
* < li >
* HANDSHAKE _ RATE _ LIMIT _ EXCEEDED : You attempted to exceed the number of handshakes that you can send in one
* day .
* < / li >
* < li >
* OU _ NUMBER _ LIMIT _ EXCEEDED : You attempted to exceed the number of OUs that you can have in an organization .
* < / li >
* < li >
* OU _ DEPTH _ LIMIT _ EXCEEDED : You attempted to create an OU tree that is too many levels deep .
* < / li >
* < li >
* ORGANIZATION _ NOT _ IN _ ALL _ FEATURES _ MODE : You attempted to perform an operation that requires the
* organization to be configured to support all features . An organization that supports only consolidated
* billing features can ' t perform this operation .
* < / li >
* < li >
* POLICY _ NUMBER _ LIMIT _ EXCEEDED . You attempted to exceed the number of policies that you can have in an
* organization .
* < / li >
* < li >
* MAX _ POLICY _ TYPE _ ATTACHMENT _ LIMIT _ EXCEEDED : You attempted to exceed the number of policies of a certain
* type that can be attached to an entity at one time .
* < / li >
* < li >
* MIN _ POLICY _ TYPE _ ATTACHMENT _ LIMIT _ EXCEEDED : You attempted to detach a policy from an entity that would
* cause the entity to have fewer than the minimum number of policies of a certain type required .
* < / li >
* < li >
* ACCOUNT _ CANNOT _ LEAVE _ WITHOUT _ EULA : You attempted to remove an account from the organization that doesn ' t
* yet have enough information to exist as a standalone account . This account requires you to first agree to
* the AWS Customer Agreement . Follow the steps at < a href =
* " http : / / docs . aws . amazon . com / organizations / latest / userguide / orgs _ manage _ accounts _ remove . html # leave - without - all - info "
* > To leave an organization when all required account information has not yet been provided < / a > in the
* < i > AWS Organizations User Guide < / i > .
* < / li >
* < li >
* ACCOUNT _ CANNOT _ LEAVE _ WITHOUT _ PHONE _ VERIFICATION : You attempted to remove an account from the organization
* that doesn ' t yet have enough information to exist as a standalone account . This account requires you to
* first complete phone verification . Follow the steps at < a href =
* " http : / / docs . aws . amazon . com / organizations / latest / userguide / orgs _ manage _ accounts _ remove . html # leave - without - all - info "
* > To leave an organization when all required account information has not yet been provided < / a > in the
* < i > AWS Organizations User Guide < / i > .
* < / li >
* < li >
* MASTER _ ACCOUNT _ PAYMENT _ INSTRUMENT _ REQUIRED : To create an organization with this master account , you first
* must associate a valid payment instrument , such as a credit card , with the account . Follow the steps at
* < a href =
* " http : / / docs . aws . amazon . com / organizations / latest / userguide / orgs _ manage _ accounts _ remove . html # leave - without - all - info "
* > To leave an organization when all required account information has not yet been provided < / a > in the
* < i > AWS Organizations User Guide < / i > .
* < / li >
* < li >
* MEMBER _ ACCOUNT _ PAYMENT _ INSTRUMENT _ REQUIRED : To complete this operation with this member account , you
* first must associate a valid payment instrument , such as a credit card , with the account . Follow the
* steps at < a href =
* " http : / / docs . aws . amazon . com / organizations / latest / userguide / orgs _ manage _ accounts _ remove . html # leave - without - all - info "
* > To leave an organization when all required account information has not yet been provided < / a > in the
* < i > AWS Organizations User Guide < / i > .
* < / li >
* < li >
* ACCOUNT _ CREATION _ RATE _ LIMIT _ EXCEEDED : You attempted to exceed the number of accounts that you can create
* in one day .
* < / li >
* < li >
* MASTER _ ACCOUNT _ ADDRESS _ DOES _ NOT _ MATCH _ MARKETPLACE : To create an account in this organization , you first
* must migrate the organization ' s master account to the marketplace that corresponds to the master
* account ' s address . For example , accounts with India addresses must be associated with the AISPL
* marketplace . All accounts in an organization must be associated with the same marketplace .
* < / li >
* < li >
* MASTER _ ACCOUNT _ MISSING _ CONTACT _ INFO : To complete this operation , you must first provide contact a valid
* address and phone number for the master account . Then try the operation again .
* < / li >
* < li >
* MASTER _ ACCOUNT _ NOT _ GOVCLOUD _ ENABLED : To complete this operation , the master account must have an
* associated account in the AWS GovCloud ( US - West ) Region . For more information , see < a
* href = " http : / / docs . aws . amazon . com / govcloud - us / latest / UserGuide / govcloud - organizations . html " > AWS
* Organizations < / a > in the < i > AWS GovCloud User Guide . < / i >
* < / li >
* @ throws InvalidInputException
* The requested operation failed because you provided invalid values for one or more of the request
* parameters . This exception includes a reason that contains additional information about the violated
* limit : < / p > < note >
* Some of the reasons in the following list might not be applicable to this specific API or operation :
* < / note >
* < ul >
* < li >
* IMMUTABLE _ POLICY : You specified a policy that is managed by AWS and can ' t be modified .
* < / li >
* < li >
* INPUT _ REQUIRED : You must include a value for all required parameters .
* < / li >
* < li >
* INVALID _ ENUM : You specified a value that isn ' t valid for that parameter .
* < / li >
* < li >
* INVALID _ FULL _ NAME _ TARGET : You specified a full name that contains invalid characters .
* < / li >
* < li >
* INVALID _ LIST _ MEMBER : You provided a list to a parameter that contains at least one invalid value .
* < / li >
* < li >
* INVALID _ PARTY _ TYPE _ TARGET : You specified the wrong type of entity ( account , organization , or email ) as a
* party .
* < / li >
* < li >
* INVALID _ PAGINATION _ TOKEN : Get the value for the < code > NextToken < / code > parameter from the response to a
* previous call of the operation .
* < / li >
* < li >
* INVALID _ PATTERN : You provided a value that doesn ' t match the required pattern .
* < / li >
* < li >
* INVALID _ PATTERN _ TARGET _ ID : You specified a policy target ID that doesn ' t match the required pattern .
* < / li >
* < li >
* INVALID _ ROLE _ NAME : You provided a role name that isn ' t valid . A role name can ' t begin with the reserved
* prefix < code > AWSServiceRoleFor < / code > .
* < / li >
* < li >
* INVALID _ SYNTAX _ ORGANIZATION _ ARN : You specified an invalid Amazon Resource Name ( ARN ) for the
* organization .
* < / li >
* < li >
* INVALID _ SYNTAX _ POLICY _ ID : You specified an invalid policy ID .
* < / li >
* < li >
* MAX _ FILTER _ LIMIT _ EXCEEDED : You can specify only one filter parameter for the operation .
* < / li >
* < li >
* MAX _ LENGTH _ EXCEEDED : You provided a string parameter that is longer than allowed .
* < / li >
* < li >
* MAX _ VALUE _ EXCEEDED : You provided a numeric parameter that has a larger value than allowed .
* < / li >
* < li >
* MIN _ LENGTH _ EXCEEDED : You provided a string parameter that is shorter than allowed .
* < / li >
* < li >
* MIN _ VALUE _ EXCEEDED : You provided a numeric parameter that has a smaller value than allowed .
* < / li >
* < li >
* MOVING _ ACCOUNT _ BETWEEN _ DIFFERENT _ ROOTS : You can move an account only between entities in the same root .
* < / li >
* @ throws ServiceException
* AWS Organizations can ' t complete your request because of an internal service error . Try again later .
* @ throws TooManyRequestsException
* You ' ve sent too many requests in too short a period of time . The limit helps protect against
* denial - of - service attacks . Try again later . < / p >
* For information on limits that affect Organizations , see < a
* href = " https : / / docs . aws . amazon . com / organizations / latest / userguide / orgs _ reference _ limits . html " > Limits of
* AWS Organizations < / a > in the < i > AWS Organizations User Guide < / i > .
* @ sample AWSOrganizations . ListAWSServiceAccessForOrganization
* @ see < a
* href = " http : / / docs . aws . amazon . com / goto / WebAPI / organizations - 2016-11-28 / ListAWSServiceAccessForOrganization "
* target = " _ top " > AWS API Documentation < / a > */
@ Override public ListAWSServiceAccessForOrganizationResult listAWSServiceAccessForOrganization ( ListAWSServiceAccessForOrganizationRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeListAWSServiceAccessForOrganization ( request ) ; |
public class EventIDFilter {
    /**
     * Informs the filter that a receivable service is now active. For each
     * event type the service can receive, the service's ID is registered in
     * the event-type -&gt; interested-services map, so events of those types
     * are no longer filtered out.
     *
     * @param receivableService the service that became active
     */
    public void serviceActive(ReceivableService receivableService) {
        for (ReceivableEvent receivableEvent : receivableService.getReceivableEvents()) {
            Set<ServiceID> servicesReceivingEvent = eventID2serviceIDs.get(receivableEvent.getEventType());
            if (servicesReceivingEvent == null) {
                // Lazily create the set; putIfAbsent resolves the race where
                // another thread registered a set for this event type first —
                // in that case we adopt the winner's set.
                servicesReceivingEvent = new HashSet<ServiceID>();
                Set<ServiceID> anotherSet = eventID2serviceIDs.putIfAbsent(receivableEvent.getEventType(), servicesReceivingEvent);
                if (anotherSet != null) {
                    servicesReceivingEvent = anotherSet;
                }
            }
            // HashSet is not thread-safe; mutation is guarded by locking the set itself.
            synchronized (servicesReceivingEvent) {
                servicesReceivingEvent.add(receivableService.getService());
            }
        }
    }
}
public class GlobalTagLibraryCache { /** * add some GlobalTabLibConfig to the global tag libs we know about . If the provided
* config provides a classloader , we will load the TLDs via that class loaders , otherwise the
* JAR URL will be used to find the TLDs .
* @ param globalTagLibConfig The global tag lib config */
public void addGlobalTagLibConfig ( GlobalTagLibConfig globalTagLibConfig ) { } } | try { TldParser tldParser = new TldParser ( this , configManager , false , globalTagLibConfig . getClassloader ( ) ) ; if ( globalTagLibConfig . getClassloader ( ) == null ) loadTldFromJarInputStream ( globalTagLibConfig , tldParser ) ; else loadTldFromClassloader ( globalTagLibConfig , tldParser ) ; globalTagLibConfigList . add ( globalTagLibConfig ) ; } catch ( JspCoreException e ) { if ( com . ibm . ejs . ras . TraceComponent . isAnyTracingEnabled ( ) && logger . isLoggable ( Level . SEVERE ) ) { logger . logp ( Level . SEVERE , CLASS_NAME , "addGlobalTagLibConfig" , "failed to create TldParser " , e ) ; } } |
public class ResourceGroovyMethods {
    /**
     * Reads the file into a list of Strings, with one item for each line.
     *
     * @param file a File
     * @return a List of lines
     * @throws IOException if an IOException occurs.
     * @see IOGroovyMethods#readLines(java.io.Reader)
     * @since 1.0
     */
    public static List<String> readLines(File file) throws IOException {
        // Delegates to the Reader variant. NOTE(review): presumably
        // IOGroovyMethods.readLines closes the reader it is given — confirm,
        // otherwise this would leak a file handle per call.
        return IOGroovyMethods.readLines(newReader(file));
    }
}
public class Strings { /** * Makes sure a { @ linkplain String } is either { @ code null } or trimmed .
* @ param s the { @ linkplain String } to trim .
* @ return the submitted { @ linkplain String } in trimmed form or { @ code null } if { @ code null } was submitted . */
public static @ Nullable String trim ( @ Nullable String s ) { } } | return ( s != null ? s . trim ( ) : s ) ; |
public class SimpleLog { /** * Logs a message with < code > org . apache . commons . logging . impl . SimpleLog . LOG _ LEVEL _ ERROR < / code > .
* @ param message to log
* @ param t log this cause
* @ see Log # error ( Object , Throwable ) */
public final void error ( Object message , Throwable t ) { } } | if ( isLevelEnabled ( LOG_LEVEL_ERROR ) ) { log ( LOG_LEVEL_ERROR , message , t ) ; } |
public class BitMatrix { /** * A fast method to retrieve one row of data from the matrix as a BitArray .
* @ param y The row to retrieve
* @ param row An optional caller - allocated BitArray , will be allocated if null or too small
* @ return The resulting BitArray - this reference should always be used even when passing
* your own row */
public BitArray getRow ( int y , BitArray row ) { } } | if ( row == null || row . getSize ( ) < width ) { row = new BitArray ( width ) ; } else { row . clear ( ) ; } int offset = y * rowSize ; for ( int x = 0 ; x < rowSize ; x ++ ) { row . setBulk ( x * 32 , bits [ offset + x ] ) ; } return row ; |
public class CoverageDataPng { /** * Draw a coverage data image tile and format as PNG bytes from the double
* array of unsigned 16 bit integer pixel values formatted as
* int [ row ] [ width ]
* @ param unsignedPixelValues
* unsigned 16 bit integer pixel values as [ row ] [ width ]
* @ return coverage data image tile bytes */
public byte [ ] drawTileData ( int [ ] [ ] unsignedPixelValues ) { } } | BufferedImage image = drawTile ( unsignedPixelValues ) ; byte [ ] bytes = getImageBytes ( image ) ; return bytes ; |
public class BouncyCastleUtil { /** * Gets a boolean array representing bits of the KeyUsage extension .
* @ see java . security . cert . X509Certificate # getKeyUsage
* @ exception IOException if failed to extract the KeyUsage extension value . */
public static boolean [ ] getKeyUsage ( X509Extension ext ) throws IOException { } } | DERBitString bits = ( DERBitString ) getExtensionObject ( ext ) ; // copied from X509CertificateObject
byte [ ] bytes = bits . getBytes ( ) ; int length = ( bytes . length * 8 ) - bits . getPadBits ( ) ; boolean [ ] keyUsage = new boolean [ ( length < 9 ) ? 9 : length ] ; for ( int i = 0 ; i != length ; i ++ ) { keyUsage [ i ] = ( bytes [ i / 8 ] & ( 0x80 >>> ( i % 8 ) ) ) != 0 ; } return keyUsage ; |
public class HttpUpgradeHandlerWrapper { /** * / * ( non - Javadoc )
* @ see javax . servlet . http . HttpUpgradeHandler # destroy ( ) */
@ Override public void destroy ( ) { } } | // call predestroy
try { webapp . performPreDestroy ( wrappedHandler ) ; } catch ( InjectionException e ) { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "destroy injectionException during preDestroy: " , e ) ; } } wrappedHandler . destroy ( ) ; |
public class InstallUtil { /** * / * package */
static InstallState getNextInstallState ( InstallState current ) { } } | List < Function < Provider < InstallState > , InstallState > > installStateFilterChain = new ArrayList < > ( ) ; for ( InstallStateFilter setupExtension : InstallStateFilter . all ( ) ) { installStateFilterChain . add ( next -> setupExtension . getNextInstallState ( current , next ) ) ; } // Terminal condition : getNextState ( ) on the current install state
installStateFilterChain . add ( input -> { // Initially , install state is unknown and
// needs to be determined
if ( current == null || InstallState . UNKNOWN . equals ( current ) ) { return getDefaultInstallState ( ) ; } Map < InstallState , InstallState > states = new HashMap < > ( ) ; { states . put ( InstallState . CONFIGURE_INSTANCE , InstallState . INITIAL_SETUP_COMPLETED ) ; states . put ( InstallState . CREATE_ADMIN_USER , InstallState . CONFIGURE_INSTANCE ) ; states . put ( InstallState . INITIAL_PLUGINS_INSTALLING , InstallState . CREATE_ADMIN_USER ) ; states . put ( InstallState . INITIAL_SECURITY_SETUP , InstallState . NEW ) ; states . put ( InstallState . RESTART , InstallState . RUNNING ) ; states . put ( InstallState . UPGRADE , InstallState . INITIAL_SETUP_COMPLETED ) ; states . put ( InstallState . DOWNGRADE , InstallState . INITIAL_SETUP_COMPLETED ) ; states . put ( InstallState . INITIAL_SETUP_COMPLETED , InstallState . RUNNING ) ; } return states . get ( current ) ; } ) ; ProviderChain < InstallState > chain = new ProviderChain < > ( installStateFilterChain . iterator ( ) ) ; return chain . get ( ) ; |
public class SecurityRequirementValidator { /** * { @ inheritDoc } */
@ Override public void validate ( ValidationHelper helper , Context context , String key , SecurityRequirement t ) { } } | if ( ! ( t . isEmpty ( ) ) ) { Map < String , SecurityScheme > schemes = null ; if ( context . getModel ( ) . getComponents ( ) != null ) { schemes = context . getModel ( ) . getComponents ( ) . getSecuritySchemes ( ) ; } Set < String > h = t . keySet ( ) ; for ( String name : h ) { if ( schemes == null || ! schemes . containsKey ( name ) || schemes . get ( name ) == null ) { final String message = Tr . formatMessage ( tc , "securityRequirementNotDeclared" , name ) ; helper . addValidationEvent ( new ValidationEvent ( ValidationEvent . Severity . ERROR , context . getLocation ( ) , message ) ) ; } else { String type = schemes . get ( name ) . getType ( ) != null ? schemes . get ( name ) . getType ( ) . toString ( ) : null ; List < String > value = t . get ( name ) ; if ( "oauth2" . equals ( type ) || "openIdConnect" . equals ( type ) ) { if ( value == null || value . isEmpty ( ) ) { final String message = Tr . formatMessage ( tc , "securityRequirementScopeNamesRequired" , name ) ; helper . addValidationEvent ( new ValidationEvent ( ValidationEvent . Severity . ERROR , context . getLocation ( ) , message ) ) ; } } else if ( "apiKey" . equals ( type ) || "http" . equals ( type ) ) { if ( value != null && ! value . isEmpty ( ) ) { final String message = Tr . formatMessage ( tc , "securityRequirementFieldNotEmpty" , name , value ) ; helper . addValidationEvent ( new ValidationEvent ( ValidationEvent . Severity . ERROR , context . getLocation ( ) , message ) ) ; } } } } } else { final String message = Tr . formatMessage ( tc , "securityRequirementIsEmpty" ) ; helper . addValidationEvent ( new ValidationEvent ( ValidationEvent . Severity . ERROR , context . getLocation ( ) , message ) ) ; } |
public class PagingParams { /** * Creates a new PagingParams and sets it parameters from the AnyValueMap map
* @ param map a AnyValueMap to initialize this PagingParams
* @ return a newly created PagingParams . */
public static PagingParams fromMap ( AnyValueMap map ) { } } | Long skip = map . getAsNullableLong ( "skip" ) ; Long take = map . getAsNullableLong ( "take" ) ; boolean total = map . getAsBooleanWithDefault ( "total" , true ) ; return new PagingParams ( skip , take , total ) ; |
public class XNElement { /** * Returns the first child element with the given name .
* @ param name the child name
* @ return the XElement or null if not present */
public XNElement childElement ( String name ) { } } | for ( XNElement e : children ) { if ( e . name . equals ( name ) ) { return e ; } } return null ; |
public class CveDB { /** * Closes the database connection . Close should be called on this object
* when it is done being used . */
@ Override public synchronized void close ( ) { } } | if ( isOpen ( ) ) { clearCache ( ) ; closeStatements ( ) ; try { connection . close ( ) ; } catch ( SQLException ex ) { LOGGER . error ( "There was an error attempting to close the CveDB, see the log for more details." ) ; LOGGER . debug ( "" , ex ) ; } catch ( Throwable ex ) { LOGGER . error ( "There was an exception attempting to close the CveDB, see the log for more details." ) ; LOGGER . debug ( "" , ex ) ; } releaseResources ( ) ; connectionFactory . cleanup ( ) ; } |
public class LoaderUtil { /** * Loads and instantiates a derived class using its default constructor .
* @ param className The class name .
* @ param clazz The class to cast it to .
* @ param < T > The type of the class to check .
* @ return new instance of the class cast to { @ code T }
* @ throws ClassNotFoundException if the class isn ' t available to the usual ClassLoaders
* @ throws IllegalAccessException if the class can ' t be instantiated through a public constructor
* @ throws InstantiationException if there was an exception whilst instantiating the class
* @ throws NoSuchMethodException if there isn ' t a no - args constructor on the class
* @ throws InvocationTargetException if there was an exception whilst constructing the class
* @ throws ClassCastException if the constructed object isn ' t type compatible with { @ code T }
* @ since 2.1 */
public static < T > T newCheckedInstanceOf ( final String className , final Class < T > clazz ) throws ClassNotFoundException , NoSuchMethodException , InvocationTargetException , InstantiationException , IllegalAccessException { } } | return clazz . cast ( newInstanceOf ( className ) ) ; |
public class TreeInfo { /** * Get the start position for a tree node . The start position is
* defined to be the position of the first character of the first
* token of the node ' s source text .
* @ param tree The tree node */
public static int getStartPos ( JCTree tree ) { } } | if ( tree == null ) return Position . NOPOS ; switch ( tree . getTag ( ) ) { case MODULEDEF : { JCModuleDecl md = ( JCModuleDecl ) tree ; return md . mods . annotations . isEmpty ( ) ? md . pos : md . mods . annotations . head . pos ; } case PACKAGEDEF : { JCPackageDecl pd = ( JCPackageDecl ) tree ; return pd . annotations . isEmpty ( ) ? pd . pos : pd . annotations . head . pos ; } case APPLY : return getStartPos ( ( ( JCMethodInvocation ) tree ) . meth ) ; case ASSIGN : return getStartPos ( ( ( JCAssign ) tree ) . lhs ) ; case BITOR_ASG : case BITXOR_ASG : case BITAND_ASG : case SL_ASG : case SR_ASG : case USR_ASG : case PLUS_ASG : case MINUS_ASG : case MUL_ASG : case DIV_ASG : case MOD_ASG : case OR : case AND : case BITOR : case BITXOR : case BITAND : case EQ : case NE : case LT : case GT : case LE : case GE : case SL : case SR : case USR : case PLUS : case MINUS : case MUL : case DIV : case MOD : case POSTINC : case POSTDEC : return getStartPos ( ( ( JCOperatorExpression ) tree ) . getOperand ( LEFT ) ) ; case CLASSDEF : { JCClassDecl node = ( JCClassDecl ) tree ; if ( node . mods . pos != Position . NOPOS ) return node . mods . pos ; break ; } case CONDEXPR : return getStartPos ( ( ( JCConditional ) tree ) . cond ) ; case EXEC : return getStartPos ( ( ( JCExpressionStatement ) tree ) . expr ) ; case INDEXED : return getStartPos ( ( ( JCArrayAccess ) tree ) . indexed ) ; case METHODDEF : { JCMethodDecl node = ( JCMethodDecl ) tree ; if ( node . mods . pos != Position . NOPOS ) return node . mods . pos ; if ( node . typarams . nonEmpty ( ) ) // List . nil ( ) used for no typarams
return getStartPos ( node . typarams . head ) ; return node . restype == null ? node . pos : getStartPos ( node . restype ) ; } case SELECT : return getStartPos ( ( ( JCFieldAccess ) tree ) . selected ) ; case TYPEAPPLY : return getStartPos ( ( ( JCTypeApply ) tree ) . clazz ) ; case TYPEARRAY : return getStartPos ( ( ( JCArrayTypeTree ) tree ) . elemtype ) ; case TYPETEST : return getStartPos ( ( ( JCInstanceOf ) tree ) . expr ) ; case ANNOTATED_TYPE : { JCAnnotatedType node = ( JCAnnotatedType ) tree ; if ( node . annotations . nonEmpty ( ) ) { if ( node . underlyingType . hasTag ( TYPEARRAY ) || node . underlyingType . hasTag ( SELECT ) ) { return getStartPos ( node . underlyingType ) ; } else { return getStartPos ( node . annotations . head ) ; } } else { return getStartPos ( node . underlyingType ) ; } } case NEWCLASS : { JCNewClass node = ( JCNewClass ) tree ; if ( node . encl != null ) return getStartPos ( node . encl ) ; break ; } case VARDEF : { JCVariableDecl node = ( JCVariableDecl ) tree ; if ( node . mods . pos != Position . NOPOS ) { return node . mods . pos ; } else if ( node . vartype == null ) { // if there ' s no type ( partially typed lambda parameter )
// simply return node position
return node . pos ; } else { return getStartPos ( node . vartype ) ; } } case ERRONEOUS : { JCErroneous node = ( JCErroneous ) tree ; if ( node . errs != null && node . errs . nonEmpty ( ) ) return getStartPos ( node . errs . head ) ; } } return tree . pos ; |
public class AlipaySignature { /** * 验签并解密
* < b > 目前适用于公众号 < / b > < br >
* params参数示例 :
* < br > {
* < br > biz _ content = M0qGiGz + 8kIpxe8aF4geWJdBn0aBTuJRQItLHo9R7o5JGhpic / MIUjvXo2BLB + + BbkSq2OsJCEQFDZ0zK5AJYwvBgeRX30gvEj6eXqXRt16 / IkB9HzAccEqKmRHrZJ7PjQWE0KfvDAHsJqFIeMvEYk1Zei2QkwSQPlso7K0oheo / iT + HYE8aTATnkqD / ByD9iNDtGg38pCa2xnnns63abKsKoV8h0DfHWgPH62urGY7Pye3r9FCOXA2Ykm8X4 / Bl1bWFN / PFCEJHWe / HXj8KJKjWMO6ttsoV0xRGfeyUO8agu6t587Dl5ux5zD / s8Lbg5QXygaOwo3Fz1G8EqmGhi4 + soEIQb8DBYanQOS3X + m46tVqBGMw8Oe + hsyIMpsjwF4HaPKMr37zpW3fe7xOMuimbZ0wq53YP / jhQv6XWodjT3mL0H5ACqcsSn727B5ztquzCPiwrqyjUHjJQQefFTzOse8snaWNQTUsQS7aLsHq0FveGpSBYORyA90qPdiTjXIkVP7mAiYiAIWW9pCEC7F3XtViKTZ8FRMM9ySicfuAlf3jtap6v2KPMtQv70X + hlmzO / IXB6W0Ep8DovkF5rB4r / BJYJLw / 6AS0LZM9w5JfnAZhfGM2rKzpfNsgpOgEZS1WleG4I2hoQC0nxg9IcP0Hs + nWIPkEUcYNaiXqeBc = ,
* < br > sign = rlqgA8O + RzHBVYLyHmrbODVSANWPXf3pSrr82OCO / bm3upZiXSYrX5fZr6UBmG6BZRAydEyTIguEW6VRuAKjnaO / sOiR9BsSrOdXbD5Rhos / Xt7 / mGUWbTOt / F + 3W0 / XLuDNmuYg1yIC / 6hzkg44kgtdSTsQbOC9gWM7ayB4J4c = ,
* sign _ type = RSA ,
* < br > charset = UTF - 8
* < br > }
* @ param params
* @ param alipayPublicKey 支付宝公钥
* @ param cusPrivateKey 商户私钥
* @ param isCheckSign 是否验签
* @ param isDecrypt 是否解密
* @ return 解密后明文 , 验签失败则异常抛出
* @ throws AlipayApiException */
public static String checkSignAndDecrypt ( Map < String , String > params , String alipayPublicKey , String cusPrivateKey , boolean isCheckSign , boolean isDecrypt ) throws AlipayApiException { } } | String charset = params . get ( "charset" ) ; String bizContent = params . get ( "biz_content" ) ; if ( isCheckSign ) { if ( ! rsaCheckV2 ( params , alipayPublicKey , charset ) ) { throw new AlipayApiException ( "rsaCheck failure:rsaParams=" + params ) ; } } if ( isDecrypt ) { return rsaDecrypt ( bizContent , cusPrivateKey , charset ) ; } return bizContent ; |
public class CommerceAccountUserRelLocalServiceBaseImpl { /** * Updates the commerce account user rel in the database or adds it if it does not yet exist . Also notifies the appropriate model listeners .
* @ param commerceAccountUserRel the commerce account user rel
* @ return the commerce account user rel that was updated */
@ Indexable ( type = IndexableType . REINDEX ) @ Override public CommerceAccountUserRel updateCommerceAccountUserRel ( CommerceAccountUserRel commerceAccountUserRel ) { } } | return commerceAccountUserRelPersistence . update ( commerceAccountUserRel ) ; |
public class SerializedForm { /** * Compute default Serializable fields from all members of ClassSymbol .
* Since the fields of ClassDocImpl might not contain private or
* package accessible fields , must walk over all members of ClassSymbol . */
private void computeDefaultSerializableFields ( DocEnv env , ClassSymbol def , ClassDocImpl cd ) { } } | for ( Scope . Entry e = def . members ( ) . elems ; e != null ; e = e . sibling ) { if ( e . sym != null && e . sym . kind == Kinds . VAR ) { VarSymbol f = ( VarSymbol ) e . sym ; if ( ( f . flags ( ) & Flags . STATIC ) == 0 && ( f . flags ( ) & Flags . TRANSIENT ) == 0 ) { // # # # No modifier filtering applied here .
FieldDocImpl fd = env . getFieldDoc ( f ) ; // # # # Add to beginning .
// # # # Preserve order used by old ' javadoc ' .
fields . prepend ( fd ) ; } } } |
public class DistCp { /** * Delete the dst files / dirs which do not exist in src */
static private void deleteNonexisting ( FileSystem dstfs , FileStatus dstroot , Path dstsorted , FileSystem jobfs , Path jobdir , JobConf jobconf , Configuration conf ) throws IOException { } } | if ( ! dstroot . isDir ( ) ) { throw new IOException ( "dst must be a directory when option " + Options . DELETE . cmd + " is set, but dst (= " + dstroot . getPath ( ) + ") is not a directory." ) ; } // write dst lsr results
final Path dstlsr = new Path ( jobdir , "_distcp_dst_lsr" ) ; final SequenceFile . Writer writer = SequenceFile . createWriter ( jobfs , jobconf , dstlsr , Text . class , FileStatus . class , SequenceFile . CompressionType . NONE ) ; try { // do lsr to get all file statuses in dstroot
final Stack < FileStatus > lsrstack = new Stack < FileStatus > ( ) ; for ( lsrstack . push ( dstroot ) ; ! lsrstack . isEmpty ( ) ; ) { final FileStatus status = lsrstack . pop ( ) ; if ( status . isDir ( ) ) { for ( FileStatus child : dstfs . listStatus ( status . getPath ( ) ) ) { String relative = makeRelative ( dstroot . getPath ( ) , child . getPath ( ) ) ; writer . append ( new Text ( relative ) , child ) ; lsrstack . push ( child ) ; } } } } finally { checkAndClose ( writer ) ; } // sort lsr results
final Path sortedlsr = new Path ( jobdir , "_distcp_dst_lsr_sorted" ) ; SequenceFile . Sorter sorter = new SequenceFile . Sorter ( jobfs , new Text . Comparator ( ) , Text . class , FileStatus . class , jobconf ) ; sorter . sort ( dstlsr , sortedlsr ) ; // compare lsr list and dst list
SequenceFile . Reader lsrin = null ; SequenceFile . Reader dstin = null ; try { lsrin = new SequenceFile . Reader ( jobfs , sortedlsr , jobconf ) ; dstin = new SequenceFile . Reader ( jobfs , dstsorted , jobconf ) ; // compare sorted lsr list and sorted dst list
final Text lsrpath = new Text ( ) ; final FileStatus lsrstatus = new FileStatus ( ) ; final Text dstpath = new Text ( ) ; final Text dstfrom = new Text ( ) ; final FsShell shell = new FsShell ( conf ) ; final String [ ] shellargs = { "-rmr" , null } ; boolean hasnext = dstin . next ( dstpath , dstfrom ) ; for ( ; lsrin . next ( lsrpath , lsrstatus ) ; ) { int dst_cmp_lsr = dstpath . compareTo ( lsrpath ) ; for ( ; hasnext && dst_cmp_lsr < 0 ; ) { hasnext = dstin . next ( dstpath , dstfrom ) ; dst_cmp_lsr = dstpath . compareTo ( lsrpath ) ; } if ( dst_cmp_lsr == 0 ) { // lsrpath exists in dst , skip it
hasnext = dstin . next ( dstpath , dstfrom ) ; } else { // lsrpath does not exist , delete it
String s = new Path ( dstroot . getPath ( ) , lsrpath . toString ( ) ) . toString ( ) ; if ( shellargs [ 1 ] == null || ! isAncestorPath ( shellargs [ 1 ] , s ) ) { shellargs [ 1 ] = s ; int r = 0 ; try { r = shell . run ( shellargs ) ; } catch ( Exception e ) { throw new IOException ( "Exception from shell." , e ) ; } if ( r != 0 ) { throw new IOException ( "\"" + shellargs [ 0 ] + " " + shellargs [ 1 ] + "\" returns non-zero value " + r ) ; } } } } } finally { checkAndClose ( lsrin ) ; checkAndClose ( dstin ) ; } |
public class PropertiesConverter { /** * Retrieve ( in string format ) from this field .
* @ return This property from the property field . */
public String getString ( ) { } } | if ( this . getField ( ) instanceof PropertiesField ) // Always
return ( ( PropertiesField ) this . getField ( ) ) . getProperty ( m_strProperty ) ; return DBConstants . BLANK ; |
public class FormField { /** * Returns the first value of this form fold or { @ code null } .
* @ return the first value or { @ code null }
* @ since 4.3 */
public String getFirstValue ( ) { } } | CharSequence firstValue ; synchronized ( values ) { if ( values . isEmpty ( ) ) { return null ; } firstValue = values . get ( 0 ) ; } return firstValue . toString ( ) ; |
public class AWSKMSClient { /** * Deletes key material that you previously imported . This operation makes the specified customer master key ( CMK )
* unusable . For more information about importing key material into AWS KMS , see < a
* href = " http : / / docs . aws . amazon . com / kms / latest / developerguide / importing - keys . html " > Importing Key Material < / a > in the
* < i > AWS Key Management Service Developer Guide < / i > . You cannot perform this operation on a CMK in a different AWS
* account .
* When the specified CMK is in the < code > PendingDeletion < / code > state , this operation does not change the CMK ' s
* state . Otherwise , it changes the CMK ' s state to < code > PendingImport < / code > .
* After you delete key material , you can use < a > ImportKeyMaterial < / a > to reimport the same key material into the
* CMK .
* The result of this operation varies with the key state of the CMK . For details , see < a
* href = " http : / / docs . aws . amazon . com / kms / latest / developerguide / key - state . html " > How Key State Affects Use of a
* Customer Master Key < / a > in the < i > AWS Key Management Service Developer Guide < / i > .
* @ param deleteImportedKeyMaterialRequest
* @ return Result of the DeleteImportedKeyMaterial operation returned by the service .
* @ throws InvalidArnException
* The request was rejected because a specified ARN was not valid .
* @ throws UnsupportedOperationException
* The request was rejected because a specified parameter is not supported or a specified resource is not
* valid for this operation .
* @ throws DependencyTimeoutException
* The system timed out while trying to fulfill the request . The request can be retried .
* @ throws NotFoundException
* The request was rejected because the specified entity or resource could not be found .
* @ throws KMSInternalException
* The request was rejected because an internal exception occurred . The request can be retried .
* @ throws KMSInvalidStateException
* The request was rejected because the state of the specified resource is not valid for this request . < / p >
* For more information about how key state affects the use of a CMK , see < a
* href = " http : / / docs . aws . amazon . com / kms / latest / developerguide / key - state . html " > How Key State Affects Use of a
* Customer Master Key < / a > in the < i > AWS Key Management Service Developer Guide < / i > .
* @ sample AWSKMS . DeleteImportedKeyMaterial
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / kms - 2014-11-01 / DeleteImportedKeyMaterial " target = " _ top " > AWS
* API Documentation < / a > */
@ Override public DeleteImportedKeyMaterialResult deleteImportedKeyMaterial ( DeleteImportedKeyMaterialRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeDeleteImportedKeyMaterial ( request ) ; |
public class Group { /** * Search for element name in the sorted regular expression
* list , if found return the group name . If not , return null .
* @ param elementName Name of element to be found in the regular
* expression list . */
String regExpGroupName ( String elementName ) { } } | for ( String regexp : sortedRegExpList ) { if ( elementName . startsWith ( regexp ) ) { return regExpGroupMap . get ( regexp ) ; } } return null ; |
public class RunGraphResponse { /** * < pre >
* If the request asked for execution stats or cost graph , these are returned
* here .
* < / pre >
* < code > optional . tensorflow . StepStats step _ stats = 2 ; < / code > */
public org . tensorflow . framework . StepStats getStepStats ( ) { } } | return stepStats_ == null ? org . tensorflow . framework . StepStats . getDefaultInstance ( ) : stepStats_ ; |
public class ListRecoveryPointsByResourceResult { /** * An array of objects that contain detailed information about recovery points of the specified resource type .
* @ param recoveryPoints
* An array of objects that contain detailed information about recovery points of the specified resource
* type . */
public void setRecoveryPoints ( java . util . Collection < RecoveryPointByResource > recoveryPoints ) { } } | if ( recoveryPoints == null ) { this . recoveryPoints = null ; return ; } this . recoveryPoints = new java . util . ArrayList < RecoveryPointByResource > ( recoveryPoints ) ; |
public class HiveQueryExecutionWriter { /** * Method to add properties needed by publisher to preserve partition params */
private void addPropsForPublisher ( QueryBasedHiveConversionEntity hiveConversionEntity ) { } } | if ( ! hiveConversionEntity . getPartition ( ) . isPresent ( ) ) { return ; } ConvertibleHiveDataset convertibleHiveDataset = hiveConversionEntity . getConvertibleHiveDataset ( ) ; for ( String format : convertibleHiveDataset . getDestFormats ( ) ) { Optional < ConvertibleHiveDataset . ConversionConfig > conversionConfigForFormat = convertibleHiveDataset . getConversionConfigForFormat ( format ) ; if ( ! conversionConfigForFormat . isPresent ( ) ) { continue ; } SchemaAwareHivePartition sourcePartition = hiveConversionEntity . getHivePartition ( ) . get ( ) ; // Get complete source partition name dbName @ tableName @ partitionName
String completeSourcePartitionName = StringUtils . join ( Arrays . asList ( sourcePartition . getTable ( ) . getDbName ( ) , sourcePartition . getTable ( ) . getTableName ( ) , sourcePartition . getName ( ) ) , AT_CHAR ) ; ConvertibleHiveDataset . ConversionConfig config = conversionConfigForFormat . get ( ) ; // Get complete destination partition name dbName @ tableName @ partitionName
String completeDestPartitionName = StringUtils . join ( Arrays . asList ( config . getDestinationDbName ( ) , config . getDestinationTableName ( ) , sourcePartition . getName ( ) ) , AT_CHAR ) ; workUnit . setProp ( HiveConvertPublisher . COMPLETE_SOURCE_PARTITION_NAME , completeSourcePartitionName ) ; workUnit . setProp ( HiveConvertPublisher . COMPLETE_DEST_PARTITION_NAME , completeDestPartitionName ) ; } |
public class CmsGroupTransferList { /** * Returns the list of groups to display . < p >
* @ return the list of groups to display
* @ throws CmsException if something goes wrong */
protected List < CmsGroup > getGroups ( ) throws CmsException { } } | return CmsPrincipal . filterCoreGroups ( OpenCms . getOrgUnitManager ( ) . getGroups ( getCms ( ) , "" , true ) ) ; |
public class IsisModuleSecurityRealm { /** * region > execute ( Isis integration ) */
< V > V execute ( final TransactionalClosureWithReturn < V > closure ) { } } | return getSessionFactory ( ) . doInSession ( new Callable < V > ( ) { @ Override public V call ( ) { PersistenceSession persistenceSession = getPersistenceSession ( ) ; persistenceSession . getServicesInjector ( ) . injectServicesInto ( closure ) ; return doExecute ( closure ) ; } } ) ; |
public class TransactionOLTP { /** * Utility function to get a read - only Tinkerpop traversal .
* @ return A read - only Tinkerpop traversal for manually traversing the graph */
public GraphTraversalSource getTinkerTraversal ( ) { } } | operateOnOpenGraph ( ( ) -> null ) ; // This is to check if the graph is open
if ( graphTraversalSource == null ) { graphTraversalSource = janusGraph . traversal ( ) . withStrategies ( ReadOnlyStrategy . instance ( ) ) ; } return graphTraversalSource ; |
public class BasicStreamReader { /** * Method called to parse beginning of the next event within
* document tree , and return its type . */
private final int nextFromTree ( ) throws XMLStreamException { } } | int i ; // First , do we need to finish currently open token ?
if ( mTokenState < mStTextThreshold ) { // No need to update state . . . will get taken care of
/* 03 - Mar - 2006 , TSa : Let ' s add a sanity check here , temporarily ,
* to ensure we never skip any textual content when it is
* to be validated */
if ( mVldContent == XMLValidator . CONTENT_ALLOW_VALIDATABLE_TEXT ) { if ( mCurrToken == CHARACTERS || mCurrToken == CDATA ) { // should never happen
throwParseError ( "Internal error: skipping validatable text" ) ; } } i = skipToken ( ) ; // note : skipToken ( ) updates the start location
} else { // Start / end elements are never unfinished ( ie . are always
// completely read in )
if ( mCurrToken == START_ELEMENT ) { // Start tag may be an empty tag :
if ( mStEmptyElem ) { // and if so , we ' ll then get ' virtual ' close tag :
mStEmptyElem = false ; // . . . and location info is correct already
// 27 - Feb - 2009 , TSa : but we do have to handle validation of the end tag now
int vld = mElementStack . validateEndElement ( ) ; mVldContent = vld ; mValidateText = ( vld == XMLValidator . CONTENT_ALLOW_VALIDATABLE_TEXT ) ; return END_ELEMENT ; } } else if ( mCurrToken == END_ELEMENT ) { // Close tag removes current element from stack
if ( ! mElementStack . pop ( ) ) { // false if root closed
// if so , we ' ll get to epilog , unless in fragment mode
if ( ! mConfig . inputParsingModeFragment ( ) ) { return closeContentTree ( ) ; } // in fragment mode , fine , we ' ll just continue
} } else if ( mCurrToken == CDATA && mTokenState <= TOKEN_PARTIAL_SINGLE ) { /* Just returned a partial CDATA . . . that ' s ok , just need to
* know we won ' t get opening marker etc .
* The tricky part here is just to ensure there ' s at least
* one character ; if not , need to just discard the empty
* ' event ' ( note that it is possible to have an initial
* empty CDATA event for truly empty CDATA block ; but not
* partial ones ! ) . Let ' s just read it like a new
* CData section first : */
// First , need to update the start location . . .
mTokenInputTotal = mCurrInputProcessed + mInputPtr ; mTokenInputRow = mCurrInputRow ; mTokenInputCol = mInputPtr - mCurrInputRowStart ; char c = ( mInputPtr < mInputEnd ) ? mInputBuffer [ mInputPtr ++ ] : getNextChar ( SUFFIX_IN_CDATA ) ; if ( readCDataPrimary ( c ) ) { // got it all !
// note : can not be in coalescing mode at this point ;
// as we can never have partial cdata without unfinished token
// . . . still need to have gotten at least 1 char though :
if ( mTextBuffer . size ( ) > 0 ) { return CDATA ; } // otherwise need to continue and parse the next event
} else { // Hmmh . Have to verify we get at least one char from
// CData section ; if so , we are good to go for now ;
// if not , need to get that damn char first :
if ( mTextBuffer . size ( ) == 0 && readCDataSecondary ( mCfgLazyParsing ? 1 : mShortestTextSegment ) ) { // Ok , all of it read
if ( mTextBuffer . size ( ) > 0 ) { // And had some contents
mTokenState = TOKEN_FULL_SINGLE ; return CDATA ; } // if nothing read , we ' ll just fall back ( see below )
} else { // good enough !
mTokenState = TOKEN_PARTIAL_SINGLE ; return CDATA ; } } /* If we get here , it was the end of the section , without
* any more text inside CDATA , so let ' s just continue */
} // Once again , need to update the start location info :
mTokenInputTotal = mCurrInputProcessed + mInputPtr ; mTokenInputRow = mCurrInputRow ; mTokenInputCol = mInputPtr - mCurrInputRowStart ; i = getNext ( ) ; } if ( i < 0 ) { // 07 - Oct - 2005 , TSa : May be ok in fragment mode ( not otherwise ) ,
// but we can just check if element stack has anything , as that handles all cases
if ( ! mElementStack . isEmpty ( ) ) { throwUnexpectedEOF ( ) ; } return handleEOF ( false ) ; } /* 26 - Aug - 2004 , TSa : We have to deal with entities , usually , if
* they are the next thing ; even in non - expanding mode there
* are entities and then there are entities . . . : - )
* Let ' s start with char entities ; they can be expanded right away . */
while ( i == '&' ) { mWsStatus = ALL_WS_UNKNOWN ; /* 30 - Aug - 2004 , TSa : In some contexts entities are not
* allowed in any way , shape or form : */
if ( mVldContent == XMLValidator . CONTENT_ALLOW_NONE ) { /* May be char entity , general entity ; whatever it is it ' s
* invalid ! */
reportInvalidContent ( ENTITY_REFERENCE ) ; } /* Need to call different methods based on whether we can do
* automatic entity expansion or not : */
int ch = mCfgReplaceEntities ? fullyResolveEntity ( true ) : resolveCharOnlyEntity ( true ) ; if ( ch != 0 ) { /* Char - entity . . . need to initialize text output buffer , then ;
* independent of whether it ' ll be needed or not . */
/* 30 - Aug - 2004 , TSa : In some contexts only white space is
* accepted . . . */
if ( mVldContent <= XMLValidator . CONTENT_ALLOW_WS ) { // As per xml specs , only straight white space is legal
if ( ch > CHAR_SPACE ) { /* 21 - Sep - 2008 , TSa : Used to also require a call to
* ' mElementStack . reallyValidating ' , if only ws
* allowed , to cover the case where non - typing - dtd
* was only used to discover SPACE type . But
* now that we have CONTENT _ ALLOW _ WS _ NONSTRICT ,
* shouldn ' t be needed . */
// if ( mVldContent < XMLValidator . CONTENT _ ALLOW _ WS | | mElementStack . reallyValidating ( ) ) {
reportInvalidContent ( CHARACTERS ) ; } } TextBuffer tb = mTextBuffer ; tb . resetInitialized ( ) ; if ( ch <= 0xFFFF ) { tb . append ( ( char ) ch ) ; } else { ch -= 0x10000 ; tb . append ( ( char ) ( ( ch >> 10 ) + 0xD800 ) ) ; tb . append ( ( char ) ( ( ch & 0x3FF ) + 0xDC00 ) ) ; } mTokenState = TOKEN_STARTED ; return CHARACTERS ; } /* Nope ; was a general entity . . . in auto - mode , it ' s now been
* expanded ; in non - auto , need to figure out entity itself . */
if ( ! mCfgReplaceEntities || mCfgTreatCharRefsAsEntities ) { if ( ! mCfgTreatCharRefsAsEntities ) { final EntityDecl ed = resolveNonCharEntity ( ) ; // Note : ed may still be null at this point
mCurrEntity = ed ; } // Note : ed may still be null at this point
mTokenState = TOKEN_FULL_COALESCED ; /* / / let ' s not worry about non - parsed entities , since this is unexpanded mode
/ / . . . although it ' d be an error either way ? Should we report it ?
if ( ed ! = null & & ! ed . isParsed ( ) ) {
throwParseError ( " Reference to unparsed entity ' " + ed . getName ( ) + " ' from content not allowed . " ) ; */
return ENTITY_REFERENCE ; } // Otherwise automatic expansion fine ; just need the next char :
i = getNextChar ( SUFFIX_IN_DOC ) ; } if ( i == '<' ) { // Markup
// And then it should be easy to figure out type :
char c = getNextChar ( SUFFIX_IN_ELEMENT ) ; if ( c == '?' ) { // proc . inst
// 30 - Aug - 2004 , TSa : Not legal for EMPTY elements
if ( mVldContent == XMLValidator . CONTENT_ALLOW_NONE ) { reportInvalidContent ( PROCESSING_INSTRUCTION ) ; } return readPIPrimary ( ) ; } if ( c == '!' ) { // CDATA or comment
// Need to figure out bit more first . . .
int type = nextFromTreeCommentOrCData ( ) ; // 30 - Aug - 2004 , TSa : Not legal for EMPTY elements
if ( mVldContent == XMLValidator . CONTENT_ALLOW_NONE ) { reportInvalidContent ( type ) ; } return type ; } if ( c == '/' ) { // always legal ( if name matches etc )
readEndElem ( ) ; return END_ELEMENT ; } if ( c == ':' || isNameStartChar ( c ) ) { /* Note : checking for EMPTY content type is done by the
* validator , no need to check here */
handleStartElem ( c ) ; return START_ELEMENT ; } if ( c == '[' ) { throwUnexpectedChar ( c , " in content after '<' (malformed <![CDATA[]] directive?)" ) ; } throwUnexpectedChar ( c , " in content after '<' (malformed start element?)." ) ; } /* Text . . . ok ; better parse the ' easy ' ( consequtive ) portions right
* away , since that ' s practically free ( still need to scan those
* characters no matter what , even if skipping ) . */
/* But first , do we expect to get ignorable white space ( only happens
* in validating mode ) ? If so , needs bit different handling : */
if ( mVldContent <= XMLValidator . CONTENT_ALLOW_WS_NONSTRICT ) { if ( mVldContent == XMLValidator . CONTENT_ALLOW_NONE ) { if ( mElementStack . reallyValidating ( ) ) { reportInvalidContent ( CHARACTERS ) ; } } if ( i <= CHAR_SPACE ) { /* Note : need not worry about coalescing , since non - whitespace
* text is illegal ( ie . can not have CDATA ) */
mTokenState = ( readSpacePrimary ( ( char ) i , false ) ) ? TOKEN_FULL_COALESCED : TOKEN_STARTED ; return SPACE ; } // Problem if we are really validating ; otherwise not
if ( mElementStack . reallyValidating ( ) ) { reportInvalidContent ( CHARACTERS ) ; } /* otherwise , we know it ' s supposed to contain just space ( or
* be empty ) , but as we are not validating it ' s not an error
* for this not to be true . Type should be changed to
* CHARACTERS tho . */
} // Further , when coalescing , can not be sure if we REALLY got it all
if ( readTextPrimary ( ( char ) i ) ) { // reached following markup
mTokenState = TOKEN_FULL_SINGLE ; } else { // If not coalescing , this may be enough for current event
if ( ! mCfgCoalesceText && mTextBuffer . size ( ) >= mShortestTextSegment ) { mTokenState = TOKEN_PARTIAL_SINGLE ; } else { mTokenState = TOKEN_STARTED ; } } return CHARACTERS ; |
public class DateFormatSymbols {
    /**
     * Initializes this instance's symbols by shallow-copying every symbol
     * field from another instance. No arrays are cloned, so both instances
     * share the same underlying data after this call.
     * TODO Clean up initialization methods for subclasses
     *
     * @param dfs the instance to copy symbol data from
     */
    void initializeData(DateFormatSymbols dfs) {
        // Era names in the three supported widths.
        this.eras = dfs.eras;
        this.eraNames = dfs.eraNames;
        this.narrowEras = dfs.narrowEras;
        // Month names: format (in-sentence) widths ...
        this.months = dfs.months;
        this.shortMonths = dfs.shortMonths;
        this.narrowMonths = dfs.narrowMonths;
        // ... and standalone widths.
        this.standaloneMonths = dfs.standaloneMonths;
        this.standaloneShortMonths = dfs.standaloneShortMonths;
        this.standaloneNarrowMonths = dfs.standaloneNarrowMonths;
        // Weekday names, format widths.
        this.weekdays = dfs.weekdays;
        this.shortWeekdays = dfs.shortWeekdays;
        this.shorterWeekdays = dfs.shorterWeekdays;
        this.narrowWeekdays = dfs.narrowWeekdays;
        // Weekday names, standalone widths.
        this.standaloneWeekdays = dfs.standaloneWeekdays;
        this.standaloneShortWeekdays = dfs.standaloneShortWeekdays;
        this.standaloneShorterWeekdays = dfs.standaloneShorterWeekdays;
        this.standaloneNarrowWeekdays = dfs.standaloneNarrowWeekdays;
        // AM/PM markers and time separator.
        this.ampms = dfs.ampms;
        this.ampmsNarrow = dfs.ampmsNarrow;
        this.timeSeparator = dfs.timeSeparator;
        // Quarter names, both format and standalone widths.
        this.shortQuarters = dfs.shortQuarters;
        this.quarters = dfs.quarters;
        this.standaloneShortQuarters = dfs.standaloneShortQuarters;
        this.standaloneQuarters = dfs.standaloneQuarters;
        // Calendar-specific data (leap months, cyclic years, zodiac names).
        this.leapMonthPatterns = dfs.leapMonthPatterns;
        this.shortYearNames = dfs.shortYearNames;
        this.shortZodiacNames = dfs.shortZodiacNames;
        // Day-period (e.g. "in the morning") names in all widths.
        this.abbreviatedDayPeriods = dfs.abbreviatedDayPeriods;
        this.wideDayPeriods = dfs.wideDayPeriods;
        this.narrowDayPeriods = dfs.narrowDayPeriods;
        this.standaloneAbbreviatedDayPeriods = dfs.standaloneAbbreviatedDayPeriods;
        this.standaloneWideDayPeriods = dfs.standaloneWideDayPeriods;
        this.standaloneNarrowDayPeriods = dfs.standaloneNarrowDayPeriods;
        this.zoneStrings = dfs.zoneStrings; // always null at initialization time for now
        this.localPatternChars = dfs.localPatternChars;
        this.capitalization = dfs.capitalization;
        // Locale bookkeeping copied verbatim.
        this.actualLocale = dfs.actualLocale;
        this.validLocale = dfs.validLocale;
        this.requestedLocale = dfs.requestedLocale;
    }
}
public class OmemoService {
    /**
     * Return a copy of the given deviceList of user contact, but with stale devices marked as inactive.
     * Never mark our own device as stale. If we haven't yet received a message from a device, store the current date
     * as last date of message receipt to allow future decisions.
     * A stale device is a device from which we haven't received an OMEMO message for longer than the
     * configured maximum age.
     *
     * @param userDevice our OmemoDevice.
     * @param contact subjects BareJid.
     * @param contactsDeviceList subjects deviceList.
     * @param maxAgeHours maximum age; NOTE(review): the parameter name says hours, but an earlier
     *                    version of this doc said milliseconds — confirm the unit expected by isStale().
     * @return copy of subjects deviceList with stale devices marked as inactive.
     */
    private OmemoCachedDeviceList removeStaleDevicesFromDeviceList(OmemoDevice userDevice, BareJid contact, OmemoCachedDeviceList contactsDeviceList, int maxAgeHours) {
        OmemoCachedDeviceList deviceList = new OmemoCachedDeviceList(contactsDeviceList); // Don't work on original list.
        // Iterate through original list, but modify copy instead
        for (int deviceId : contactsDeviceList.getActiveDevices()) {
            OmemoDevice device = new OmemoDevice(contact, deviceId);
            // If we have no record of this device's first sighting, record "now" so a
            // freshly discovered device gets a full grace period before going stale.
            Date lastDeviceIdPublication = getOmemoStoreBackend().getDateOfLastDeviceIdPublication(userDevice, device);
            if (lastDeviceIdPublication == null) {
                lastDeviceIdPublication = new Date();
                getOmemoStoreBackend().setDateOfLastDeviceIdPublication(userDevice, device, lastDeviceIdPublication);
            }
            // Same grace-period treatment for the last received message.
            Date lastMessageReceived = getOmemoStoreBackend().getDateOfLastReceivedMessage(userDevice, device);
            if (lastMessageReceived == null) {
                lastMessageReceived = new Date();
                getOmemoStoreBackend().setDateOfLastReceivedMessage(userDevice, device, lastMessageReceived);
            }
            // A device is only considered stale when BOTH its publication date AND its
            // last-message date exceed the age limit (note the &=).
            boolean stale = isStale(userDevice, device, lastDeviceIdPublication, maxAgeHours);
            stale &= isStale(userDevice, device, lastMessageReceived, maxAgeHours);
            if (stale) {
                deviceList.addInactiveDevice(deviceId);
            }
        }
        return deviceList;
    }
}
public class RestBinaryHandlerImpl { /** * Returns a default Content - Disposition { @ link String } for a given binary property .
* @ param binaryProperty a non - null { @ link javax . jcr . Property }
* @ return a non - null String which represents a valid Content - Disposition .
* @ throws javax . jcr . RepositoryException if any JCR related operation involving the binary property fail . */
@ Override public String getDefaultContentDisposition ( Property binaryProperty ) throws RepositoryException { } } | Node parentNode = getParentNode ( binaryProperty ) ; String parentName = parentNode . getName ( ) ; if ( StringUtil . isBlank ( parentName ) ) { parentName = "binary" ; } return DEFAULT_CONTENT_DISPOSITION_PREFIX + parentName ; |
public class GetMaintenanceWindowExecutionTaskInvocationRequestMarshaller { /** * Marshall the given parameter object . */
public void marshall ( GetMaintenanceWindowExecutionTaskInvocationRequest getMaintenanceWindowExecutionTaskInvocationRequest , ProtocolMarshaller protocolMarshaller ) { } } | if ( getMaintenanceWindowExecutionTaskInvocationRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( getMaintenanceWindowExecutionTaskInvocationRequest . getWindowExecutionId ( ) , WINDOWEXECUTIONID_BINDING ) ; protocolMarshaller . marshall ( getMaintenanceWindowExecutionTaskInvocationRequest . getTaskId ( ) , TASKID_BINDING ) ; protocolMarshaller . marshall ( getMaintenanceWindowExecutionTaskInvocationRequest . getInvocationId ( ) , INVOCATIONID_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class ExpressionRuleUserList {
    /**
     * Gets the rule value for this ExpressionRuleUserList.
     *
     * @return rule Boolean rule that defines this user list. The rule consists of a list of rule
     *         item groups, and each rule item group consists of a list of rule items. All the rule
     *         item groups are ORed together for evaluation before version V201705. Starting from
     *         version V201705, the group operator is based on {@link Rule#getRuleType()}.
     *         This field is selected by default.
     *         <span class="constraint Selectable">This field can be selected using the value
     *         "ExpressionListRule".</span>
     *         <span class="constraint Required">This field is required and should not be
     *         {@code null} when it is contained within {@link Operator}s: ADD.</span>
     */
    public com.google.api.ads.adwords.axis.v201809.rm.Rule getRule() {
        return rule;
    }
}
public class ArgumentParser { /** * Handle the - - buildfile , - - file , - f argument */
private void handleArgBuildFile ( final Deque < String > args ) { } } | final String value = args . pop ( ) ; if ( value == null ) { throw new BuildException ( "You must specify a buildfile when using the --buildfile argument" ) ; } buildFile = new File ( value . replace ( '/' , File . separatorChar ) ) ; |
public class VectorPointer {
    /**
     * Moves the display pointers to the start of the block containing {@code index},
     * allocating fresh 32-slot node arrays along the way so the path is writable.
     * {@code xor} is {@code oldIndex ^ index}; the position of its highest set bit
     * tells how many 5-bit levels of the 32-ary trie differ between the old and new
     * positions, and therefore how deep the reallocation must reach. The trie may
     * also grow one level deeper when the new index overflows the current depth.
     *
     * @param index the element index whose block becomes current
     * @param xor   {@code oldIndex ^ index}
     * @throws IllegalArgumentException if the index requires more than 6 levels
     */
    public void gotoNextBlockStartWritable(int index, int xor) {
        // goto block start pos
        if (xor < (1 << 10)) { // level = 1: only bits 5..9 differ
            if (depth == 1) {
                // Grow the trie: lift the old leaf under a new level-1 node.
                display1 = new Object[32];
                display1[0] = display0;
                depth += 1;
            }
            display0 = new Object[32];
            display1[(index >> 5) & 31] = display0;
        } else if (xor < (1 << 15)) { // level = 2
            if (depth == 2) {
                display2 = new Object[32];
                display2[0] = display1;
                depth += 1;
            }
            // Rebuild the writable path from the leaf up, linking each new node
            // into its parent at the slot selected by the relevant 5 index bits.
            display0 = new Object[32];
            display1 = new Object[32];
            display1[(index >> 5) & 31] = display0;
            display2[(index >> 10) & 31] = display1;
        } else if (xor < (1 << 20)) { // level = 3
            if (depth == 3) {
                display3 = new Object[32];
                display3[0] = display2;
                depth += 1;
            }
            display0 = new Object[32];
            display1 = new Object[32];
            display2 = new Object[32];
            display1[(index >> 5) & 31] = display0;
            display2[(index >> 10) & 31] = display1;
            display3[(index >> 15) & 31] = display2;
        } else if (xor < (1 << 25)) { // level = 4
            if (depth == 4) {
                display4 = new Object[32];
                display4[0] = display3;
                depth += 1;
            }
            display0 = new Object[32];
            display1 = new Object[32];
            display2 = new Object[32];
            display3 = new Object[32];
            display1[(index >> 5) & 31] = display0;
            display2[(index >> 10) & 31] = display1;
            display3[(index >> 15) & 31] = display2;
            display4[(index >> 20) & 31] = display3;
        } else if (xor < (1 << 30)) { // level = 5
            if (depth == 5) {
                display5 = new Object[32];
                display5[0] = display4;
                depth += 1;
            }
            display0 = new Object[32];
            display1 = new Object[32];
            display2 = new Object[32];
            display3 = new Object[32];
            display4 = new Object[32];
            display1[(index >> 5) & 31] = display0;
            display2[(index >> 10) & 31] = display1;
            display3[(index >> 15) & 31] = display2;
            display4[(index >> 20) & 31] = display3;
            display5[(index >> 25) & 31] = display4;
        } else { // level = 6: beyond the maximum supported depth
            throw new IllegalArgumentException();
        }
    }
}
public class DefaultValidationResultsModel {
    /**
     * Replaces one validation message with another, firing change events for the
     * affected properties. If {@code messageToReplace} is not present, the
     * replacement is still added. When the two messages refer to different
     * properties, a change event is fired for each property.
     * TODO: test
     *
     * @param messageToReplace   the message to remove (if present)
     * @param replacementMessage the message to add in its place
     */
    public void replaceMessage(ValidationMessage messageToReplace, ValidationMessage replacementMessage) {
        ValidationResults oldValidationResults = validationResults;
        // Work on a copy so listeners never observe a half-updated results object.
        List newMessages = new ArrayList(oldValidationResults.getMessages());
        final boolean containsMessageToReplace = validationResults.getMessages().contains(messageToReplace);
        if (containsMessageToReplace) {
            newMessages.remove(messageToReplace);
        }
        newMessages.add(replacementMessage);
        validationResults = new DefaultValidationResults(newMessages);
        fireChangedEvents();
        // Notify the old message's property only when something was actually
        // removed AND the properties differ; the replacement's property is
        // always notified.
        if (containsMessageToReplace && !ObjectUtils.nullSafeEquals(messageToReplace.getProperty(), replacementMessage.getProperty())) {
            fireValidationResultsChanged(messageToReplace.getProperty());
        }
        fireValidationResultsChanged(replacementMessage.getProperty());
    }
}
public class DriverLoader {
    /**
     * Loads the specified class by registering the supplied paths to the class
     * loader and then registers the driver with the driver manager. The
     * pathToDriver argument is added to the class loader so that an external
     * driver can be loaded. Note, the pathToDriver can contain a list of paths
     * separated by the platform path separator so any dependencies can be added
     * as needed (NOTE(review): the original doc said "semi-colon separated", but
     * the code splits on {@code File.pathSeparator}, which is ':' on Unix —
     * confirm intended separator). If a path in the pathToDriver argument is a
     * directory, all files in the directory are added to the class path.
     *
     * @param className the fully qualified name of the desired class
     * @param pathToDriver the path to the JAR file containing the driver; note,
     *        this can be a path-separator separated list of paths
     * @return the loaded Driver
     * @throws DriverLoadException thrown if the driver cannot be loaded
     */
    @SuppressWarnings("StringSplitter")
    public static Driver load(String className, String pathToDriver) throws DriverLoadException {
        final ClassLoader parent = ClassLoader.getSystemClassLoader();
        final List<URL> urls = new ArrayList<>();
        final String[] paths = pathToDriver.split(File.pathSeparator);
        for (String path : paths) {
            final File file = new File(path);
            if (file.isDirectory()) {
                // Directories contribute every contained file to the class path.
                final File[] files = file.listFiles();
                if (files != null) {
                    for (File f : files) {
                        try {
                            urls.add(f.toURI().toURL());
                        } catch (MalformedURLException ex) {
                            LOGGER.debug("Unable to load database driver '{}'; invalid path provided '{}'", className, f.getAbsoluteFile(), ex);
                            throw new DriverLoadException("Unable to load database driver. Invalid path provided", ex);
                        }
                    }
                }
            } else if (file.exists()) {
                try {
                    urls.add(file.toURI().toURL());
                } catch (MalformedURLException ex) {
                    LOGGER.debug("Unable to load database driver '{}'; invalid path provided '{}'", className, file.getAbsoluteFile(), ex);
                    throw new DriverLoadException("Unable to load database driver. Invalid path provided", ex);
                }
            }
            // Nonexistent paths are silently skipped.
        }
        // Build the loader inside a privileged action so it works under a
        // security manager.
        final URLClassLoader loader = AccessController.doPrivileged(new PrivilegedAction<URLClassLoader>() {
            @Override
            public URLClassLoader run() {
                return new URLClassLoader(urls.toArray(new URL[urls.size()]), parent);
            }
        });
        return load(className, loader);
    }
}
public class BlockTableBox {
    /**
     * Goes through the list of child boxes and organizes them into captions,
     * the table itself, etc. Creates the inner {@code TableBox}, moves every
     * non-caption, non-positioned child into it, and finally adds the table
     * as a child of this block.
     */
    private void organizeContent() {
        table = new TableBox(el, g, ctx);
        table.adoptParent(this);
        table.setStyle(style);
        // Iterate with an explicit Iterator because children moved into the
        // table are removed from 'nested' in place.
        for (Iterator<Box> it = nested.iterator(); it.hasNext();) {
            Box box = it.next();
            if (box instanceof TableCaptionBox) {
                caption = (TableCaptionBox) box;
            } else if (box instanceof BlockBox && ((BlockBox) box).isPositioned()) {
                // positioned boxes are ignored
            } else // other elements belong to the table itself
            {
                table.addSubBox(box);
                box.setContainingBlockBox(table);
                box.setParent(table);
                it.remove();
                // Keep the child-count bookkeeping in sync with the removal.
                endChild--;
            }
        }
        addSubBox(table);
    }
}
public class Flowable { /** * Mirrors the one Publisher in an array of several Publishers that first either emits an item or sends
* a termination notification .
* < img width = " 640 " height = " 385 " src = " https : / / raw . github . com / wiki / ReactiveX / RxJava / images / rx - operators / amb . png " alt = " " >
* < dl >
* < dt > < b > Backpressure : < / b > < / dt >
* < dd > The operator itself doesn ' t interfere with backpressure which is determined by the winning
* { @ code Publisher } ' s backpressure behavior . < / dd >
* < dt > < b > Scheduler : < / b > < / dt >
* < dd > { @ code ambArray } does not operate by default on a particular { @ link Scheduler } . < / dd >
* < / dl >
* @ param < T > the common element type
* @ param sources
* an array of Publisher sources competing to react first . A subscription to each Publisher will
* occur in the same order as in this Iterable .
* @ return a Flowable that emits the same sequence as whichever of the source Publishers first
* emitted an item or sent a termination notification
* @ see < a href = " http : / / reactivex . io / documentation / operators / amb . html " > ReactiveX operators documentation : Amb < / a > */
@ CheckReturnValue @ BackpressureSupport ( BackpressureKind . PASS_THROUGH ) @ SchedulerSupport ( SchedulerSupport . NONE ) public static < T > Flowable < T > ambArray ( Publisher < ? extends T > ... sources ) { } } | ObjectHelper . requireNonNull ( sources , "sources is null" ) ; int len = sources . length ; if ( len == 0 ) { return empty ( ) ; } else if ( len == 1 ) { return fromPublisher ( sources [ 0 ] ) ; } return RxJavaPlugins . onAssembly ( new FlowableAmb < T > ( sources , null ) ) ; |
public class ReferenceField { /** * Get the referenced record ' s ID given the code .
* This method converts the code to the record ID by reading the secondary key of the referenced record .
* @ param strCode The code to convert ( ie . , " EMAIL _ TYPE " )
* @ return int The ID of the referenced record ( or 0 if not found ) . */
public int getIDFromCode ( String strCode ) { } } | int iID = 0 ; try { iID = Integer . parseInt ( strCode ) ; // Special case - if an integer , just convert it .
} catch ( NumberFormatException ex ) { iID = 0 ; } if ( iID == 0 ) { Record record = this . getReferenceRecord ( ) ; if ( record != null ) iID = record . getIDFromCode ( strCode ) ; } return iID ; |
public class ApiOvhXdsl { /** * Alter this object properties
* REST : PUT / xdsl / { serviceName } / modem / lan / { lanName } / dhcp / { dhcpName } / DHCPStaticAddresses / { MACAddress }
* @ param body [ required ] New object properties
* @ param serviceName [ required ] The internal name of your XDSL offer
* @ param lanName [ required ] Name of the LAN
* @ param dhcpName [ required ] Name of the DHCP
* @ param MACAddress [ required ] The MAC address of the device */
public void serviceName_modem_lan_lanName_dhcp_dhcpName_DHCPStaticAddresses_MACAddress_PUT ( String serviceName , String lanName , String dhcpName , String MACAddress , OvhDHCPStaticAddress body ) throws IOException { } } | String qPath = "/xdsl/{serviceName}/modem/lan/{lanName}/dhcp/{dhcpName}/DHCPStaticAddresses/{MACAddress}" ; StringBuilder sb = path ( qPath , serviceName , lanName , dhcpName , MACAddress ) ; exec ( qPath , "PUT" , sb . toString ( ) , body ) ; |
public class Path {
    /**
     * Find the provided key in the tree rooted at node, and store the root-to-key
     * path in this object.
     *
     * @param node the tree to search in
     * @param comparator the comparator defining the order on the tree
     * @param target the key to search for
     * @param mode the type of search to perform (exact/HIGHER/LOWER/FLOOR/...)
     * @param forwards if the path should be setup for forward or backward iteration
     * @param <V> the key type compared by {@code comparator}
     */
    <V> void find(Object[] node, Comparator<V> comparator, Object target, Op mode, boolean forwards) {
        // TODO: should not require parameter 'forwards' - consider modifying index to represent both
        // child and key position, as opposed to just key position (which necessitates a different value depending
        // on which direction you're moving in. Prerequisite for making Path public and using to implement general
        // search
        depth = -1;
        // Sentinel keys short-circuit to one end of the tree.
        if (target instanceof BTree.Special) {
            if (target == POSITIVE_INFINITY)
                moveEnd(node, forwards);
            else if (target == NEGATIVE_INFINITY)
                moveStart(node, forwards);
            else
                throw new AssertionError();
            return;
        }
        while (true) {
            int keyEnd = getKeyEnd(node);
            // search for the target in the current node
            int i = BTree.find(comparator, target, node, 0, keyEnd);
            if (i >= 0) {
                // exact match. transform exclusive bounds into the correct index by moving back or forwards one
                push(node, i);
                switch (mode) {
                    case HIGHER:
                        successor();
                        break;
                    case LOWER:
                        predecessor();
                }
                return;
            }
            // Negative result encodes the insertion point: decode it.
            i = -i - 1;
            // traverse into the appropriate child
            if (!isLeaf(node)) {
                push(node, forwards ? i - 1 : i);
                node = (Object[]) node[keyEnd + i];
                continue;
            }
            // bottom of the tree and still not found. pick the right index to satisfy Op
            switch (mode) {
                case FLOOR:
                case LOWER:
                    i--;
            }
            // Clamp to the node's bounds, stepping to the neighbouring key
            // when the chosen index falls off either end.
            if (i < 0) {
                push(node, 0);
                predecessor();
            } else if (i >= keyEnd) {
                push(node, keyEnd - 1);
                successor();
            } else {
                push(node, i);
            }
            return;
        }
    }
}
public class ObjectToJsonArrayBinding {
    /**
     * Reads a String value from the JDBC ResultSet at the bound column index
     * and converts it to a JsonArray via this binding's converter.
     *
     * @param ctx the binding context supplying the ResultSet and column index
     * @throws SQLException if reading the column fails
     */
    @Override
    public void get(BindingGetResultSetContext<JsonArray> ctx) throws SQLException {
        ctx.convert(converter()).value(ctx.resultSet().getString(ctx.index()));
    }
}
public class AbstractSmsService { /** * Returns the message of the request . If no message is specified the default message will be
* returned .
* @ param smsSendRequestDto the sms request
* @ return the message
* @ throws IllegalArgumentException if no message is specified at all */
protected String getMessage ( final SmsSendRequestDto smsSendRequestDto ) { } } | if ( StringUtils . isNotBlank ( smsSendRequestDto . getMessage ( ) ) ) { return smsSendRequestDto . getMessage ( ) ; } Validate . notEmpty ( defaultMessage , "defaultMessage must not be null or blank" ) ; return defaultMessage ; |
public class CmsImportVersion10 { /** * Adds the XML digester rules for resource access control entries . < p >
* @ param digester the digester to add the rules to
* @ param xpath the base xpath for the rules */
protected void addResourceAceRules ( Digester digester , String xpath ) { } } | String xp_ace = xpath + N_ACCESSCONTROL_ENTRIES + "/" + N_ACCESSCONTROL_ENTRY ; digester . addCallMethod ( xp_ace , "addAccessControlEntry" ) ; digester . addCallMethod ( xp_ace + "/" + N_ACCESSCONTROL_PRINCIPAL , "setAcePrincipalId" , 0 ) ; digester . addCallMethod ( xp_ace + "/" + N_FLAGS , "setAceFlags" , 0 ) ; String xp_perms = xp_ace + "/" + N_ACCESSCONTROL_PERMISSIONSET + "/" ; digester . addCallMethod ( xp_perms + N_ACCESSCONTROL_ALLOWEDPERMISSIONS , "setAcePermissionsAllowed" , 0 ) ; digester . addCallMethod ( xp_perms + N_ACCESSCONTROL_DENIEDPERMISSIONS , "setAcePermissionsDenied" , 0 ) ; |
public class Config13DefaultSources { /** * The classloader ' s loadResources method is used to locate resources of
* name { # link ConfigConstants . CONFIG _ PROPERTIES } as well as process environment
* variables and Java System . properties
* @ param classloader
* @ return the default sources found */
public static ArrayList < ConfigSource > getDefaultSources ( ClassLoader classloader ) { } } | ArrayList < ConfigSource > sources = new ArrayList < > ( ) ; sources . add ( new SystemConfigSource ( ) ) ; sources . add ( new EnvConfig13Source ( ) ) ; sources . add ( new AppPropertyConfigSource ( ) ) ; sources . add ( new ServerXMLVariableConfigSource ( ) ) ; sources . addAll ( getPropertiesFileConfigSources ( classloader ) ) ; return sources ; |
public class IPDImpl {
    /**
     * Resets the given structural feature to its default value, delegating
     * unknown feature IDs to the superclass.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void eUnset(int featureID) {
        switch (featureID) {
            case AfplibPackage.IPD__IOC_ADAT:
                setIOCAdat(IOC_ADAT_EDEFAULT);
                return;
            case AfplibPackage.IPD__IMAGE_DATA:
                setImageData(IMAGE_DATA_EDEFAULT);
                return;
        }
        super.eUnset(featureID);
    }
}
public class CommerceCountryUtil {
    /**
     * Returns the last commerce country in the ordered set where uuid = &#63; and companyId = &#63;.
     * Delegates to the persistence layer.
     *
     * @param uuid the uuid
     * @param companyId the company ID
     * @param orderByComparator the comparator to order the set by (optionally <code>null</code>)
     * @return the last matching commerce country, or <code>null</code> if a matching commerce country could not be found
     */
    public static CommerceCountry fetchByUuid_C_Last(String uuid, long companyId, OrderByComparator<CommerceCountry> orderByComparator) {
        return getPersistence().fetchByUuid_C_Last(uuid, companyId, orderByComparator);
    }
}
public class KeyVaultClientBaseImpl {
    /**
     * Creates a signature from a digest using the specified key.
     * The SIGN operation is applicable to asymmetric and symmetric keys stored in Azure Key Vault since this operation uses the private portion of the key. This operation requires the keys/sign permission.
     * This async variant simply wraps the service-response observable in a
     * {@link ServiceFuture} wired to the supplied callback.
     *
     * @param vaultBaseUrl The vault name, for example https://myvault.vault.azure.net.
     * @param keyName The name of the key.
     * @param keyVersion The version of the key.
     * @param algorithm The signing/verification algorithm identifier. For more information on possible algorithm types, see JsonWebKeySignatureAlgorithm. Possible values include: 'PS256', 'PS384', 'PS512', 'RS256', 'RS384', 'RS512', 'RSNULL', 'ES256', 'ES384', 'ES512', 'ES256K'
     * @param value the Base64Url value
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<KeyOperationResult> signAsync(String vaultBaseUrl, String keyName, String keyVersion, JsonWebKeySignatureAlgorithm algorithm, byte[] value, final ServiceCallback<KeyOperationResult> serviceCallback) {
        return ServiceFuture.fromResponse(signWithServiceResponseAsync(vaultBaseUrl, keyName, keyVersion, algorithm, value), serviceCallback);
    }
}
public class FullDTDReader {
    /**
     * Handles a reference to an undeclared (parameter) entity. An undeclared
     * parameter entity is a validity constraint (VC), not a well-formedness
     * constraint (WFC), so this reports a VC violation rather than a fatal
     * error, records the reference on the current attribute default (if any),
     * and notifies the DTD event listener.
     *
     * @param id the name of the undeclared entity
     * @throws XMLStreamException if reporting the violation results in an error
     */
    @Override
    protected void handleUndeclaredEntity(String id) throws XMLStreamException {
        _reportVCViolation("Undeclared parameter entity '" + id + "'.");
        if (mCurrAttrDefault != null) {
            // Remember the unresolved reference so the attribute default can
            // be flagged later, distinguishing PE vs GE context.
            Location loc = getLastCharLocation();
            if (mExpandingPE) {
                mCurrAttrDefault.addUndeclaredPE(id, loc);
            } else {
                mCurrAttrDefault.addUndeclaredGE(id, loc);
            }
        }
        if (mEventListener != null) {
            // GEs only matter when expanding...
            if (mExpandingPE) {
                mEventListener.dtdSkippedEntity("%" + id);
            }
        }
    }
}
public class CliParser { /** * Get the value of cveValidForHours .
* @ return the value of cveValidForHours */
public Integer getCveValidForHours ( ) { } } | final String v = line . getOptionValue ( ARGUMENT . CVE_VALID_FOR_HOURS ) ; if ( v != null ) { return Integer . parseInt ( v ) ; } return null ; |
public class AwsSecurityFindingFilters { /** * The canonical AWS external region name where this resource is located .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setResourceRegion ( java . util . Collection ) } or { @ link # withResourceRegion ( java . util . Collection ) } if you want
* to override the existing values .
* @ param resourceRegion
* The canonical AWS external region name where this resource is located .
* @ return Returns a reference to this object so that method calls can be chained together . */
public AwsSecurityFindingFilters withResourceRegion ( StringFilter ... resourceRegion ) { } } | if ( this . resourceRegion == null ) { setResourceRegion ( new java . util . ArrayList < StringFilter > ( resourceRegion . length ) ) ; } for ( StringFilter ele : resourceRegion ) { this . resourceRegion . add ( ele ) ; } return this ; |
public class AssertSoapFaultBuilder {
    /**
     * Set explicit SOAP fault validator implementation by bean name, resolved
     * from the given Spring application context.
     *
     * @param validatorName the bean name of a {@code SoapFaultValidator}
     * @param applicationContext the context to resolve the bean from
     * @return this builder, for fluent chaining
     */
    public AssertSoapFaultBuilder validator(String validatorName, ApplicationContext applicationContext) {
        action.setValidator(applicationContext.getBean(validatorName, SoapFaultValidator.class));
        return this;
    }
}
public class TransformerImpl {
    /**
     * Create a ContentHandler from a Result object and an OutputProperties.
     * Supports DOMResult (builds DOM nodes), SAXResult (feeds SAX events) and
     * StreamResult (serializes to a writer, stream or system-id file).
     *
     * @param outputTarget Where the transform result should go, should not be null.
     * @param format The OutputProperties object that will contain instructions
     *        on how to serialize the output.
     * @return A valid ContentHandler that will create the result tree when it
     *         is fed SAX events.
     * @throws TransformerException if the Result type is unsupported, no output
     *         destination is specified, or the output file cannot be opened
     */
    public SerializationHandler createSerializationHandler(Result outputTarget, OutputProperties format) throws TransformerException {
        SerializationHandler xoh;
        // If the Result object contains a Node, then create
        // a ContentHandler that will add nodes to the input node.
        org.w3c.dom.Node outputNode = null;
        if (outputTarget instanceof DOMResult) {
            outputNode = ((DOMResult) outputTarget).getNode();
            org.w3c.dom.Node nextSibling = ((DOMResult) outputTarget).getNextSibling();
            org.w3c.dom.Document doc;
            short type;
            if (null != outputNode) {
                type = outputNode.getNodeType();
                doc = (org.w3c.dom.Node.DOCUMENT_NODE == type) ? (org.w3c.dom.Document) outputNode : outputNode.getOwnerDocument();
            } else {
                // No target node supplied: create a fresh document and hand it
                // back through the DOMResult.
                boolean isSecureProcessing = m_stylesheetRoot.isSecureProcessing();
                doc = org.apache.xml.utils.DOMHelper.createDocument(isSecureProcessing);
                outputNode = doc;
                type = outputNode.getNodeType();
                ((DOMResult) outputTarget).setNode(outputNode);
            }
            DOMBuilder handler = (org.w3c.dom.Node.DOCUMENT_FRAGMENT_NODE == type) ? new DOMBuilder(doc, (org.w3c.dom.DocumentFragment) outputNode) : new DOMBuilder(doc, outputNode);
            if (nextSibling != null)
                handler.setNextSibling(nextSibling);
            String encoding = format.getProperty(OutputKeys.ENCODING);
            xoh = new ToXMLSAXHandler(handler, (LexicalHandler) handler, encoding);
        } else if (outputTarget instanceof SAXResult) {
            ContentHandler handler = ((SAXResult) outputTarget).getHandler();
            if (null == handler)
                throw new IllegalArgumentException("handler can not be null for a SAXResult");
            // Reuse the content handler as lexical handler when it supports it.
            LexicalHandler lexHandler;
            if (handler instanceof LexicalHandler)
                lexHandler = (LexicalHandler) handler;
            else
                lexHandler = null;
            String encoding = format.getProperty(OutputKeys.ENCODING);
            String method = format.getProperty(OutputKeys.METHOD);
            ToXMLSAXHandler toXMLSAXHandler = new ToXMLSAXHandler(handler, lexHandler, encoding);
            toXMLSAXHandler.setShouldOutputNSAttr(false);
            xoh = toXMLSAXHandler;
            String publicID = format.getProperty(OutputKeys.DOCTYPE_PUBLIC);
            String systemID = format.getProperty(OutputKeys.DOCTYPE_SYSTEM);
            if (systemID != null)
                xoh.setDoctypeSystem(systemID);
            if (publicID != null)
                xoh.setDoctypePublic(publicID);
            if (handler instanceof TransformerClient) {
                XalanTransformState state = new XalanTransformState();
                ((TransformerClient) handler).setTransformState(state);
                ((ToSAXHandler) xoh).setTransformState(state);
            }
        }
        // Otherwise, create a ContentHandler that will serialize the
        // result tree to either a stream or a writer.
        else if (outputTarget instanceof StreamResult) {
            StreamResult sresult = (StreamResult) outputTarget;
            try {
                SerializationHandler serializer = (SerializationHandler) SerializerFactory.getSerializer(format.getProperties());
                // Prefer an explicit writer, then an output stream, then a
                // system-id interpreted as a file URL.
                if (null != sresult.getWriter())
                    serializer.setWriter(sresult.getWriter());
                else if (null != sresult.getOutputStream())
                    serializer.setOutputStream(sresult.getOutputStream());
                else if (null != sresult.getSystemId()) {
                    // Strip the file: URL prefix, keeping a drive-letter colon
                    // (Windows) intact when present.
                    String fileURL = sresult.getSystemId();
                    if (fileURL.startsWith("file:///")) {
                        if (fileURL.substring(8).indexOf(":") > 0)
                            fileURL = fileURL.substring(8);
                        else
                            fileURL = fileURL.substring(7);
                    } else if (fileURL.startsWith("file:/")) {
                        if (fileURL.substring(6).indexOf(":") > 0)
                            fileURL = fileURL.substring(6);
                        else
                            fileURL = fileURL.substring(5);
                    }
                    // Kept in a field so the transformer can close it later.
                    m_outputStream = new java.io.FileOutputStream(fileURL);
                    serializer.setOutputStream(m_outputStream);
                    xoh = serializer;
                } else
                    throw new TransformerException(XSLMessages.createMessage(XSLTErrorResources.ER_NO_OUTPUT_SPECIFIED, null)); // "No output specified!");
                // handler = serializer.asContentHandler();
                // this.setSerializer(serializer);
                // NOTE: redundant with the assignment in the system-id branch
                // above, but harmless; kept as-is.
                xoh = serializer;
            }
            // catch (UnsupportedEncodingException uee)
            //   throw new TransformerException(uee);
            catch (IOException ioe) {
                throw new TransformerException(ioe);
            }
        } else {
            throw new TransformerException(XSLMessages.createMessage(XSLTErrorResources.ER_CANNOT_TRANSFORM_TO_RESULT_TYPE, new Object[] { outputTarget.getClass().getName() }));
            // "Can't transform to a Result of type "
            // + outputTarget.getClass().getName()
        }
        // before we forget, lets make the created handler hold a reference
        // to the current TransformImpl object
        xoh.setTransformer(this);
        SourceLocator srcLocator = getStylesheet();
        xoh.setSourceLocator(srcLocator);
        return xoh;
    }
}
public class SAXDriver { /** * < b > SAX2 < / b > : Assigns the specified property . Like SAX1 handlers , these
* may be changed at any time . */
@ Override public void setProperty ( String propertyId , Object value ) throws SAXNotRecognizedException , SAXNotSupportedException { } } | // see if the property is recognized
getProperty ( propertyId ) ; // Properties with a defined value , we just change it if we can .
if ( ( PROPERTY + "declaration-handler" ) . equals ( propertyId ) ) { if ( value == null ) { declHandler = base ; } else if ( ! ( value instanceof DeclHandler ) ) { throw new SAXNotSupportedException ( propertyId ) ; } else { declHandler = ( DeclHandler ) value ; } return ; } if ( ( PROPERTY + "lexical-handler" ) . equals ( propertyId ) ) { if ( value == null ) { lexicalHandler = base ; } else if ( ! ( value instanceof LexicalHandler ) ) { throw new SAXNotSupportedException ( propertyId ) ; } else { lexicalHandler = ( LexicalHandler ) value ; } return ; } throw new SAXNotSupportedException ( propertyId ) ; |
public class AmazonMachineLearningClient { /** * Returns a list of < code > DataSource < / code > that match the search criteria in the request .
* @ param describeDataSourcesRequest
* @ return Result of the DescribeDataSources operation returned by the service .
* @ throws InvalidInputException
* An error on the client occurred . Typically , the cause is an invalid input value .
* @ throws InternalServerException
* An error on the server occurred when trying to process a request .
* @ sample AmazonMachineLearning . DescribeDataSources */
@ Override public DescribeDataSourcesResult describeDataSources ( DescribeDataSourcesRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeDescribeDataSources ( request ) ; |
public class FullscreenVideoView { /** * SurfaceView methods */
@ Override synchronized public void surfaceCreated ( SurfaceHolder holder ) { } } | Log . d ( TAG , "surfaceCreated called = " + currentState ) ; if ( this . mediaPlayer != null ) { this . mediaPlayer . setDisplay ( surfaceHolder ) ; // If is not prepared yet - tryToPrepare ( )
if ( ! this . surfaceIsReady ) { this . surfaceIsReady = true ; if ( this . currentState == State . INITIALIZED || this . currentState == State . PREPARING ) tryToPrepare ( ) ; } } |
public class API { /** * Converts the given { @ code ApiResponse } to { @ code String } representation .
* This is expected to be used just for views and actions .
* @ param format the format to convert to .
* @ param name the name of the view or action .
* @ param res the { @ code ApiResponse } to convert .
* @ return the string representation of the { @ code ApiResponse } .
* @ throws ApiException if an error occurred while converting the response or if the format was not handled .
* @ see # validateFormatForViewAction ( Format ) */
private static String convertViewActionApiResponse ( Format format , String name , ApiResponse res ) throws ApiException { } } | switch ( format ) { case JSON : return res . toJSON ( ) . toString ( ) ; case JSONP : return getJsonpWrapper ( res . toJSON ( ) . toString ( ) ) ; case XML : return responseToXml ( name , res ) ; case HTML : return responseToHtml ( res ) ; default : // Should not happen , format validation should prevent this case . . .
logger . error ( "Unhandled format: " + format ) ; throw new ApiException ( ApiException . Type . INTERNAL_ERROR ) ; } |
public class Function { /** * Returns { @ code true } if the provided signature is valid for this
* function , { @ code false } otherwise .
* You can access the semantic status of invalid signatures ( those returning
* { @ code false } here ) via { @ link # getStatus ( Signature ) getStatus } .
* @ param sig { @ link Signature }
* @ return boolean
* @ see # getStatus ( Signature ) */
public boolean validSignature ( final Signature sig ) { } } | if ( signatures . contains ( sig ) ) return true ; for ( final Signature signature : signatures ) { final SemanticStatus status = signature . matches ( sig ) ; if ( status == VALID ) return true ; } return false ; |
public class PerformanceCachingGoogleCloudStorage {
    /**
     * Matches Google Cloud Storage's delimiter filtering.
     *
     * @param items the mutable list of items to filter. Items matching the filter conditions will be
     *     removed from this list.
     * @param bucketName the bucket name to filter for.
     * @param prefix the object name prefix to filter for.
     * @param delimiter the delimiter to filter on.
     */
    private void filter(
            List<GoogleCloudStorageItemInfo> items,
            String bucketName,
            @Nullable String prefix,
            @Nullable String delimiter)
            throws IOException {
        prefix = nullToEmpty(prefix);
        // if delimiter is not specified we don't need to filter-out subdirectories
        if (isNullOrEmpty(delimiter)) {
            // if prefix is not specified it means that we are listing all objects in the bucket
            // and we need to exclude bucket from the result
            if (prefix.isEmpty()) {
                Iterator<GoogleCloudStorageItemInfo> itr = items.iterator();
                while (itr.hasNext()) {
                    GoogleCloudStorageItemInfo item = itr.next();
                    if (item.isBucket()) {
                        itr.remove();
                        // NOTE(review): stops after the first bucket entry — assumes the listing
                        // contains at most one bucket item; confirm.
                        break;
                    }
                }
            }
            return;
        }
        // Names of "directories" implied by the delimiter (e.g. "a/b/" for object "a/b/c").
        HashSet<String> dirs = new HashSet<>();
        Iterator<GoogleCloudStorageItemInfo> itr = items.iterator();
        while (itr.hasNext()) {
            GoogleCloudStorageItemInfo item = itr.next();
            String objectName = item.getObjectName();
            // 1. Remove if bucket (means that listing objects in bucket):
            //    do not return bucket itself (prefix dir) to avoid infinite recursion
            // 2. Remove if doesn't start with the prefix.
            // 3. Remove prefix object if it ends with delimiter:
            //    do not return prefix dir to avoid infinite recursion.
            if (item.isBucket()
                    || !objectName.startsWith(prefix)
                    || (prefix.endsWith(delimiter) && objectName.equals(prefix))) {
                itr.remove();
            } else {
                // Retain if missing the delimiter after the prefix.
                int firstIndex = objectName.indexOf(delimiter, prefix.length());
                if (firstIndex != -1) {
                    // Remove if the first occurrence of the delimiter after the prefix isn't the last.
                    // Remove if the last occurrence of the delimiter isn't the end of the string.
                    int lastIndex = objectName.lastIndexOf(delimiter);
                    if (firstIndex != lastIndex || lastIndex != objectName.length() - 1) {
                        itr.remove();
                        // Remember the implied directory so it can be re-added as an entry below.
                        dirs.add(objectName.substring(0, firstIndex + 1));
                    }
                }
            }
        }
        // Remove non-implicit directories (i.e. have corresponding directory objects)
        for (GoogleCloudStorageItemInfo item : items) {
            dirs.remove(item.getObjectName());
        }
        if (dirs.isEmpty()) {
            return;
        }
        List<StorageResourceId> dirIds = new ArrayList<>(dirs.size());
        for (String dir : dirs) {
            dirIds.add(new StorageResourceId(bucketName, dir));
        }
        // Re-add implicit directories as synthetic entries when that option is enabled.
        boolean inferImplicitDirectories = delegateOptions.isInferImplicitDirectoriesEnabled();
        if (inferImplicitDirectories) {
            for (StorageResourceId dirId : dirIds) {
                items.add(GoogleCloudStorageItemInfo.createInferredDirectory(dirId));
            }
        }
    }
}
public class HttpServerUpgradeHandler { /** * Determines whether or not the message is an HTTP upgrade request . */
private static boolean isUpgradeRequest ( HttpObject msg ) { } } | return msg instanceof HttpRequest && ( ( HttpRequest ) msg ) . headers ( ) . get ( HttpHeaderNames . UPGRADE ) != null ; |
public class User { /** * Construct a new IRCUser from the give String . The String should be in the
* format { @ literal < nick > ! < user > @ < host > } . This format is the same as is
* used in IRC message prefixes
* @ param prefix The prefix to extract the information from
* @ return a new IRCUser object or null if the prefix could not be parsed */
public static User fromPrefix ( String prefix ) { } } | Matcher matcher = userPrefixPattern . matcher ( prefix ) ; if ( matcher . find ( ) ) { String nick = matcher . group ( 1 ) ; String user = matcher . group ( 4 ) ; String host = matcher . group ( 5 ) ; return new User ( nick , user , host ) ; } else { return null ; } |
public class ListIdentitiesResult { /** * An object containing a set of identities and associated mappings .
* @ param identities
* An object containing a set of identities and associated mappings . */
public void setIdentities ( java . util . Collection < IdentityDescription > identities ) { } } | if ( identities == null ) { this . identities = null ; return ; } this . identities = new java . util . ArrayList < IdentityDescription > ( identities ) ; |
public class DescribeSpotInstanceRequestsResult { /** * One or more Spot Instance requests .
* @ param spotInstanceRequests
* One or more Spot Instance requests . */
public void setSpotInstanceRequests ( java . util . Collection < SpotInstanceRequest > spotInstanceRequests ) { } } | if ( spotInstanceRequests == null ) { this . spotInstanceRequests = null ; return ; } this . spotInstanceRequests = new com . amazonaws . internal . SdkInternalList < SpotInstanceRequest > ( spotInstanceRequests ) ; |
public class ResourcesInner { /** * Deletes a resource by ID .
* @ param resourceId The fully qualified ID of the resource , including the resource name and resource type . Use the format , / subscriptions / { guid } / resourceGroups / { resource - group - name } / { resource - provider - namespace } / { resource - type } / { resource - name }
* @ param apiVersion The API version to use for the operation .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the { @ link ServiceResponse } object if successful . */
public Observable < Void > beginDeleteByIdAsync ( String resourceId , String apiVersion ) { } } | return beginDeleteByIdWithServiceResponseAsync ( resourceId , apiVersion ) . map ( new Func1 < ServiceResponse < Void > , Void > ( ) { @ Override public Void call ( ServiceResponse < Void > response ) { return response . body ( ) ; } } ) ; |
public class TaskAPI { /** * Updates the text of the task .
* @ param taskId
* The id of the task
* @ param text
* The new text of the task */
public void updateText ( int taskId , String text ) { } } | getResourceFactory ( ) . getApiResource ( "/task/" + taskId + "/text" ) . entity ( new TaskText ( text ) , MediaType . APPLICATION_JSON_TYPE ) . put ( ) ; |
public class JMXResource { /** * Execute an operation on an mbean .
* @ param formParams
* @ param key
* @ param jsonp
* @ param name */
@ POST @ Consumes ( MediaType . APPLICATION_FORM_URLENCODED ) @ Path ( "{key}/{op}" ) public Response invokeMbeanOperation ( MultivaluedMap < String , String > formParams , @ PathParam ( "key" ) String key , @ QueryParam ( "jsonp" ) String jsonp , @ PathParam ( "op" ) String name ) throws Exception { } } | LOG . info ( "invoke " + key + " op=" + name ) ; MBeanServer mBeanServer = ManagementFactory . getPlatformMBeanServer ( ) ; Map < String , String > params = new TreeMap < String , String > ( ) ; for ( Entry < String , List < String > > entry : formParams . entrySet ( ) ) { if ( entry . getKey ( ) . equals ( "op" ) ) continue ; if ( entry . getValue ( ) . size ( ) > 0 ) params . put ( entry . getKey ( ) , entry . getValue ( ) . get ( 0 ) ) ; else params . put ( entry . getKey ( ) , "" ) ; } ObjectName objName = new ObjectName ( key ) ; MBeanInfo info = mBeanServer . getMBeanInfo ( objName ) ; for ( MBeanOperationInfo op : info . getOperations ( ) ) { if ( op . getName ( ) . equals ( name ) ) { List < String > signature = new ArrayList < String > ( ) ; for ( MBeanParameterInfo s : op . getSignature ( ) ) { signature . add ( s . getType ( ) ) ; } Object result = mBeanServer . invoke ( objName , name , params . values ( ) . toArray ( new String [ params . size ( ) ] ) , signature . toArray ( new String [ signature . size ( ) ] ) ) ; JSONObject json = new JSONObject ( ) ; json . put ( "key" , key ) ; json . put ( "operation" , name ) ; if ( result != null ) { json . put ( "response" , result . toString ( ) ) ; } json . put ( "type" , op . getReturnType ( ) ) ; StringWriter out = new StringWriter ( ) ; if ( jsonp . isEmpty ( ) ) { json . write ( out ) ; } else { out . append ( jsonp ) . append ( "(" ) ; json . write ( out ) ; out . append ( ");" ) ; } return Response . ok ( out . toString ( ) ) . type ( MediaType . APPLICATION_JSON ) . build ( ) ; } } return Response . serverError ( ) . build ( ) ; |
public class Concept { /** * Add a keyword to the concept synset . */
public void addKeyword ( String keyword ) { } } | if ( taxonomy . concepts . containsKey ( keyword ) ) { throw new IllegalArgumentException ( String . format ( "Concept %s already exists." , keyword ) ) ; } taxonomy . concepts . put ( keyword , this ) ; if ( synset == null ) { synset = new TreeSet < > ( ) ; } synset . add ( keyword ) ; |
public class Foreground { /** * Called when the resolution changed .
* @ param width The new width .
* @ param height The new height . */
public final void setScreenSize ( int width , int height ) { } } | screenWidth = width ; screenHeight = height ; final double scaleH = width / ( double ) Scene . NATIVE . getWidth ( ) ; final double scaleV = height / ( double ) Scene . NATIVE . getHeight ( ) ; this . scaleH = scaleH ; this . scaleV = scaleV ; primary . updateMainY ( ) ; secondary . updateMainY ( ) ; |
public class EnumJsonDeserializer { /** * { @ inheritDoc } */
@ Override public E doDeserialize ( JsonReader reader , JsonDeserializationContext ctx , JsonDeserializerParameters params ) { } } | try { return Enum . valueOf ( enumClass , reader . nextString ( ) ) ; } catch ( IllegalArgumentException ex ) { if ( ctx . isReadUnknownEnumValuesAsNull ( ) ) { return null ; } throw ex ; } |
public class CiphererImpl { /** * Encrypts / decrypts a message based on the underlying mode of operation .
* @ param key the encryption key
* @ param initializationVector the initialization vector
* @ param message if in encryption mode , the clear - text message , otherwise
* the message to decrypt
* @ return if in encryption mode , the encrypted message , otherwise the
* decrypted message
* @ throws SymmetricEncryptionException on runtime errors
* @ see # setMode ( Mode ) */
public byte [ ] encrypt ( byte [ ] key , byte [ ] initializationVector , byte [ ] message ) { } } | try { IvParameterSpec initializationVectorSpec = new IvParameterSpec ( initializationVector ) ; final SecretKeySpec skey = new SecretKeySpec ( key , keyAlgorithm ) ; final Cipher cipher = ( ( ( provider == null ) || ( provider . length ( ) == 0 ) ) ? Cipher . getInstance ( cipherAlgorithm ) : Cipher . getInstance ( cipherAlgorithm , provider ) ) ; switch ( mode ) { case ENCRYPT : cipher . init ( Cipher . ENCRYPT_MODE , skey , initializationVectorSpec ) ; break ; case DECRYPT : cipher . init ( Cipher . DECRYPT_MODE , skey , initializationVectorSpec ) ; break ; default : throw new SymmetricEncryptionException ( "error encrypting/decrypting message: invalid mode; mode=" + mode ) ; } return cipher . doFinal ( message ) ; } catch ( Exception e ) { throw new SymmetricEncryptionException ( "error encrypting/decrypting message; mode=" + mode , e ) ; } |
public class RasterLayerComponentImpl {
    /**
     * Add image with a exception message in the PDF document.
     *
     * @param context
     *            PDF context
     * @param e
     *            exception to put in image
     */
    protected void addLoadError(PdfContext context, ImageException e) {
        Bbox imageBounds = e.getRasterImage().getBounds();
        // Convert raster pixels to PDF points (72 points per inch / raster resolution).
        float scaleFactor = (float) (72 / getMap().getRasterResolution());
        float width = (float) imageBounds.getWidth() * scaleFactor;
        float height = (float) imageBounds.getHeight() * scaleFactor;
        // subtract screen position of lower-left corner
        float x = (float) (imageBounds.getX() - rasterScale * bbox.getMinX()) * scaleFactor;
        // shift y to lower left corner, flip y to user space and subtract
        // screen position of lower-left
        // corner
        float y = (float) (-imageBounds.getY() - imageBounds.getHeight() - rasterScale * bbox.getMinY()) * scaleFactor;
        if (log.isDebugEnabled()) {
            log.debug("adding failed message=" + width + ",height=" + height + ",x=" + x + ",y=" + y);
        }
        // Vertical offset for stacking three lines of error text around the box center.
        float textHeight = context.getTextSize("failed", ERROR_FONT).getHeight() * 3f;
        Rectangle rec = new Rectangle(x, y, x + width, y + height);
        // Outline the failed tile in red, then draw the three localized error lines.
        context.strokeRectangle(rec, Color.RED, 0.5f);
        context.drawText(getNlsString("RasterLayerComponent.loaderror.line1"), ERROR_FONT,
                new Rectangle(x, y + textHeight, x + width, y + height), Color.RED);
        context.drawText(getNlsString("RasterLayerComponent.loaderror.line2"), ERROR_FONT, rec, Color.RED);
        context.drawText(getNlsString("RasterLayerComponent.loaderror.line3"), ERROR_FONT,
                new Rectangle(x, y - textHeight, x + width, y + height), Color.RED);
    }
}
public class OrientResourceAuthorizationStrategy { /** * Extract { @ link RequiredOrientResource } s from a Class
* @ param clazz Class to extract { @ link RequiredOrientResource } s from
* @ return statically defined { @ link RequiredOrientResource } s on specified class */
public RequiredOrientResource [ ] getRequiredOrientResources ( Class < ? > clazz ) { } } | RequiredOrientResources resources = clazz . getAnnotation ( RequiredOrientResources . class ) ; RequiredOrientResource singleResource = clazz . getAnnotation ( RequiredOrientResource . class ) ; if ( resources == null && singleResource == null ) return null ; if ( resources != null && singleResource == null ) return resources . value ( ) ; if ( resources == null && singleResource != null ) return new RequiredOrientResource [ ] { singleResource } ; if ( resources != null && singleResource != null ) { RequiredOrientResource [ ] ret = new RequiredOrientResource [ resources . value ( ) . length + 1 ] ; ret [ 0 ] = singleResource ; System . arraycopy ( resources . value ( ) , 0 , ret , 1 , resources . value ( ) . length ) ; return ret ; } return null ; |
public class AbstractElement {
    /**
     * The click function and wait based on the ExpectedCondition.
     *
     * @param expectedCondition
     *            {@code ExpectedCondition<?>} instance to be passed.
     * @return The return value of
     *         {@link org.openqa.selenium.support.ui.FluentWait#until(com.google.common.base.Function)} if the function
     *         returned something different from null or false before the timeout expired.<br>
     *
     *         <pre>
     *         Grid.driver().get("https://www.paypal.com");
     *         TextField userName = new TextField("login_email");
     *         TextField password = new TextField("login_password");
     *         Button btn = new Button("submit.x");
     *         userName.type("exampleId@paypal.com");
     *         password.type("123Abcde");
     *         btn.clickAndExpect(ExpectedConditions.titleIs("MyAccount - PayPal"));
     *         </pre>
     */
    public Object clickAndExpect(ExpectedCondition<?> expectedCondition) {
        // Notify listeners before performing the click.
        dispatcher.beforeClick(this, expectedCondition);
        getElement().click();
        // Optionally record the click in the GUI action log.
        if (Boolean.parseBoolean(Config.getConfigProperty(ConfigProperty.ENABLE_GUI_LOGGING))) {
            logUIAction(UIActions.CLICKED);
        }
        // If this element belongs to a page object, wait for that page to load first.
        if (parent != null) {
            WebDriverWaitUtils.waitUntilPageIsLoaded(parent.getCurrentPage());
        }
        validatePresenceOfAlert();
        // Configured timeout is divided by 1000 — presumably milliseconds to the
        // seconds WebDriverWait expects; confirm against Grid's configuration.
        long timeout = Grid.getExecutionTimeoutValue() / 1000;
        WebDriverWait wait = new WebDriverWait(Grid.driver(), timeout);
        // Block until the condition holds (or time out), then capture evidence.
        Object variable = wait.until(expectedCondition);
        processScreenShot();
        // Notify listeners after the click/wait completed.
        dispatcher.afterClick(this, expectedCondition);
        return variable;
    }
}
public class SavePlayerInfoApi { /** * 对外接口保存玩家信息
* @ param playerInfo 玩家信息
* @ param handler 结果回调 */
public void savePlayerInfo ( GamePlayerInfo playerInfo , final SaveInfoHandler handler ) { } } | HMSAgentLog . i ( "savePlayerInfo:playerInfo=" + StrUtils . objDesc ( playerInfo ) + " handler=" + StrUtils . objDesc ( handler ) ) ; this . playerInfo = playerInfo ; this . handler = handler ; this . retryTimes = MAX_RETRY_TIMES ; connect ( ) ; |
public class SharedDataContextUtils { /** * Recursively replace data references in the values in a map which contains either string , collection or Map
* values .
* @ param input input map
* @ param data context data
* @ return Map with all string values having references replaced */
public static < T extends ViewTraverse < T > > Map < String , Object > replaceDataReferences ( final Map < String , Object > input , final T currentContext , final BiFunction < Integer , String , T > viewMap , final Converter < String , String > converter , final MultiDataContext < T , DataContext > data , boolean failOnUnexpanded , boolean blankIfUnexpanded ) { } } | final HashMap < String , Object > output = new HashMap < > ( ) ; for ( final String s : input . keySet ( ) ) { Object o = input . get ( s ) ; output . put ( s , replaceDataReferencesInObject ( o , currentContext , viewMap , converter , data , failOnUnexpanded , blankIfUnexpanded ) ) ; } return output ; |
public class Ifc2x3tc1FactoryImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
public String convertIfcBSplineCurveFormToString ( EDataType eDataType , Object instanceValue ) { } } | return instanceValue == null ? null : instanceValue . toString ( ) ; |
public class DateTimeTableEditor { /** * setCellEditorValue , This sets the picker to an appropriate value for the supplied object . If
* the value is null , then picker will be cleared . If the value is a LocalDateTime instance ,
* then the picker will be set to that value . All other types ( including strings ) will be read
* or converted to a string with a maximum length of the first 100 characters . The date picker
* text will be set with the resulting string . */
public void setCellEditorValue ( Object value ) { } } | dateTimePicker . clear ( ) ; if ( value == null ) { return ; } if ( value instanceof LocalDateTime ) { LocalDateTime nativeValue = ( LocalDateTime ) value ; dateTimePicker . setDateTimePermissive ( nativeValue ) ; } else { String text = value . toString ( ) ; String shorterText = InternalUtilities . safeSubstring ( text , 0 , 100 ) ; dateTimePicker . datePicker . setText ( shorterText ) ; } |
public class KmeansCalculator { /** * クラスタリングの中心座標情報のマージを行い 、 マージ結果を新たな配列に設定して返す 。 < br >
* マッピングした中心点同士の平均値を取り 、 結果とする 。 < br >
* @ param baseCentroids ベース中心点配列
* @ param targetCentroids マージ対象中心点配列
* @ param resultMapping ベース学習モデルとマージ対象学習モデルの中心点マッピング
* @ return マージ結果中心点配列 */
public static double [ ] [ ] mergeCentroids ( double [ ] [ ] baseCentroids , double [ ] [ ] targetCentroids , Map < Integer , Integer > resultMapping ) { } } | // マッピングした中心点同士の平均値を取る
double [ ] [ ] mergedCentroids = new double [ resultMapping . size ( ) ] [ ] ; for ( Map . Entry < Integer , Integer > targetEntry : resultMapping . entrySet ( ) ) { double [ ] baseCentroid = baseCentroids [ targetEntry . getKey ( ) ] ; double [ ] targetCentroid = targetCentroids [ targetEntry . getValue ( ) ] ; mergedCentroids [ targetEntry . getKey ( ) ] = average ( baseCentroid , targetCentroid ) ; } return mergedCentroids ; |
public class ValidDBInstanceModificationsMessage { /** * Valid storage options for your DB instance .
* @ param storage
* Valid storage options for your DB instance . */
public void setStorage ( java . util . Collection < ValidStorageOptions > storage ) { } } | if ( storage == null ) { this . storage = null ; return ; } this . storage = new com . amazonaws . internal . SdkInternalList < ValidStorageOptions > ( storage ) ; |
public class Client { /** * Download object by metadata .
* @ param meta Object metadata for stream validation .
* @ param links Object links .
* @ param handler Stream handler .
* @ return Stream handler result .
* @ throws FileNotFoundException File not found exception if object don ' t exists on LFS server .
* @ throws IOException On some errors . */
@ NotNull public < T > T getObject ( @ Nullable final Meta meta , @ NotNull final Links links , @ NotNull final StreamHandler < T > handler ) throws IOException { } } | final Link link = links . getLinks ( ) . get ( LinkType . Download ) ; if ( link == null ) { throw new FileNotFoundException ( ) ; } return doRequest ( link , new ObjectGet < > ( inputStream -> handler . accept ( meta != null ? new InputStreamValidator ( inputStream , meta ) : inputStream ) ) , link . getHref ( ) ) ; |
public class DeviceUtils { /** * Static utility method that extracts the current device from the request attributes map .
* Encapsulates the { @ link HttpServletRequest # getAttribute ( String ) } lookup .
* @ param attributes the request attributes
* @ return the current device , or null if no device has been resolved for the request */
public static Device getCurrentDevice ( RequestAttributes attributes ) { } } | return ( Device ) attributes . getAttribute ( CURRENT_DEVICE_ATTRIBUTE , RequestAttributes . SCOPE_REQUEST ) ; |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.