signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class AWSCognitoIdentityProviderClient { /** * Deletes the attributes for a user . * @ param deleteUserAttributesRequest * Represents the request to delete user attributes . * @ return Result of the DeleteUserAttributes operation returned by the service . * @ throws ResourceNotFoundException * This exception is thrown when the Amazon Cognito service cannot find the requested resource . * @ throws InvalidParameterException * This exception is thrown when the Amazon Cognito service encounters an invalid parameter . * @ throws NotAuthorizedException * This exception is thrown when a user is not authorized . * @ throws TooManyRequestsException * This exception is thrown when the user has made too many requests for a given operation . * @ throws PasswordResetRequiredException * This exception is thrown when a password reset is required . * @ throws UserNotFoundException * This exception is thrown when a user is not found . * @ throws UserNotConfirmedException * This exception is thrown when a user is not confirmed successfully . * @ throws InternalErrorException * This exception is thrown when Amazon Cognito encounters an internal error . * @ sample AWSCognitoIdentityProvider . DeleteUserAttributes * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / cognito - idp - 2016-04-18 / DeleteUserAttributes " * target = " _ top " > AWS API Documentation < / a > */ @ Override public DeleteUserAttributesResult deleteUserAttributes ( DeleteUserAttributesRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeDeleteUserAttributes ( request ) ;
public class OrChainMatcher { /** * { @ inheritDoc } */ @ Override public boolean matches ( Object item ) { } }
for ( Matcher < ? > m : matchers ) { if ( m . matches ( item ) ) { return true ; } } return false ;
public class RSA { /** * 使用公钥对内容进行加密 * @ param contentByteArray * @ param publicKey * @ return */ public static byte [ ] encrypt ( byte [ ] contentByteArray , PublicKey publicKey ) { } }
try { Cipher cipher = Cipher . getInstance ( algorithm ) ; cipher . init ( Cipher . ENCRYPT_MODE , publicKey ) ; return cipher . doFinal ( contentByteArray ) ; } catch ( NoSuchAlgorithmException e ) { e . printStackTrace ( ) ; } catch ( NoSuchPaddingException e ) { e . printStackTrace ( ) ; } catch ( InvalidKeyException e ) { e . printStackTrace ( ) ; } catch ( BadPaddingException e ) { e . printStackTrace ( ) ; } catch ( IllegalBlockSizeException e ) { e . printStackTrace ( ) ; } return null ;
public class CommerceDiscountUtil {

    /**
     * Returns the first commerce discount in the ordered set where uuid = &#63; and companyId = &#63;.
     *
     * @param uuid the uuid
     * @param companyId the company ID
     * @param orderByComparator the comparator to order the set by (optionally <code>null</code>)
     * @return the first matching commerce discount
     * @throws NoSuchDiscountException if a matching commerce discount could not be found
     */
    public static CommerceDiscount findByUuid_C_First(String uuid, long companyId,
            OrderByComparator<CommerceDiscount> orderByComparator)
        throws com.liferay.commerce.discount.exception.NoSuchDiscountException {
        // Thin static facade: delegates straight to the persistence implementation.
        return getPersistence().findByUuid_C_First(uuid, companyId, orderByComparator);
    }
}
public class Application { /** * Get the current language . * @ param bCheckLocaleAlso If true , and language has not been set , return the system ' s language * @ return The current language code . */ public String getLanguage ( boolean bCheckLocaleAlso ) { } }
String strLanguage = this . getProperty ( Params . LANGUAGE ) ; if ( ( strLanguage == null ) || ( strLanguage . length ( ) == 0 ) ) if ( bCheckLocaleAlso ) return Locale . getDefault ( ) . getLanguage ( ) ; return strLanguage ;
public class ExtractFunctionExecutor { /** * 删除子CSS路径的内容 使用方法 : deleteChild ( div . ep - source ) * 括号内的参数为相对CSS路径的子路径 , 从CSS路径匹配的文本中删除子路径匹配的文本 * @ param text CSS路径抽取出来的文本 * @ param doc 根文档 * @ param cssPath CSS路径对象 * @ param parseExpression 抽取函数 * @ return 抽取函数处理之后的文本 */ public static String executeDeleteChild ( String text , Document doc , CssPath cssPath , String parseExpression ) { } }
LOGGER . debug ( "deleteChild抽取函数之前:" + text ) ; String parameter = parseExpression . replace ( "deleteChild(" , "" ) ; parameter = parameter . substring ( 0 , parameter . length ( ) - 1 ) ; Elements elements = doc . select ( cssPath . getCssPath ( ) + " " + parameter ) ; for ( Element element : elements ) { String t = element . text ( ) ; if ( StringUtils . isNotBlank ( t ) ) { LOGGER . debug ( "deleteChild抽取函数删除:" + t ) ; text = text . replace ( t , "" ) ; } } LOGGER . debug ( "deleteChild抽取函数之后:" + text ) ; return text ;
public class CustomerContactUrl { /** * Get Resource Url for DeleteAccountContact * @ param accountId Unique identifier of the customer account . * @ param contactId Unique identifer of the customer account contact being updated . * @ return String Resource Url */ public static MozuUrl deleteAccountContactUrl ( Integer accountId , Integer contactId ) { } }
UrlFormatter formatter = new UrlFormatter ( "/api/commerce/customer/accounts/{accountId}/contacts/{contactId}" ) ; formatter . formatUrl ( "accountId" , accountId ) ; formatter . formatUrl ( "contactId" , contactId ) ; return new MozuUrl ( formatter . getResourceUrl ( ) , MozuUrl . UrlLocation . TENANT_POD ) ;
public class DirectoryEntry { /** * Checks that this entry exists and links to a directory , throwing an exception if not . * @ return this * @ throws NoSuchFileException if this entry does not exist * @ throws NotDirectoryException if this entry does not link to a directory */ public DirectoryEntry requireDirectory ( Path pathForException ) throws NoSuchFileException , NotDirectoryException { } }
requireExists ( pathForException ) ; if ( ! file ( ) . isDirectory ( ) ) { throw new NotDirectoryException ( pathForException . toString ( ) ) ; } return this ;
public class UrlTileGenerator { /** * Set the tile format * @ param tileFormat * tile format */ public void setTileFormat ( TileFormatType tileFormat ) { } }
if ( tileFormat == null ) { tileFormat = TileFormatType . STANDARD ; } else { switch ( tileFormat ) { case STANDARD : case TMS : this . tileFormat = tileFormat ; break ; default : throw new GeoPackageException ( "Unsupported Tile Format Type for URL Tile Generation: " + tileFormat ) ; } }
public class JobBase { /** * Increment the given counter by the given incremental value If the counter * does not exist , one is created with value 0. * @ param name * the counter name * @ param inc * the incremental value * @ return the updated value . */ protected Double addDoubleValue ( Object name , double inc ) { } }
Double val = this . doubleCounters . get ( name ) ; Double retv = null ; if ( val == null ) { retv = new Double ( inc ) ; } else { retv = new Double ( val . doubleValue ( ) + inc ) ; } this . doubleCounters . put ( name , retv ) ; return retv ;
public class ConcurrentConveyor {

    /**
     * Called by the drainer thread to signal that it has failed and will drain
     * no more items from the queue.
     *
     * @param t the drainer's failure; must not be {@code null}
     * @throws NullPointerException if {@code t} is {@code null}
     */
    public final void drainerFailed(Throwable t) {
        if (t == null) {
            throw new NullPointerException("ConcurrentConveyor.drainerFailed(null)");
        }
        // NOTE(review): drainer is cleared before the departure cause is recorded;
        // in this concurrent class the statement order may be visibility-sensitive
        // for observers of these fields — do not reorder without confirming.
        drainer = null;
        drainerDepartureCause = t;
    }
}
public class CatalogSchemaTools {

    /**
     * Convert a Table catalog object into the proper SQL DDL, including all indexes,
     * constraints, and foreign key references.
     * Also returns just the CREATE TABLE statement, since, like all good methods,
     * it should have two purposes... It would be nice to have a separate method to
     * just generate the CREATE TABLE, but we use that pass to also figure out what
     * separate constraint and index SQL DDL needs to be generated, so instead, we
     * opt to build the CREATE TABLE DDL separately as we go here, and then fill it
     * in to the StringBuilder being used to construct the full canonical DDL at the
     * appropriate time.
     *
     * @param sb the schema being built (canonical DDL accumulator; mutated in place)
     * @param catalog_tbl object to be analyzed
     * @param viewQuery the Query if this Table is a View (null for a plain table)
     * @param isExportOnly is this an export table
     * @param streamPartitionColumn stream partition column
     * @param streamTarget export target name, if this Table is an Export Table
     * @return SQL schema text representing the CREATE TABLE statement for the table
     */
    public static String toSchema(StringBuilder sb, Table catalog_tbl, String viewQuery,
            boolean isExportOnly, String streamPartitionColumn, String streamTarget) {
        assert(!catalog_tbl.getColumns().isEmpty());
        boolean tableIsView = (viewQuery != null);

        // We need the intermediate results of building the table schema string so that
        // we can return the full CREATE TABLE statement, so accumulate it separately.
        final StringBuilder table_sb = new StringBuilder();

        // Indexes/constraints emitted inline here must not be emitted again below.
        final Set<Index> skip_indexes = new HashSet<>();
        final Set<Constraint> skip_constraints = new HashSet<>();

        // ---- Statement header: VIEW / STREAM / TABLE ----
        if (tableIsView) {
            table_sb.append("CREATE VIEW ").append(catalog_tbl.getTypeName()).append(" (");
        }
        else {
            if (TableType.isStream(catalog_tbl.getTabletype())) {
                table_sb.append("CREATE STREAM ").append(catalog_tbl.getTypeName());
                if (streamPartitionColumn != null && viewQuery == null) {
                    table_sb.append(" PARTITION ON COLUMN ").append(streamPartitionColumn);
                }
                // Default target means no target.
                if (streamTarget != null &&
                        !streamTarget.equalsIgnoreCase(Constants.DEFAULT_EXPORT_CONNECTOR_NAME) &&
                        TableType.isStream(catalog_tbl.getTabletype())) {
                    table_sb.append(" EXPORT TO TARGET ").append(streamTarget);
                }
            }
            else {
                table_sb.append("CREATE TABLE ").append(catalog_tbl.getTypeName());
            }
            table_sb.append(" (");
        }

        // ---- Columns ----
        // 'add' is the separator prefixed before each column/constraint entry.
        String add = "\n";
        for (Column catalog_col : CatalogUtil.getSortedCatalogItems(catalog_tbl.getColumns(), "index")) {
            VoltType col_type = VoltType.get((byte) catalog_col.getType());
            if (tableIsView) {
                // Views list only the column names.
                table_sb.append(add).append(spacer).append(catalog_col.getTypeName());
                add = ",\n";
                continue;
            }
            // Column name, SQL type, and optional size qualifier for variable-length types.
            table_sb.append(add).append(spacer).append(catalog_col.getTypeName()).append(" ")
                    .append(col_type.toSQLString())
                    .append(col_type.isVariableLength() && catalog_col.getSize() > 0 ?
                            "(" + catalog_col.getSize() +
                            (catalog_col.getInbytes() ? " BYTES" : "") + ")" : "");

            // ---- Default value ----
            String defaultvalue = catalog_col.getDefaultvalue();
            boolean nullable = catalog_col.getNullable();
            // TODO: Shouldn't have to check whether the string contains "null".
            if (defaultvalue == null) {
            }
            else if (defaultvalue.toLowerCase().equals("null") && nullable) {
                defaultvalue = null;
            }
            else {
                if (col_type == VoltType.TIMESTAMP) {
                    if (defaultvalue.startsWith("CURRENT_TIMESTAMP")) {
                        defaultvalue = "CURRENT_TIMESTAMP";
                    }
                    else {
                        // Stored as microseconds since epoch; render as a quoted
                        // 'yyyy-MM-dd HH:mm:ss.uuuuuu' literal.
                        assert(defaultvalue.matches("[0-9]+"));
                        long epoch = Long.parseLong(defaultvalue);
                        Date d = new Date(epoch / 1000);
                        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
                        defaultvalue = "\'" + sdf.format(d) + "." +
                                StringUtils.leftPad(String.valueOf(epoch % 1000000), 6, "0") + "\'";
                    }
                }
                else {
                    // TODO: Escape strings properly.
                    defaultvalue = defaultvalue.replace("\'", "\'\'");
                    defaultvalue = "'" + defaultvalue + "'";
                }
            }
            if (defaultvalue == null) {
                table_sb.append((!nullable ? " NOT NULL" : ""));
            }
            else {
                table_sb.append(" DEFAULT ").append(defaultvalue != null ? defaultvalue : "NULL")
                        .append(!nullable ? " NOT NULL" : "");
            }

            // ---- Single-column constraints ----
            for (ConstraintRef catalog_const_ref : catalog_col.getConstraints()) {
                Constraint catalog_const = catalog_const_ref.getConstraint();
                ConstraintType const_type = ConstraintType.get(catalog_const.getType());

                // Check if there is another column in our table with the same constraint.
                // If there is, then we need to add it to the end of the table definition.
                boolean found = false;
                for (Column catalog_other_col : catalog_tbl.getColumns()) {
                    if (catalog_other_col.equals(catalog_col)) continue;
                    if (catalog_other_col.getConstraints().getIgnoreCase(catalog_const.getTypeName()) != null) {
                        found = true;
                        break;
                    }
                }
                if (!found) {
                    switch (const_type) {
                        case FOREIGN_KEY: {
                            Table catalog_fkey_tbl = catalog_const.getForeignkeytable();
                            Column catalog_fkey_col = null;
                            for (ColumnRef ref : catalog_const.getForeignkeycols()) {
                                catalog_fkey_col = ref.getColumn();
                                break; // Nasty hack to get first item
                            }
                            assert(catalog_fkey_col != null);
                            table_sb.append(" REFERENCES ").append(catalog_fkey_tbl.getTypeName())
                                    .append(" (").append(catalog_fkey_col.getTypeName()).append(")");
                            skip_constraints.add(catalog_const);
                            break;
                        }
                        default:
                            // Nothing for now
                    }
                }
            }
            add = ",\n";
        }

        // ---- Table-level constraints ----
        for (Constraint catalog_const : catalog_tbl.getConstraints()) {
            if (skip_constraints.contains(catalog_const)) continue;
            ConstraintType const_type = ConstraintType.get(catalog_const.getType());

            // Primary Keys / Unique Constraints
            if (const_type == ConstraintType.PRIMARY_KEY || const_type == ConstraintType.UNIQUE) {
                Index catalog_idx = catalog_const.getIndex();
                if (!tableIsView) {
                    // Get the ConstraintType.
                    table_sb.append(add).append(spacer);
                    if (!catalog_const.getTypeName().startsWith(HSQLInterface.AUTO_GEN_PREFIX)) {
                        table_sb.append("CONSTRAINT ").append(catalog_const.getTypeName()).append(" ");
                    }
                    if (const_type == ConstraintType.PRIMARY_KEY || const_type == ConstraintType.UNIQUE) {
                        if (const_type == ConstraintType.PRIMARY_KEY) {
                            table_sb.append("PRIMARY KEY (");
                        }
                        else {
                            if (catalog_idx.getAssumeunique()) {
                                table_sb.append("ASSUMEUNIQUE (");
                            }
                            else {
                                table_sb.append("UNIQUE (");
                            }
                        }
                        String col_add = "";
                        // Expression-based vs. plain column-list index.
                        if (catalog_idx.getExpressionsjson() != null && !catalog_idx.getExpressionsjson().equals("")) {
                            String exprStrings = new String();
                            StmtTargetTableScan tableScan = new StmtTargetTableScan(catalog_tbl);
                            try {
                                List<AbstractExpression> expressions =
                                        AbstractExpression.fromJSONArrayString(catalog_idx.getExpressionsjson(), tableScan);
                                String sep = "";
                                for (AbstractExpression expr : expressions) {
                                    exprStrings += sep + expr.explain(catalog_tbl.getTypeName());
                                    sep = ",";
                                }
                            }
                            catch (JSONException e) {
                                // NOTE(review): parse failure silently emits an empty
                                // expression list here — presumably unreachable for a
                                // valid catalog; confirm before relying on it.
                            }
                            table_sb.append(col_add).append(exprStrings);
                        }
                        else {
                            for (ColumnRef catalog_colref :
                                    CatalogUtil.getSortedCatalogItems(catalog_idx.getColumns(), "index")) {
                                table_sb.append(col_add).append(catalog_colref.getColumn().getTypeName());
                                col_add = ", ";
                            } // FOR
                        }
                        table_sb.append(")");
                    }
                }
                if (catalog_idx.getTypeName().startsWith(HSQLInterface.AUTO_GEN_PREFIX) ||
                        catalog_idx.getTypeName().startsWith(HSQLInterface.AUTO_GEN_MATVIEW)) {
                    skip_indexes.add(catalog_idx);
                }
            // Foreign Key
            }
            else if (const_type == ConstraintType.FOREIGN_KEY) {
                Table catalog_fkey_tbl = catalog_const.getForeignkeytable();
                String col_add = "";
                String our_columns = "";
                String fkey_columns = "";
                for (ColumnRef catalog_colref : catalog_const.getForeignkeycols()) {
                    // The name of the ColumnRef is the column in our base table.
                    Column our_column = catalog_tbl.getColumns().getIgnoreCase(catalog_colref.getTypeName());
                    assert(our_column != null);
                    our_columns += col_add + our_column.getTypeName();
                    Column fkey_column = catalog_colref.getColumn();
                    assert(fkey_column != null);
                    fkey_columns += col_add + fkey_column.getTypeName();
                    col_add = ", ";
                }
                table_sb.append(add).append(spacer + "CONSTRAINT ").append(catalog_const.getTypeName())
                        .append(" FOREIGN KEY (").append(our_columns).append(") REFERENCES ")
                        .append(catalog_fkey_tbl.getTypeName()).append(" (").append(fkey_columns).append(")");
            }
            skip_constraints.add(catalog_const);
        }

        // ---- Row-limit clause ----
        if (catalog_tbl.getTuplelimit() != Integer.MAX_VALUE) {
            table_sb.append(add).append(spacer + "LIMIT PARTITION ROWS ")
                    .append(String.valueOf(catalog_tbl.getTuplelimit()));
            String deleteStmt = CatalogUtil.getLimitPartitionRowsDeleteStmt(catalog_tbl);
            if (deleteStmt != null) {
                if (deleteStmt.endsWith(";")) {
                    // StatementCompiler appends the semicolon; we don't want it here.
                    deleteStmt = deleteStmt.substring(0, deleteStmt.length() - 1);
                }
                table_sb.append("\n" + spacer + spacer + "EXECUTE (").append(deleteStmt).append(")");
            }
        }

        // ---- Statement tail: view query, or close paren plus optional TTL clause ----
        if (viewQuery != null) {
            table_sb.append("\n) AS \n");
            table_sb.append(spacer).append(viewQuery).append(";\n");
        }
        else {
            table_sb.append("\n)");
            TimeToLive ttl = catalog_tbl.getTimetolive().get(TimeToLiveVoltDB.TTL_NAME);
            if (ttl != null) {
                table_sb.append(" USING TTL " + ttl.getTtlvalue() + " ");
                if (ttl.getTtlunit() != null) {
                    table_sb.append(ttl.getTtlunit());
                }
                table_sb.append(" ON COLUMN " + ttl.getTtlcolumn().getTypeName());
                table_sb.append(" BATCH_SIZE " + ttl.getBatchsize());
                table_sb.append(" MAX_FREQUENCY " + ttl.getMaxfrequency() + " ");
                if (ttl.getMigrationtarget() != null && !"".equals(ttl.getMigrationtarget())) {
                    assert(TableType.isPersistentMigrate(catalog_tbl.getTabletype()));
                    table_sb.append(" MIGRATE TO TARGET " + ttl.getMigrationtarget() + " ");
                }
            }
            table_sb.append(";\n");
        }

        // We've built the full CREATE TABLE statement for this table;
        // append the generated table schema to the canonical DDL StringBuilder.
        sb.append(table_sb.toString());

        // ---- Partition Table for regular tables (non-streams) ----
        if (catalog_tbl.getPartitioncolumn() != null && viewQuery == null && !isExportOnly) {
            sb.append("PARTITION TABLE ").append(catalog_tbl.getTypeName())
                    .append(" ON COLUMN ").append(catalog_tbl.getPartitioncolumn().getTypeName()).append(";\n");
        }

        // ---- All other indexes (those not already covered by inline constraints) ----
        for (Index catalog_idx : catalog_tbl.getIndexes()) {
            if (skip_indexes.contains(catalog_idx)) continue;
            if (catalog_idx.getUnique()) {
                if (catalog_idx.getAssumeunique()) {
                    sb.append("CREATE ASSUMEUNIQUE INDEX ");
                }
                else {
                    sb.append("CREATE UNIQUE INDEX ");
                }
            }
            else {
                // MIGRATE flag does not imply any changes on the "CREATE INDEX" syntax.
                sb.append("CREATE INDEX ");
            }
            sb.append(catalog_idx.getTypeName()).append(" ON ")
                    .append(catalog_tbl.getTypeName()).append(" (");
            add = "";
            String jsonstring = catalog_idx.getExpressionsjson();
            if (jsonstring.isEmpty()) {
                for (ColumnRef catalog_colref :
                        CatalogUtil.getSortedCatalogItems(catalog_idx.getColumns(), "index")) {
                    sb.append(add).append(catalog_colref.getColumn().getTypeName());
                    add = ", ";
                }
            }
            else {
                List<AbstractExpression> indexedExprs = null;
                try {
                    indexedExprs = AbstractExpression.fromJSONArrayString(jsonstring,
                            new StmtTargetTableScan(catalog_tbl));
                }
                catch (JSONException e) {
                    e.printStackTrace();
                }
                if (indexedExprs != null) {
                    for (AbstractExpression expr : indexedExprs) {
                        sb.append(add).append(expr.explain(catalog_tbl.getTypeName()));
                        add = ", ";
                    }
                }
            }
            sb.append(")");
            // Partial-index predicate, if present.
            String jsonPredicate = catalog_idx.getPredicatejson();
            if (!jsonPredicate.isEmpty()) {
                try {
                    AbstractExpression predicate = AbstractExpression.fromJSONString(jsonPredicate,
                            new StmtTargetTableScan(catalog_tbl));
                    sb.append(" WHERE ").append(predicate.explain(catalog_tbl.getTypeName()));
                }
                catch (JSONException e) {
                    e.printStackTrace();
                }
            }
            sb.append(";\n");
        }

        // ---- DR declaration ----
        if (catalog_tbl.getIsdred()) {
            sb.append("DR TABLE ").append(catalog_tbl.getTypeName()).append(";\n");
        }
        sb.append("\n");

        // Canonical DDL generation for this table is done; now just hand the CREATE TABLE
        // statement to whoever might be interested (DDLCompiler, I'm looking in your direction).
        return table_sb.toString();
    }
}
public class LinkedServersInner {

    /**
     * Adds a linked server to the Redis cache (requires Premium SKU).
     *
     * @param resourceGroupName The name of the resource group.
     * @param name The name of the Redis cache.
     * @param linkedServerName The name of the linked server that is being added to the Redis cache.
     * @param parameters Parameters supplied to the Create Linked server operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the RedisLinkedServerWithPropertiesInner object if successful.
     */
    public RedisLinkedServerWithPropertiesInner beginCreate(String resourceGroupName, String name,
            String linkedServerName, RedisLinkedServerCreateParameters parameters) {
        // Synchronous facade over the async variant: block on the single emitted
        // ServiceResponse and unwrap its body.
        return beginCreateWithServiceResponseAsync(resourceGroupName, name, linkedServerName, parameters)
                .toBlocking().single().body();
    }
}
public class FreeMarkerConfigurationFactory { /** * Determine a FreeMarker TemplateLoader for the given path . * @ param templateLoaderPath the path to load templates from * @ return an appropriate TemplateLoader * @ throws IOException if an I / O error has occurred * @ see freemarker . cache . FileTemplateLoader */ protected TemplateLoader getTemplateLoaderForPath ( String templateLoaderPath ) throws IOException { } }
if ( templateLoaderPath . startsWith ( ResourceUtils . CLASSPATH_URL_PREFIX ) ) { String basePackagePath = templateLoaderPath . substring ( ResourceUtils . CLASSPATH_URL_PREFIX . length ( ) ) ; if ( log . isDebugEnabled ( ) ) { log . debug ( "Template loader path [" + templateLoaderPath + "] resolved to class path [" + basePackagePath + "]" ) ; } return new ClassTemplateLoader ( environment . getClassLoader ( ) , basePackagePath ) ; } else if ( templateLoaderPath . startsWith ( ResourceUtils . FILE_URL_PREFIX ) ) { File file = new File ( templateLoaderPath . substring ( ResourceUtils . FILE_URL_PREFIX . length ( ) ) ) ; if ( log . isDebugEnabled ( ) ) { log . debug ( "Template loader path [" + templateLoaderPath + "] resolved to file path [" + file . getAbsolutePath ( ) + "]" ) ; } return new FileTemplateLoader ( file ) ; } else { File file = new File ( environment . getBasePath ( ) , templateLoaderPath ) ; if ( log . isDebugEnabled ( ) ) { log . debug ( "Template loader path [" + templateLoaderPath + "] resolved to file path [" + file . getAbsolutePath ( ) + "]" ) ; } return new FileTemplateLoader ( file ) ; }
public class ICUHumanize { /** * Same as { @ link # pluralize ( String , Number , Object . . . ) } for the target * locale . * @ param locale * The target locale * @ param pattern * The formatting pattern with plural rules * @ param value * The number that will trigger plural category * @ param args * Optional arguments for the formatting pattern * @ return a properly formatted message */ public static String pluralize ( final Locale locale , final String pattern , final Number value , final Object ... args ) { } }
return withinLocale ( new Callable < String > ( ) { @ Override public String call ( ) throws Exception { return pluralize ( pattern , value , args ) ; } } , locale ) ;
public class CommerceAddressLocalServiceUtil {

    /**
     * Creates a new commerce address with the primary key. Does not add the
     * commerce address to the database.
     *
     * @param commerceAddressId the primary key for the new commerce address
     * @return the new commerce address
     */
    public static com.liferay.commerce.model.CommerceAddress createCommerceAddress(long commerceAddressId) {
        // Thin static facade: delegates straight to the local service implementation.
        return getService().createCommerceAddress(commerceAddressId);
    }
}
public class PdfSignatureAppearance {

    /**
     * This is the first method to be called when using external signatures. The general
     * sequence is: preClose(), getDocumentBytes() and close().
     * If calling preClose() <B>don't</B> call PdfStamper.close().
     * If using an external signature, <CODE>exclusionSizes</CODE> must contain at least
     * the <CODE>PdfName.CONTENTS</CODE> key with the size that it will take in the
     * document. Note that due to the hex string coding this size should be
     * byte_size*2 + 2.
     *
     * @param exclusionSizes a <CODE>HashMap</CODE> with names and sizes to be excluded in the
     *            signature calculation. The key is a <CODE>PdfName</CODE> and the value an
     *            <CODE>Integer</CODE>. At least the <CODE>PdfName.CONTENTS</CODE> must be present
     * @throws IOException on error
     * @throws DocumentException on error
     */
    public void preClose(HashMap exclusionSizes) throws IOException, DocumentException {
        if (preClosed)
            throw new DocumentException("Document already pre closed.");
        preClosed = true;
        AcroFields af = writer.getAcroFields();
        String name = getFieldName();
        boolean fieldExists = !(isInvisible() || isNewField());
        PdfIndirectReference refSig = writer.getPdfIndirectReference();
        writer.setSigFlags(3);

        // Either reuse the existing signature-field widget or create a fresh one.
        if (fieldExists) {
            PdfDictionary widget = af.getFieldItem(name).getWidget(0);
            writer.markUsed(widget);
            widget.put(PdfName.P, writer.getPageReference(getPage()));
            widget.put(PdfName.V, refSig);
            PdfObject obj = PdfReader.getPdfObjectRelease(widget.get(PdfName.F));
            int flags = 0;
            if (obj != null && obj.isNumber())
                flags = ((PdfNumber) obj).intValue();
            // Lock the widget so the signed field cannot be altered afterwards.
            flags |= PdfAnnotation.FLAGS_LOCKED;
            widget.put(PdfName.F, new PdfNumber(flags));
            PdfDictionary ap = new PdfDictionary();
            ap.put(PdfName.N, getAppearance().getIndirectReference());
            widget.put(PdfName.AP, ap);
        }
        else {
            PdfFormField sigField = PdfFormField.createSignature(writer);
            sigField.setFieldName(name);
            sigField.put(PdfName.V, refSig);
            sigField.setFlags(PdfAnnotation.FLAGS_PRINT | PdfAnnotation.FLAGS_LOCKED);
            int pagen = getPage();
            // Invisible signatures get a zero-size widget rectangle.
            if (!isInvisible())
                sigField.setWidget(getPageRect(), null);
            else
                sigField.setWidget(new Rectangle(0, 0), null);
            sigField.setAppearance(PdfAnnotation.APPEARANCE_NORMAL, getAppearance());
            sigField.setPage(pagen);
            writer.addAnnotation(sigField, pagen);
        }

        // Maps each excluded dictionary key to the PdfLiteral placeholder written for it;
        // the literals' positions are read back below to compute the /ByteRange.
        exclusionLocations = new HashMap();
        if (cryptoDictionary == null) {
            // Internal signing path: build a standard signature dictionary for the filter.
            if (PdfName.ADOBE_PPKLITE.equals(getFilter()))
                sigStandard = new PdfSigGenericPKCS.PPKLite(getProvider());
            else if (PdfName.ADOBE_PPKMS.equals(getFilter()))
                sigStandard = new PdfSigGenericPKCS.PPKMS(getProvider());
            else if (PdfName.VERISIGN_PPKVS.equals(getFilter()))
                sigStandard = new PdfSigGenericPKCS.VeriSign(getProvider());
            else
                throw new IllegalArgumentException("Unknown filter: " + getFilter());
            sigStandard.setExternalDigest(externalDigest, externalRSAdata, digestEncryptionAlgorithm);
            if (getReason() != null)
                sigStandard.setReason(getReason());
            if (getLocation() != null)
                sigStandard.setLocation(getLocation());
            if (getContact() != null)
                sigStandard.setContact(getContact());
            sigStandard.put(PdfName.M, new PdfDate(getSignDate()));
            sigStandard.setSignInfo(getPrivKey(), getCertChain(), getCrlList());
            PdfString contents = (PdfString) sigStandard.get(PdfName.CONTENTS);
            // Hex-encoding doubles the byte count, plus 2 for the delimiters.
            PdfLiteral lit = new PdfLiteral((contents.toString().length() +
                    (PdfName.ADOBE_PPKLITE.equals(getFilter()) ? 0 : 64)) * 2 + 2);
            exclusionLocations.put(PdfName.CONTENTS, lit);
            sigStandard.put(PdfName.CONTENTS, lit);
            lit = new PdfLiteral(80);
            exclusionLocations.put(PdfName.BYTERANGE, lit);
            sigStandard.put(PdfName.BYTERANGE, lit);
            if (certificationLevel > 0) {
                addDocMDP(sigStandard);
            }
            if (signatureEvent != null)
                signatureEvent.getSignatureDictionary(sigStandard);
            writer.addToBody(sigStandard, refSig, false);
        }
        else {
            // External signing path: reserve space per the caller-supplied sizes.
            PdfLiteral lit = new PdfLiteral(80);
            exclusionLocations.put(PdfName.BYTERANGE, lit);
            cryptoDictionary.put(PdfName.BYTERANGE, lit);
            for (Iterator it = exclusionSizes.entrySet().iterator(); it.hasNext();) {
                Map.Entry entry = (Map.Entry) it.next();
                PdfName key = (PdfName) entry.getKey();
                Integer v = (Integer) entry.getValue();
                lit = new PdfLiteral(v.intValue());
                exclusionLocations.put(key, lit);
                cryptoDictionary.put(key, lit);
            }
            if (certificationLevel > 0)
                addDocMDP(cryptoDictionary);
            if (signatureEvent != null)
                signatureEvent.getSignatureDictionary(cryptoDictionary);
            writer.addToBody(cryptoDictionary, refSig, false);
        }

        if (certificationLevel > 0) {
            // add DocMDP entry to root
            PdfDictionary docmdp = new PdfDictionary();
            docmdp.put(new PdfName("DocMDP"), refSig);
            writer.reader.getCatalog().put(new PdfName("Perms"), docmdp);
        }
        writer.close(stamper.getMoreInfo());

        // ---- Compute the /ByteRange from the placeholder literal positions. ----
        // range holds pairs of (offset, length); index 0 is implicitly offset 0.
        range = new int[exclusionLocations.size() * 2];
        int byteRangePosition = ((PdfLiteral) exclusionLocations.get(PdfName.BYTERANGE)).getPosition();
        exclusionLocations.remove(PdfName.BYTERANGE);
        int idx = 1;
        for (Iterator it = exclusionLocations.values().iterator(); it.hasNext();) {
            PdfLiteral lit = (PdfLiteral) it.next();
            int n = lit.getPosition();
            range[idx++] = n;
            range[idx++] = lit.getPosLength() + n;
        }
        Arrays.sort(range, 1, range.length - 1);
        // Convert absolute end offsets into lengths relative to the preceding start.
        for (int k = 3; k < range.length - 2; k += 2)
            range[k] -= range[k - 1];

        // Patch the rendered "[off len ...]" ByteRange array back into the output,
        // either in the in-memory buffer or in the temp file.
        if (tempFile == null) {
            bout = sigout.getBuffer();
            boutLen = sigout.size();
            range[range.length - 1] = boutLen - range[range.length - 2];
            ByteBuffer bf = new ByteBuffer();
            bf.append('[');
            for (int k = 0; k < range.length; ++k)
                bf.append(range[k]).append(' ');
            bf.append(']');
            System.arraycopy(bf.getBuffer(), 0, bout, byteRangePosition, bf.size());
        }
        else {
            try {
                raf = new RandomAccessFile(tempFile, "rw");
                int boutLen = (int) raf.length();
                range[range.length - 1] = boutLen - range[range.length - 2];
                ByteBuffer bf = new ByteBuffer();
                bf.append('[');
                for (int k = 0; k < range.length; ++k)
                    bf.append(range[k]).append(' ');
                bf.append(']');
                raf.seek(byteRangePosition);
                raf.write(bf.getBuffer(), 0, bf.size());
            }
            catch (IOException e) {
                // Best-effort cleanup before rethrowing the original failure.
                try { raf.close(); } catch (Exception ee) { }
                try { tempFile.delete(); } catch (Exception ee) { }
                throw e;
            }
        }
    }
}
public class AbstractContextSelectToolbarStatusPanel { /** * Method used to setup the toolbar elements . Should not usually be overriden . Instead , use the * { @ link # addToolBarElements ( JToolBar , short , int ) } method to add elements at various points . * @ param toolbar the tool bar of the status panel */ protected void setupToolbarElements ( JToolBar toolbar ) { } }
int x = 0 ; Insets insets = new Insets ( 0 , 4 , 0 , 2 ) ; x = this . addToolBarElements ( toolbar , TOOLBAR_LOCATION_START , x ) ; toolbar . add ( new JLabel ( Constant . messages . getString ( panelPrefix + ".toolbar.context.label" ) ) , LayoutHelper . getGBC ( x ++ , 0 , 1 , 0 , insets ) ) ; toolbar . add ( getContextSelectComboBox ( ) , LayoutHelper . getGBC ( x ++ , 0 , 1 , 0 , insets ) ) ; x = this . addToolBarElements ( toolbar , TOOLBAR_LOCATION_AFTER_CONTEXTS_SELECT , x ) ; toolbar . add ( new JLabel ( ) , LayoutHelper . getGBC ( x ++ , 0 , 1 , 1.0 ) ) ; // Spacer if ( hasOptions ( ) ) { toolbar . add ( getOptionsButton ( ) , LayoutHelper . getGBC ( x ++ , 0 , 1 , 0 , insets ) ) ; } this . addToolBarElements ( toolbar , TOOLBAR_LOCATION_END , x ) ;
public class AbstractQuotaPersister { /** * { @ inheritDoc } */ public void removeGroupOfNodesAndDataSize ( String repositoryName , String workspaceName , String patternPath ) { } }
removeGroupOfNodesQuota ( repositoryName , workspaceName , patternPath ) ; // removes data size for all nodes matched by pattern // only if only quota was not set explicitly for ( String nodePath : getAllTrackedNodes ( repositoryName , workspaceName ) ) { if ( PathPatternUtils . acceptName ( patternPath , nodePath ) ) { try { getNodeQuota ( repositoryName , workspaceName , nodePath ) ; } catch ( UnknownQuotaLimitException e ) { removeNodeDataSize ( repositoryName , workspaceName , nodePath ) ; } } }
public class Compiler { /** * This is a convenience method which creates a compiler and then invokes the < code > process < / code > method . * @ param options compiler options to use for the created compiler * @ param objectNames object template names to compile / build ; these will be looked - up on the load path * @ param tplFiles absolute file names of templates to process * @ return results from the compilation / build */ public static CompilerResults run ( CompilerOptions options , List < String > objectNames , Collection < File > tplFiles ) { } }
return ( new Compiler ( options , objectNames , tplFiles ) ) . process ( ) ;
public class BeanDeploymentArchiveImpl {

    /**
     * Lazily parses and caches this archive's beans.xml.
     * Returns {@code null} if the archive has no beans.xml resource.
     * NOTE(review): the lazy initialization is not synchronized — presumably only
     * called during single-threaded bootstrap; confirm.
     *
     * @see org.jboss.weld.bootstrap.spi.BeanDeploymentArchive#getBeansXml()
     */
    @Override
    public BeansXml getBeansXml() {
        if (this.beansXml == null) {
            Resource beansXmlResource = archive.getBeansXml();
            if (beansXmlResource != null) {
                URL beansXmlUrl = beansXmlResource.getURL();
                Bootstrap bootstrap = getCDIDeployment().getBootstrap();
                final ClassLoader origTCCL = getContextClassLoader();
                try {
                    // Must use this class's loader as the context classloader to ensure
                    // that we load Liberty's XML parser rather than any parser defined
                    // in the application.
                    setContextClassLoader(BeanDeploymentArchiveImpl.class.getClassLoader());
                    beansXml = bootstrap.parse(beansXmlUrl);
                } finally {
                    // Always restore the original context classloader.
                    setContextClassLoader(origTCCL);
                }
            }
        }
        return this.beansXml;
    }
}
public class SimpleDoubleSubmitManager { @ Override public synchronized void resetToken ( Class < ? > groupType ) { } }
getSessionTokenMap ( ) . ifPresent ( tokenMap -> { showRemovingToken ( groupType , tokenMap ) ; tokenMap . remove ( groupType ) ; if ( tokenMap . isEmpty ( ) ) { removeTokenFromSession ( ) ; } } ) . orElse ( ( ) -> { removeTokenFromSession ( ) ; } ) ;
public class HtmlDoctype { /** * < p > Return the value of the < code > rootElement < / code > property . < / p > * < p > Contents : < div class = " changed _ added _ 2_1 " > * The root XML element * < / div > */ public java . lang . String getRootElement ( ) { } }
return ( java . lang . String ) getStateHelper ( ) . eval ( PropertyKeys . rootElement ) ;
public class WireTraceWrapper {

    /**
     * Wraps a pre-existing ChaiProvider with a wire-trace proxy instance.
     *
     * @param chaiProvider a pre-existing {@code ChaiProvider}
     * @return a wrapped {@code ChaiProvider} instance
     * @throws IllegalStateException if wire tracing is not enabled in the chai configuration
     */
    static ChaiProviderImplementor forProvider(final ChaiProviderImplementor chaiProvider) {
        // Check that wire tracing is enabled in the chai configuration.
        final boolean watchDogEnabled = Boolean.parseBoolean(chaiProvider.getChaiConfiguration().getSetting(ChaiSetting.WIRETRACE_ENABLE));
        if (!watchDogEnabled) {
            final String errorStr = "attempt to obtain WireTrace wrapper when watchdog is not enabled in chai config";
            LOGGER.warn(errorStr);
            throw new IllegalStateException(errorStr);
        }
        // Guard against double-wrapping an already proxied provider.
        // NOTE(review): a java.lang.reflect.Proxy instance is normally not an
        // instanceof its InvocationHandler class (WireTraceWrapper), so this guard
        // may never trigger — verify how the proxy and handler classes relate.
        if (Proxy.isProxyClass(chaiProvider.getClass()) && chaiProvider instanceof WireTraceWrapper) {
            LOGGER.warn("attempt to obtain WireTraceWrapper wrapper for already wrapped Provider.");
            return chaiProvider;
        }
        // The returned proxy routes every interface call through this wrapper.
        return (ChaiProviderImplementor) Proxy.newProxyInstance(chaiProvider.getClass().getClassLoader(), chaiProvider.getClass().getInterfaces(), new WireTraceWrapper(chaiProvider));
    }
}
public class JsonDeserializerParameters { /** * < p > addIgnoredProperty < / p > * @ param ignoredProperty a { @ link java . lang . String } object . * @ return a { @ link com . github . nmorel . gwtjackson . client . JsonDeserializerParameters } object . */ public JsonDeserializerParameters addIgnoredProperty ( String ignoredProperty ) { } }
if ( null == ignoredProperties ) { ignoredProperties = new HashSet < String > ( ) ; } ignoredProperties . add ( ignoredProperty ) ; return this ;
public class DoubleStream { /** * Performs an action for each element of this stream . * < p > This is a terminal operation . * @ param action the action to be performed on each element */ public void forEach ( @ NotNull DoubleConsumer action ) { } }
while ( iterator . hasNext ( ) ) { action . accept ( iterator . nextDouble ( ) ) ; }
public class SystemProperties {

    /**
     * Gets a {@code long} system property value.
     *
     * @param clazz the {@linkplain Class} to derive the property key from.
     * @param key the property key (relative to the submitted {@linkplain Class}) to get.
     * @param defaultValue the default value to return in case the property is not defined.
     * @return the property value or the submitted default value if the property is not defined.
     */
    public static long longValue(Class<?> clazz, String key, long defaultValue) {
        // The full property name is the class name directly concatenated with the key.
        // NOTE(review): no '.' separator is inserted here, so callers presumably pass
        // a key that already starts with one (e.g. ".timeout") — confirm at call sites.
        return longValue(clazz.getName() + key, defaultValue);
    }
}
public class AssociateDeviceWithPlacementRequestMarshaller {

    /**
     * Marshalls the given request into the wire representation via the provided
     * protocol marshaller (generated AWS SDK code).
     *
     * @throws SdkClientException if the request is {@code null} or marshalling fails
     */
    public void marshall(AssociateDeviceWithPlacementRequest associateDeviceWithPlacementRequest, ProtocolMarshaller protocolMarshaller) {
        if (associateDeviceWithPlacementRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Bind each request member to its wire location.
            protocolMarshaller.marshall(associateDeviceWithPlacementRequest.getProjectName(), PROJECTNAME_BINDING);
            protocolMarshaller.marshall(associateDeviceWithPlacementRequest.getPlacementName(), PLACEMENTNAME_BINDING);
            protocolMarshaller.marshall(associateDeviceWithPlacementRequest.getDeviceId(), DEVICEID_BINDING);
            protocolMarshaller.marshall(associateDeviceWithPlacementRequest.getDeviceTemplateName(), DEVICETEMPLATENAME_BINDING);
        } catch (Exception e) {
            // Wrap any failure, preserving the original exception as the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class StreamBuilderImpl {

    /**
     * reduce((x, y) -> op) with a sync binary function:
     * <code><pre>
     * s = t1;
     * s = op(s, t2);
     * s = op(s, t3);
     * result = s;
     * </pre></code>
     *
     * @param op the synchronous binary reduction operator
     * @return a builder stage performing the pairwise reduction
     */
    @Override
    public StreamBuilderImpl<T, U> reduce(BinaryOperatorSync<T> op) {
        // Chain a synchronous reduce stage onto this builder.
        return new ReduceOpSync<>(this, op);
    }
}
public class NodeTypeDataHierarchyHolder { /** * Returns the < i > direct < / i > subtypes of this node type in the node type * inheritance hierarchy , that is , those which actually declared this node * type in their list of supertypes . * @ return */ public Set < InternalQName > getDeclaredSubtypes ( final InternalQName nodeTypeName ) { } }
Set < InternalQName > resultSet = new HashSet < InternalQName > ( ) ; for ( Map . Entry < InternalQName , NodeTypeHolder > entry : nodeTypes . entrySet ( ) ) { InternalQName [ ] declaredSupertypeNames = entry . getValue ( ) . nodeType . getDeclaredSupertypeNames ( ) ; for ( int i = 0 ; i < declaredSupertypeNames . length ; i ++ ) { if ( nodeTypeName . equals ( declaredSupertypeNames [ i ] ) ) resultSet . add ( entry . getKey ( ) ) ; } } return resultSet ;
public class CalculateDateExtensions {

    /**
     * Returns a new {@link Date} lying the given number of days before the given date.
     * The input date object is not modified.
     *
     * @param date the base Date object
     * @param substractDays the number of days to subtract
     * @return the resulting Date object
     */
    public static Date substractDaysFromDate(final Date date, final int substractDays) {
        final Calendar calendar = Calendar.getInstance();
        calendar.setTime(date);
        // Adding a negative day count is equivalent to subtracting.
        calendar.add(Calendar.DATE, -substractDays);
        return calendar.getTime();
    }
}
public class Tag {

    /**
     * Creates a {@code Tag} from the given key, value and metadata.
     *
     * @param key the tag key.
     * @param value the tag value.
     * @param tagMetadata the tag metadata.
     * @return a {@code Tag}.
     * @since 0.20
     */
    public static Tag create(TagKey key, TagValue value, TagMetadata tagMetadata) {
        // Delegates to the AutoValue-generated immutable implementation.
        return new AutoValue_Tag(key, value, tagMetadata);
    }
}
public class QueryManagerImpl {

    /**
     * Creates a query from the given statement in the given query language,
     * delegating to the search manager.
     *
     * @param statement the query statement
     * @param language the query language
     * @return the created query
     * @throws InvalidQueryException if the statement is invalid for the language
     * @throws RepositoryException on repository errors
     */
    public Query createQuery(String statement, String language) throws InvalidQueryException, RepositoryException {
        // Verify this manager is still usable before creating the query
        // (sanityCheck() presumably fails once the backing session is closed).
        sanityCheck();
        return searchMgr.createQuery(session, itemMgr, statement, language);
    }
}
public class DescribeEventCategoriesResult { /** * A list of event categories descriptions . * @ return A list of event categories descriptions . */ public java . util . List < EventCategoriesMap > getEventCategoriesMapList ( ) { } }
if ( eventCategoriesMapList == null ) { eventCategoriesMapList = new com . amazonaws . internal . SdkInternalList < EventCategoriesMap > ( ) ; } return eventCategoriesMapList ;
public class SessionDialog { /** * { @ inheritDoc } * < strong > Note : < / strong > Creation of UI Shared Contexts should be done before calling this method . * @ see # recreateUISharedContexts ( Session ) */ @ Override public void initParam ( Object session ) { } }
super . initParam ( session ) ; this . session = ( Session ) session ; for ( AbstractParamPanel panel : super . getPanels ( ) ) { if ( panel instanceof AbstractContextPropertiesPanel ) { initContextPanel ( ( AbstractContextPropertiesPanel ) panel ) ; } }
public class ShortExtensions { /** * The < code > identity equals < / code > operator . This is the equivalent to Java ' s < code > = = < / code > * operator . * @ param a a short . * @ param b a long . * @ return < code > a = = b < / code > * @ since 2.4 */ @ Pure @ Inline ( value = "($1 == $2)" , constantExpression = true ) public static boolean operator_tripleEquals ( short a , long b ) { } }
return a == b ;
public class GenericTypeResolver { /** * Determine the raw type for the given generic parameter type . * @ param genericType the generic type to resolve * @ param typeVariableMap the TypeVariable Map to resolved against * @ return the resolved raw type */ static Type getRawType ( Type genericType , Map < TypeVariable , Type > typeVariableMap ) { } }
Type resolvedType = genericType ; if ( genericType instanceof TypeVariable ) { TypeVariable tv = ( TypeVariable ) genericType ; resolvedType = typeVariableMap . get ( tv ) ; if ( resolvedType == null ) { resolvedType = extractBoundForTypeVariable ( tv ) ; } } if ( resolvedType instanceof ParameterizedType ) { return ( ( ParameterizedType ) resolvedType ) . getRawType ( ) ; } else { return resolvedType ; }
public class ProjectsInner {

    /**
     * Create or update project. The project resource is a nested resource representing
     * a stored migration project; the PUT method creates a new project or updates an
     * existing one.
     *
     * @param groupName Name of the resource group
     * @param serviceName Name of the service
     * @param projectName Name of the project
     * @param parameters Information about the project
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the ProjectInner object
     */
    public Observable<ProjectInner> createOrUpdateAsync(String groupName, String serviceName, String projectName, ProjectInner parameters) {
        // Unwrap the ServiceResponse envelope so subscribers receive the bare body.
        return createOrUpdateWithServiceResponseAsync(groupName, serviceName, projectName, parameters).map(new Func1<ServiceResponse<ProjectInner>, ProjectInner>() {
            @Override
            public ProjectInner call(ServiceResponse<ProjectInner> response) {
                return response.body();
            }
        });
    }
}
public class UpdateCenter { /** * Schedules a Jenkins upgrade . */ @ RequirePOST public void doUpgrade ( StaplerResponse rsp ) throws IOException , ServletException { } }
HudsonUpgradeJob job = new HudsonUpgradeJob ( getCoreSource ( ) , Jenkins . getAuthentication ( ) ) ; if ( ! Lifecycle . get ( ) . canRewriteHudsonWar ( ) ) { sendError ( "Jenkins upgrade not supported in this running mode" ) ; return ; } LOGGER . info ( "Scheduling the core upgrade" ) ; addJob ( job ) ; rsp . sendRedirect2 ( "." ) ;
public class RxSharedPreferences {

    /**
     * Create an enum preference for {@code key} with a default of {@code defaultValue}.
     *
     * @param key the preference key; must not be null
     * @param defaultValue the value emitted when the preference is unset; must not be null
     * @param enumClass the enum type used to convert stored values; must not be null
     * @return a reactive preference wrapping the enum value
     */
    @CheckResult
    @NonNull
    public <T extends Enum<T>> Preference<T> getEnum(@NonNull String key, @NonNull T defaultValue, @NonNull Class<T> enumClass) {
        checkNotNull(key, "key == null");
        checkNotNull(defaultValue, "defaultValue == null");
        checkNotNull(enumClass, "enumClass == null");
        // EnumAdapter handles (de)serialization of the enum value — presumably by
        // name; confirm in EnumAdapter.
        return new RealPreference<>(preferences, key, defaultValue, new EnumAdapter<>(enumClass), keyChanges);
    }
}
public class DescribeInputResult { /** * A collection of key - value pairs . * @ param tags * A collection of key - value pairs . * @ return Returns a reference to this object so that method calls can be chained together . */ public DescribeInputResult withTags ( java . util . Map < String , String > tags ) { } }
setTags ( tags ) ; return this ;
public class HookMethodManager {

    /**
     * Builds the stack lines for the code line at which the hook method was invoked,
     * rewriting the raw (actual) method names and line numbers captured from the JVM
     * stack into their logical (hooked) counterparts.
     *
     * @param hookedMethodSimpleName logical method name to report
     * @param actualHookedMethodSimpleName method name actually present on the JVM stack
     * @param hookedLine logical line number to report
     * @param actualHookedLine line number actually present on the JVM stack
     * @return the rewritten stack lines; asserted non-empty
     */
    private List<StackLine> getCodeLineHookedStackLines(final String hookedMethodSimpleName, final String actualHookedMethodSimpleName, final int hookedLine, final int actualHookedLine) {
        // this replacer replaces method name and line to the actual hooked method name and line
        LineReplacer replacer = new LineReplacer() {

            @Override
            public void replace(String classQualifiedName, String methodSimpleName, int line) {
                super.replace(classQualifiedName, methodSimpleName, line);
                // replacing root method name
                if (StringUtils.equals(methodSimpleName, currentActualRootMethodSimpleName)) {
                    replaceMethodSimpleName(currentRunResult.getRootMethod().getSimpleName());
                }
                // replacing hooked method name and line number (both must match)
                if (StringUtils.equals(methodSimpleName, actualHookedMethodSimpleName) && (line == actualHookedLine)) {
                    replaceMethodSimpleName(hookedMethodSimpleName);
                    replaceLine(hookedLine);
                }
            }
        };
        List<StackLine> result = StackLineUtils.getStackLines(srcTree, Thread.currentThread().getStackTrace(), replacer);
        assert result.size() > 0;
        return result;
    }
}
public class Beans {

    /**
     * Fills empty (null-valued), identically named public fields of {@code destination}
     * with the values of the corresponding public fields of {@code source}. Fields that
     * are already non-null in the destination, or that are missing/incompatible on the
     * source, are left untouched (best-effort copy).
     *
     * @param <T> the class of the destination object
     * @param destination a destination object
     * @param source a source object
     * @return the same destination object with fields filled by source
     */
    public static <T> T fill(T destination, Object source) {
        if (destination != source) {
            Class<?> sourceType = source.getClass();
            for (Field field : destination.getClass().getFields()) {
                try {
                    if (field.get(destination) == null) {
                        field.set(destination, sourceType.getField(field.getName()).get(source));
                    }
                // FIX: narrow the original blanket catch(Exception) to the failures
                // this best-effort copy expects: missing/inaccessible source fields
                // and incompatible field types.
                } catch (ReflectiveOperationException | IllegalArgumentException ignored) {
                    // Best-effort: skip fields the source cannot provide.
                }
            }
        }
        return destination;
    }
}
public class BoxRetentionPolicy { /** * Assigns this retention policy to folder . * @ param folder the folder to assign policy to . * @ return info about created assignment . */ public BoxRetentionPolicyAssignment . Info assignTo ( BoxFolder folder ) { } }
return BoxRetentionPolicyAssignment . createAssignmentToFolder ( this . getAPI ( ) , this . getID ( ) , folder . getID ( ) ) ;
public class FachwertFactory { /** * Validiert die uebergebenen Argumente mit Hilfe der angegebenen Klasse . * Viele Fachwert - Klassen haben eine ( statische ) validate - Methode , die * dafuer verwendet wird . Fehlt diese validate - Methode , wird der * Konstruktor fuer die Validierung herangezogen . Schlaegt die Validierung * fehl , wird eine { @ link javax . validation . ValidationException } geworfen . * Dies ist eine der wenigen Stelle , wo eine * Log - Ausgabe erscheinen kann . Hintergrund ist die Exception , die hier * gefangen , aber nicht weitergegeben wird . Im Log - Level " FINE " kann man * sich diese Exception zur Fehlersuche ausgeben . * @ param clazz Fachwert - Klasse * @ param args Argument ( e ) , die validiert werden */ public void validate ( Class < ? extends Fachwert > clazz , Object ... args ) { } }
Class [ ] argTypes = toTypes ( args ) ; try { Method method = clazz . getMethod ( "validate" , argTypes ) ; method . invoke ( null , args ) ; } catch ( InvocationTargetException ex ) { LOG . log ( Level . FINE , "Call of validate method of " + clazz + "failed:" , ex ) ; if ( ex . getTargetException ( ) instanceof ValidationException ) { throw ( ValidationException ) ex . getTargetException ( ) ; } getFachwert ( clazz , args ) ; } catch ( ReflectiveOperationException ex ) { LOG . log ( Level . FINE , "Cannot call validate method of " + clazz , ex ) ; getFachwert ( clazz , args ) ; }
public class ClassDescriptorDef {

    /**
     * Processes this class: ensures that all base types are processed, copies their
     * features (fields, references, collections) to this class, and applies
     * modifications (removes ignored features, changes declarations).
     *
     * @throws ConstraintException If a constraint has been violated
     */
    public void process() throws ConstraintException {
        ClassDescriptorDef otherClassDef;

        // Recursively process all direct base types first.
        for (Iterator it = getDirectBaseTypes(); it.hasNext();) {
            otherClassDef = (ClassDescriptorDef) it.next();
            if (!otherClassDef.hasBeenProcessed()) {
                otherClassDef.process();
            }
        }
        // Same for the types nested within this class.
        for (Iterator it = getNested(); it.hasNext();) {
            otherClassDef = ((NestedDef) it.next()).getNestedType();
            if (!otherClassDef.hasBeenProcessed()) {
                otherClassDef.process();
            }
        }

        // Features collected from base/nested types before being merged in.
        ArrayList newFields = new ArrayList();
        ArrayList newReferences = new ArrayList();
        ArrayList newCollections = new ArrayList();
        FieldDescriptorDef newFieldDef;
        ReferenceDescriptorDef newRefDef;
        CollectionDescriptorDef newCollDef;

        // adding base features
        if (getBooleanProperty(PropertyHelper.OJB_PROPERTY_INCLUDE_INHERITED, true)) {
            ArrayList baseTypes = new ArrayList();
            DefBase featureDef;

            addRelevantBaseTypes(this, baseTypes);
            for (Iterator it = baseTypes.iterator(); it.hasNext();) {
                cloneInheritedFeatures((ClassDescriptorDef) it.next(), newFields, newReferences, newCollections);
            }
            // Drop inherited fields that this class redefines itself.
            for (Iterator it = newFields.iterator(); it.hasNext();) {
                newFieldDef = (FieldDescriptorDef) it.next();
                featureDef = getFeature(newFieldDef.getName());
                if (featureDef != null) {
                    if (!getBooleanProperty(PropertyHelper.OJB_PROPERTY_IGNORE, false)) {
                        // we have the implicit constraint that an anonymous field cannot redefine/be redefined
                        // except if it is ignored
                        if ("anonymous".equals(featureDef.getProperty(PropertyHelper.OJB_PROPERTY_ACCESS))) {
                            throw new ConstraintException("The anonymous field " + featureDef.getName() + " in class " + getName() + " overrides an inherited field");
                        }
                        if ("anonymous".equals(newFieldDef.getProperty(PropertyHelper.OJB_PROPERTY_ACCESS))) {
                            throw new ConstraintException("The inherited anonymous field " + newFieldDef.getName() + " is overriden in class " + getName());
                        }
                    }
                    LogHelper.warn(true, ClassDescriptorDef.class, "process", "Class " + getName() + " redefines the inherited field " + newFieldDef.getName());
                    it.remove();
                }
            }
            // Drop redefined inherited references; "super" references are never inherited.
            for (Iterator it = newReferences.iterator(); it.hasNext();) {
                newRefDef = (ReferenceDescriptorDef) it.next();
                if ("super".equals(newRefDef.getName())) {
                    // we don't inherit super-references
                    it.remove();
                } else if (hasFeature(newRefDef.getName())) {
                    LogHelper.warn(true, ClassDescriptorDef.class, "process", "Class " + getName() + " redefines the inherited reference " + newRefDef.getName());
                    it.remove();
                }
            }
            // Drop inherited collections that are redefined here.
            for (Iterator it = newCollections.iterator(); it.hasNext();) {
                newCollDef = (CollectionDescriptorDef) it.next();
                if (hasFeature(newCollDef.getName())) {
                    LogHelper.warn(true, ClassDescriptorDef.class, "process", "Class " + getName() + " redefines the inherited collection " + newCollDef.getName());
                    it.remove();
                }
            }
        }
        // adding nested features
        for (Iterator it = getNested(); it.hasNext();) {
            cloneNestedFeatures((NestedDef) it.next(), newFields, newReferences, newCollections);
        }
        // Inherited/nested features are inserted before this class' own features.
        _fields.addAll(0, newFields);
        _references.addAll(0, newReferences);
        _collections.addAll(0, newCollections);
        sortFields();
        _hasBeenProcessed = true;
    }
}
public class Parameters { /** * Set a date value to the query parameter referenced by the given name . A query parameter * is defined by using the Expression ' s parameter ( String name ) function . * @ param name The parameter name . * @ param value The date value . * @ return The self object . */ @ NonNull public Parameters setDate ( @ NonNull String name , Date value ) { } }
return setValue ( name , value ) ;
public class ElementMatchers { /** * Matches a { @ link ByteCodeElement } that is accessible to a given { @ link java . lang . Class } . * @ param type The type that a matched byte code element is expected to be accessible to . * @ param < T > The type of the matched object . * @ return A matcher for a byte code element to be accessible to a given { @ code type } . */ public static < T extends ByteCodeElement > ElementMatcher . Junction < T > isAccessibleTo ( Class < ? > type ) { } }
return isAccessibleTo ( TypeDescription . ForLoadedType . of ( type ) ) ;
public class CmsLogChannelTable { /** * Returns log files to given Logger . < p > * @ param logger to read files for * @ return path of file */ private String getLogFiles ( Logger logger ) { } }
String test = "" ; int count = 0 ; // select the Appender from logger for ( Appender appender : logger . getAppenders ( ) . values ( ) ) { // only use file appenders if ( CmsLogFileApp . isFileAppender ( appender ) ) { String fileName = CmsLogFileApp . getFileName ( appender ) ; String temp = "" ; temp = fileName . substring ( fileName . lastIndexOf ( File . separatorChar ) + 1 ) ; test = test + temp ; count ++ ; break ; } } // iterate all parent loggers until a logger with appender was found while ( ! logger . equals ( LogManager . getRootLogger ( ) ) ) { logger = logger . getParent ( ) ; // if no Appender found from logger , select the Appender from parent logger if ( count == 0 ) { for ( Appender appender : logger . getAppenders ( ) . values ( ) ) { // only use file appenders if ( CmsLogFileApp . isFileAppender ( appender ) ) { String fileName = CmsLogFileApp . getFileName ( appender ) ; String temp = "" ; temp = fileName . substring ( fileName . lastIndexOf ( File . separatorChar ) + 1 ) ; test = test + temp ; count ++ ; break ; } } } } return test ;
import java.util.List;

class MaximumTrianglePath {

    /**
     * Computes the maximum sum obtainable on a path from the top of the triangle to its
     * bottom row, moving to an adjacent cell on each step. The computation is done in
     * place: the triangle rows are overwritten with partial sums.
     *
     * <p>{@code rows} is the index of the last row (the triangle has {@code rows + 1}
     * rows); {@code cols} is unused and kept only for interface compatibility.
     *
     * >>> maximum_triangle_path([[1, 0, 0], [4, 8, 0], [1, 5, 3]], 2, 2) == 14
     * >>> maximum_triangle_path([[13, 0, 0], [7, 4, 0], [2, 4, 6]], 2, 2) == 24
     * >>> maximum_triangle_path([[2, 0, 0], [11, 18, 0], [21, 25, 33]], 2, 2) == 53
     */
    public static Integer maximumTrianglePath(List<List<Integer>> triangle, int rows, int cols) {
        // Fold the triangle bottom-up: after processing row i, each cell of row i
        // holds the best achievable sum from that cell down to the bottom row.
        for (int row = rows - 1; row >= 0; row--) {
            List<Integer> current = triangle.get(row);
            List<Integer> below = triangle.get(row + 1);
            for (int col = 0; col <= row; col++) {
                int bestChild = Math.max(below.get(col), below.get(col + 1));
                current.set(col, current.get(col) + bestChild);
            }
        }
        // The apex now stores the maximum top-to-bottom path sum.
        return triangle.get(0).get(0);
    }
}
public class DescribePipelinesRequestMarshaller {

    /**
     * Marshalls the given request into the wire representation via the provided
     * protocol marshaller (generated AWS SDK code).
     *
     * @throws SdkClientException if the request is {@code null} or marshalling fails
     */
    public void marshall(DescribePipelinesRequest describePipelinesRequest, ProtocolMarshaller protocolMarshaller) {
        if (describePipelinesRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Bind the request member to its wire location.
            protocolMarshaller.marshall(describePipelinesRequest.getPipelineIds(), PIPELINEIDS_BINDING);
        } catch (Exception e) {
            // Wrap any failure, preserving the original exception as the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class Calendar {

    /**
     * Returns the string representation of the calendar {@code field} value in the
     * given {@code style} and {@code locale}, or {@code null} if no string
     * representation is applicable. This method calls {@link Calendar#get(int)
     * get(field)} to get the calendar {@code field} value if a string representation
     * is applicable to the given field.
     *
     * <p>For example, for a {@code GregorianCalendar} dated 2005-01-01 the {@link #MONTH}
     * field yields "January" (long style, English locale) or "Jan" (short style),
     * whereas {@link #DAY_OF_MONTH} has no name and yields {@code null}. The default
     * implementation supports the calendar fields for which a
     * {@link DateFormatSymbols} has names in the given {@code locale}.
     *
     * @param field the calendar field for which the string representation is returned
     * @param style the style applied to the string representation; one of
     *        {@link #SHORT_FORMAT} ({@link #SHORT}), {@link #SHORT_STANDALONE},
     *        {@link #LONG_FORMAT} ({@link #LONG}), {@link #LONG_STANDALONE},
     *        {@link #NARROW_FORMAT}, or {@link #NARROW_STANDALONE}
     * @param locale the locale for the string representation (any calendar types
     *        specified by {@code locale} are ignored)
     * @return the string representation of the given {@code field} in the given
     *         {@code style}, or {@code null} if no string representation is applicable
     * @exception IllegalArgumentException if {@code field} or {@code style} is invalid,
     *            or if this {@code Calendar} is non-lenient and any of the calendar
     *            fields have invalid values
     * @exception NullPointerException if {@code locale} is null
     * @since 1.6
     */
    public String getDisplayName(int field, int style, Locale locale) {
        // Only ERA, MONTH, DAY_OF_WEEK and AM_PM carry localized names; the check
        // throws for invalid field/style arguments and returns false when the field
        // simply has no name representation.
        if (!checkDisplayNameParams(field, style, ALL_STYLES, LONG, locale, ERA_MASK | MONTH_MASK | DAY_OF_WEEK_MASK | AM_PM_MASK)) {
            return null;
        }
        DateFormatSymbols symbols = DateFormatSymbols.getInstance(locale);
        String[] strings = getFieldStrings(field, style, symbols);
        if (strings != null) {
            int fieldValue = get(field);
            // Guard against a field value beyond the available names (e.g. calendar
            // systems with more eras than the symbols provide).
            if (fieldValue < strings.length) {
                return strings[fieldValue];
            }
        }
        return null;
    }
}
public class Matrix4x3d { /** * Apply an orthographic projection transformation for a left - handed coordinate system to this matrix . * This method is equivalent to calling { @ link # orthoLH ( double , double , double , double , double , double ) orthoLH ( ) } with * < code > zNear = - 1 < / code > and < code > zFar = + 1 < / code > . * If < code > M < / code > is < code > this < / code > matrix and < code > O < / code > the orthographic projection matrix , * then the new matrix will be < code > M * O < / code > . So when transforming a * vector < code > v < / code > with the new matrix by using < code > M * O * v < / code > , the * orthographic projection transformation will be applied first ! * In order to set the matrix to an orthographic projection without post - multiplying it , * use { @ link # setOrtho2DLH ( double , double , double , double ) setOrtho2DLH ( ) } . * Reference : < a href = " http : / / www . songho . ca / opengl / gl _ projectionmatrix . html # ortho " > http : / / www . songho . ca < / a > * @ see # orthoLH ( double , double , double , double , double , double ) * @ see # setOrtho2DLH ( double , double , double , double ) * @ param left * the distance from the center to the left frustum edge * @ param right * the distance from the center to the right frustum edge * @ param bottom * the distance from the center to the bottom frustum edge * @ param top * the distance from the center to the top frustum edge * @ return this */ public Matrix4x3d ortho2DLH ( double left , double right , double bottom , double top ) { } }
return ortho2DLH ( left , right , bottom , top , this ) ;
public class CPDefinitionInventoryLocalServiceUtil {

    /**
     * Deletes the cp definition inventory with the primary key from the database.
     * Also notifies the appropriate model listeners.
     *
     * @param CPDefinitionInventoryId the primary key of the cp definition inventory
     * @return the cp definition inventory that was removed
     * @throws PortalException if a cp definition inventory with the primary key could not be found
     */
    public static com.liferay.commerce.model.CPDefinitionInventory deleteCPDefinitionInventory(long CPDefinitionInventoryId) throws com.liferay.portal.kernel.exception.PortalException {
        // Static facade: delegate to the OSGi-provided local service instance.
        return getService().deleteCPDefinitionInventory(CPDefinitionInventoryId);
    }
}
public class StartBulkDeploymentRequestMarshaller {

    /**
     * Marshalls the given request into the wire representation via the provided
     * protocol marshaller (generated AWS SDK code).
     *
     * @throws SdkClientException if the request is {@code null} or marshalling fails
     */
    public void marshall(StartBulkDeploymentRequest startBulkDeploymentRequest, ProtocolMarshaller protocolMarshaller) {
        if (startBulkDeploymentRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Bind each request member to its wire location.
            protocolMarshaller.marshall(startBulkDeploymentRequest.getAmznClientToken(), AMZNCLIENTTOKEN_BINDING);
            protocolMarshaller.marshall(startBulkDeploymentRequest.getExecutionRoleArn(), EXECUTIONROLEARN_BINDING);
            protocolMarshaller.marshall(startBulkDeploymentRequest.getInputFileUri(), INPUTFILEURI_BINDING);
            protocolMarshaller.marshall(startBulkDeploymentRequest.getTags(), TAGS_BINDING);
        } catch (Exception e) {
            // Wrap any failure, preserving the original exception as the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class JsonObject { /** * Returns the < code > int < / code > value of the member with the specified name in this object . If * this object does not contain a member with this name , the given default value is returned . If * this object contains multiple members with the given name , the last one will be picked . If this * member ' s value does not represent a JSON number or if it cannot be interpreted as Java * < code > int < / code > , an exception is thrown . * @ param name * the name of the member whose value is to be returned * @ param defaultValue * the value to be returned if the requested member is missing * @ return the value of the last member with the specified name , or the given default value if * this object does not contain a member with that name */ public int getInt ( String name , int defaultValue ) { } }
JsonValue value = get ( name ) ; return value != null ? value . asInt ( ) : defaultValue ;
public class Request {

    /**
     * Gets a value from the headers of the current request.
     *
     * @param headerKey the header name (key)
     * @param charset the charset used to decode the header value
     * @return the header value as returned by {@code ServletUtil.getHeader}
     */
    public final static String getHeader(String headerKey, String charset) {
        return ServletUtil.getHeader(getServletRequest(), headerKey, charset);
    }
}
public class XCodeContext { /** * / * ( non - Javadoc ) * @ see com . sap . prd . mobile . ios . mios . IXCodeContext # getSDK ( ) */ @ Override public String getSDK ( ) { } }
return getOptions ( ) . getAllOptions ( ) . get ( Options . ManagedOption . SDK . getOptionName ( ) ) ;
public class UpdateMethodRequestMarshaller {

    /**
     * Marshall the given parameter object into the protocol representation.
     *
     * @param updateMethodRequest the request to serialize; must not be null
     * @param protocolMarshaller receives each field bound to its protocol location
     * @throws SdkClientException if the request is null or any field fails to marshall
     */
    public void marshall ( UpdateMethodRequest updateMethodRequest , ProtocolMarshaller protocolMarshaller ) {
        if ( updateMethodRequest == null ) {
            throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ;
        }
        try {
            // Bind each request field to its pre-built protocol binding descriptor.
            protocolMarshaller . marshall ( updateMethodRequest . getRestApiId ( ) , RESTAPIID_BINDING ) ;
            protocolMarshaller . marshall ( updateMethodRequest . getResourceId ( ) , RESOURCEID_BINDING ) ;
            protocolMarshaller . marshall ( updateMethodRequest . getHttpMethod ( ) , HTTPMETHOD_BINDING ) ;
            protocolMarshaller . marshall ( updateMethodRequest . getPatchOperations ( ) , PATCHOPERATIONS_BINDING ) ;
        } catch ( Exception e ) {
            // Wrap any marshalling failure, preserving the cause.
            throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ;
        }
    }
}
public class FeatureWebSecurityConfigImpl { /** * { @ inheritDoc } */ @ Override public boolean isUseOnlyCustomCookieName ( ) { } }
WebAppSecurityConfig globalConfig = WebAppSecurityCollaboratorImpl . getGlobalWebAppSecurityConfig ( ) ; if ( globalConfig != null ) return WebAppSecurityCollaboratorImpl . getGlobalWebAppSecurityConfig ( ) . isUseOnlyCustomCookieName ( ) ; else return useOnlyCustomCookieName ;
public class IntentsClient { /** * Returns the list of all intents in the specified agent . * < p > Sample code : * < pre > < code > * try ( IntentsClient intentsClient = IntentsClient . create ( ) ) { * ProjectAgentName parent = ProjectAgentName . of ( " [ PROJECT ] " ) ; * for ( Intent element : intentsClient . listIntents ( parent ) . iterateAll ( ) ) { * / / doThingsWith ( element ) ; * < / code > < / pre > * @ param parent Required . The agent to list all intents from . Format : ` projects / & lt ; Project * ID & gt ; / agent ` . * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ public final ListIntentsPagedResponse listIntents ( ProjectAgentName parent ) { } }
ListIntentsRequest request = ListIntentsRequest . newBuilder ( ) . setParent ( parent == null ? null : parent . toString ( ) ) . build ( ) ; return listIntents ( request ) ;
public class ProposalResponse {

    /**
     * Returns the status code that the chaincode execution set as its return status.
     * The value is extracted from the proposal response payload on first call and
     * cached thereafter.
     *
     * @return status code.
     * @throws InvalidArgumentException if the payload cannot be deserialized
     */
    public int getChaincodeActionResponseStatus ( ) throws InvalidArgumentException {
        // -1 is the sentinel for "not yet extracted"; return the cached value otherwise.
        if ( statusReturnCode != - 1 ) {
            return statusReturnCode ;
        }
        try {
            final ProposalResponsePayloadDeserializer proposalResponsePayloadDeserializer = getProposalResponsePayloadDeserializer ( ) ;
            statusReturnCode = proposalResponsePayloadDeserializer . getExtension ( ) . getResponseStatus ( ) ;
            return statusReturnCode ;
        } catch ( InvalidArgumentException e ) {
            // Already the declared type -- rethrow unchanged.
            throw e ;
        } catch ( Exception e ) {
            // Wrap any other parsing failure in the declared exception type,
            // preserving the cause.
            throw new InvalidArgumentException ( e ) ;
        }
    }
}
public class AbstractRemoteClient {

    /**
     * {@inheritDoc}
     *
     * @throws org.openbase.jul.exception.CouldNotPerformException {@inheritDoc}
     * @throws java.lang.InterruptedException {@inheritDoc}
     */
    @ Override
    public < R , T extends Object > R callMethod ( final String methodName , final T argument ) throws CouldNotPerformException , InterruptedException {
        // Delegate to the timeout-aware overload; -1 presumably selects the
        // default/unbounded timeout -- confirm against that overload's contract.
        return callMethod ( methodName , argument , - 1 ) ;
    }
}
public class aaaglobal_authenticationnegotiateaction_binding { /** * Use this API to fetch a aaaglobal _ authenticationnegotiateaction _ binding resources . */ public static aaaglobal_authenticationnegotiateaction_binding [ ] get ( nitro_service service ) throws Exception { } }
aaaglobal_authenticationnegotiateaction_binding obj = new aaaglobal_authenticationnegotiateaction_binding ( ) ; aaaglobal_authenticationnegotiateaction_binding response [ ] = ( aaaglobal_authenticationnegotiateaction_binding [ ] ) obj . get_resources ( service ) ; return response ;
public class Base64 { /** * Base64 decode a string into a byte array . * @ param text * The encoded base64 text . * @ return a byte array of the decoded data . * @ throws Exception */ public static byte [ ] decode ( String text ) throws Exception { } }
if ( text . length ( ) % 4 != 0 ) { throw new Exception ( "Base64 not a multiple of 4 bytes" ) ; } byte [ ] result = new byte [ text . length ( ) / 4 * 3 ] ; int p = 0 ; for ( int i = 0 ; i < text . length ( ) ; ) { if ( text . charAt ( i ) == '\n' ) { continue ; } int b1 = b64decode ( text . charAt ( i ++ ) ) ; int b2 = b64decode ( text . charAt ( i ++ ) ) ; int b3 = b64decode ( text . charAt ( i ++ ) ) ; int b4 = b64decode ( text . charAt ( i ++ ) ) ; if ( b4 >= 0 ) { int three = b4 | b3 << 6 | b2 << 12 | b1 << 18 ; result [ p ++ ] = ( byte ) ( ( three & 0xff0000 ) >> 16 ) ; result [ p ++ ] = ( byte ) ( ( three & 0xff00 ) >> 8 ) ; result [ p ++ ] = ( byte ) ( three & 0xff ) ; } else if ( b3 >= 0 ) { int two = b3 << 6 | b2 << 12 | b1 << 18 ; result [ p ++ ] = ( byte ) ( ( two & 0xff0000 ) >> 16 ) ; result [ p ++ ] = ( byte ) ( ( two & 0xff00 ) >> 8 ) ; } else { int one = b2 << 12 | b1 << 18 ; result [ p ++ ] = ( byte ) ( ( one & 0xff0000 ) >> 16 ) ; } } byte [ ] output = new byte [ p ] ; System . arraycopy ( result , 0 , output , 0 , p ) ; return output ;
public class LinkClustering {

    /**
     * Calculates the similarity between all pair-wise combinations of edges,
     * returning a max-heap ({@link PriorityQueue}) with the most similar edge
     * pairs at the top. Only edge pairs that share a vertex are compared.
     *
     * @param graph a graph whose edges are to be compared
     * @param minSimilarity an optional threshold for discarding edge pairs whose
     *        similarity is at or below this value, saving heap space; setting it
     *        too high can produce incomplete or incorrect merge sequences.
     * @return the similarity queue
     */
    private < E extends Edge > PriorityQueue < EdgePair > calcuateEdgeSimQueue (
            final Graph < E > graph , final double minSimilarity ) {
        final int numVertices = graph . order ( ) ;
        final int numEdges = graph . size ( ) ;
        double avgDegree = numEdges / ( double ) numVertices ;
        // Estimate the total number of comparisons so the heap can be sized
        // up-front: roughly d*(d+1)/2 pairs per vertex for average degree d.
        final int numComparisons = ( int ) ( ( ( avgDegree * ( avgDegree + 1 ) ) / 2 ) * numVertices ) ;
        final PriorityQueue < EdgePair > pq = new PriorityQueue < EdgePair > ( numComparisons ) ;
        // One task per vertex; the group key lets us await all of them below.
        Object key = WORK_QUEUE . registerTaskGroup ( graph . order ( ) ) ;
        IntIterator iter1 = graph . vertices ( ) . iterator ( ) ;
        while ( iter1 . hasNext ( ) ) {
            final int v1 = iter1 . nextInt ( ) ;
            WORK_QUEUE . add ( key , new Runnable ( ) {
                public void run ( ) {
                    veryVerbose ( LOGGER , "Computing similarities for " + "vertex %d" , v1 ) ;
                    IntSet neighbors = graph . getNeighbors ( v1 ) ;
                    // Accumulate into a task-local queue first so the shared
                    // queue is locked only once per vertex.
                    PriorityQueue < EdgePair > localQ =
                        new PriorityQueue < EdgePair > ( neighbors . size ( ) ) ;
                    IntIterator it1 = neighbors . iterator ( ) ;
                    while ( it1 . hasNext ( ) ) {
                        int v2 = it1 . nextInt ( ) ;
                        IntIterator it2 = neighbors . iterator ( ) ;
                        while ( it2 . hasNext ( ) ) {
                            int v3 = it2 . nextInt ( ) ;
                            // Break at the diagonal so each unordered neighbor
                            // pair (v2, v3) is compared exactly once.
                            if ( v2 == v3 )
                                break ;
                            float sim = ( float ) getConnectionSimilarity ( graph , v1 , v2 , v3 ) ;
                            if ( sim > minSimilarity )
                                // Negated so the min-heap behaves as a max-heap.
                                localQ . add ( new EdgePair ( - sim , v1 , v2 , v3 ) ) ;
                        }
                    }
                    synchronized ( pq ) {
                        pq . addAll ( localQ ) ;
                        int comps = pq . size ( ) ;
                        veryVerbose ( LOGGER , "%d/%d comparisons " + "completed (%f)" ,
                                     comps , numComparisons , ( double ) comps / numComparisons ) ;
                    }
                }
            } ) ;
        }
        // Block until every per-vertex task has finished before returning.
        WORK_QUEUE . await ( key ) ;
        return pq ;
    }
}
public class StandardDirectoryAgentServer {

    /**
     * Replaces or updates a previously cached service (if any) with the given service.
     *
     * @param service the new service
     * @param update whether the given service updates (true) or replaces (false)
     *        a previously cached service
     * @return a structure containing the previous service (if any) and the current service
     * @throws ServiceLocationException with SCOPE_NOT_SUPPORTED if the service's
     *         scopes are not served by this DirectoryAgent
     */
    protected ServiceInfoCache . Result < ServiceInfo > cacheService ( ServiceInfo service , boolean update ) {
        // RFC 2608, 7.0: a DA only accepts registrations within its own scopes.
        if ( ! scopes . match ( service . getScopes ( ) ) ) {
            if ( logger . isLoggable ( Level . FINE ) )
                logger . fine ( "Could not register service " + service + ", DirectoryAgent scopes " + scopes + " do not match with service scopes " + service . getScopes ( ) ) ;
            throw new ServiceLocationException ( "Could not register service " + service , SLPError . SCOPE_NOT_SUPPORTED ) ;
        }
        if ( update ) {
            // Incremental registration: merge new attributes into the cached entry.
            ServiceInfoCache . Result < ServiceInfo > result = services . addAttributes ( service . getKey ( ) , service . getAttributes ( ) ) ;
            if ( logger . isLoggable ( Level . FINE ) )
                logger . fine ( "Added attributes " + service + " to service " + result . getPrevious ( ) + ", result is: " + result . getCurrent ( ) ) ;
            return result ;
        } else {
            // Full registration: replace any previously cached entry outright.
            ServiceInfoCache . Result < ServiceInfo > result = services . put ( service ) ;
            if ( logger . isLoggable ( Level . FINE ) ) {
                if ( result . getPrevious ( ) == null )
                    logger . fine ( "Registered service " + service ) ;
                else
                    logger . fine ( "Replaced service " + result . getPrevious ( ) + " with service " + service ) ;
            }
            return result ;
        }
    }
}
public class Op { /** * Creates an < i > operation expression < / i > on the specified target object , specifying the * target type by means of the < tt > type < / tt > parameter . * @ param type the type of the target object ( input type for the expression ) * @ param target the target object on which the expression will execute * @ return an operator , ready for chaining */ public static < T > Level0SetOperator < Set < T > , T > onSetOf ( final Type < T > type , final Set < ? extends T > target ) { } }
return new Level0SetOperator < Set < T > , T > ( ExecutionTarget . forOp ( target , Normalisation . SET ) ) ;
public class PhaseFourImpl {

    /**
     * {@inheritDoc}
     *
     * <p>Loads a compiled proto network into the KAM schema in a fixed sequence:
     * type tables, documents, namespaces, annotation definitions and values,
     * the document-to-namespace map, nodes, edges, and finally the
     * statement-to-annotation associations. Each stage wraps its SQLException
     * in a DatabaseError naming that stage.
     *
     * @throws SQLException (wrapped as DatabaseError)
     */
    @ Override
    public void stage4LoadKAM ( DBConnection dbConnection , ProtoNetwork p2pn , String schema ) throws DatabaseError , CreateKAMFailure {
        JdbcKAMLoaderImpl jkl ;
        try {
            jkl = new JdbcKAMLoaderImpl ( dbConnection , schema ) ;
        } catch ( SQLException e ) {
            final String msg = "Error creating KAM loader" ;
            throw new DatabaseError ( schema , msg , e ) ;
        }
        // The target schema must already exist; this phase cannot create it.
        if ( ! jkl . schemaExists ( ) ) {
            final String fmt = "schema \"%s\" does not exist" ;
            final String msg = format ( fmt , jkl . getSchemaName ( ) ) ;
            throw new CreateKAMFailure ( dbConnection , msg ) ;
        }
        try {
            // load type tables
            jkl . loadObjectTypes ( ) ;
        } catch ( SQLException e ) {
            final String msg = "Error loading object types" ;
            throw new DatabaseError ( schema , msg , e ) ;
        }
        try {
            jkl . loadFunctionTypes ( ) ;
        } catch ( SQLException e ) {
            final String msg = "Error loading function types" ;
            throw new DatabaseError ( schema , msg , e ) ;
        }
        try {
            jkl . loadRelationshipTypes ( ) ;
        } catch ( SQLException e ) {
            final String msg = "Error loading relationship types" ;
            throw new DatabaseError ( schema , msg , e ) ;
        }
        try {
            jkl . loadAnnotationDefinitionTypes ( ) ;
        } catch ( SQLException e ) {
            final String msg = "Error loading annotation definitions types" ;
            throw new DatabaseError ( schema , msg , e ) ;
        }
        // load documents
        List < DocumentHeader > dhs = p2pn . getDocumentTable ( ) . getDocumentHeaders ( ) ;
        try {
            jkl . loadDocuments ( dhs ) ;
        } catch ( SQLException e ) {
            final String msg = "Error loading documents" ;
            throw new DatabaseError ( schema , msg , e ) ;
        }
        // load namespaces, one at a time so a failure names the namespace
        NamespaceTable nt = p2pn . getNamespaceTable ( ) ;
        Set < TableNamespace > nsl = nt . getNamespaces ( ) ;
        Map < TableNamespace , Integer > nsi = nt . getNamespaceIndex ( ) ;
        for ( TableNamespace ns : nsl ) {
            try {
                jkl . loadNamespace ( nsi . get ( ns ) , ns ) ;
            } catch ( SQLException e ) {
                final String fmt = "Error loading namespace %s/%s" ;
                final String msg = format ( fmt , ns . getPrefix ( ) , ns . getResourceLocation ( ) ) ;
                throw new DatabaseError ( schema , msg , e ) ;
            }
        }
        // load annotation definitions
        try {
            jkl . loadAnnotationDefinitions ( p2pn . getAnnotationDefinitionTable ( ) ) ;
        } catch ( SQLException e ) {
            final String msg = "Error loading annotation definitions" ;
            throw new DatabaseError ( schema , msg , e ) ;
        }
        try {
            // load annotations
            jkl . loadAnnotationValues ( p2pn . getAnnotationValueTable ( ) ) ;
        } catch ( SQLException e ) {
            final String msg = "Error loading annotation values" ;
            throw new DatabaseError ( schema , msg , e ) ;
        }
        try {
            // load document to namespace map
            jkl . loadDocumentNamespaceMap ( nt . getDocumentNamespaces ( ) ) ;
        } catch ( SQLException e ) {
            final String msg = "Error loading document namespaces" ;
            throw new DatabaseError ( schema , msg , e ) ;
        }
        try {
            // load nodes
            jkl . loadNodes ( p2pn . getNamespaceTable ( ) , p2pn . getParameterTable ( ) , p2pn . getTermTable ( ) , p2pn . getTermParameterMapTable ( ) , p2pn . getProtoNodeTable ( ) ) ;
        } catch ( SQLException e ) {
            final String msg = "Error loading nodes" ;
            throw new DatabaseError ( schema , msg , e ) ;
        }
        try {
            // load edges
            jkl . loadEdges ( p2pn . getStatementTable ( ) , p2pn . getTermTable ( ) , p2pn . getProtoNodeTable ( ) , p2pn . getProtoEdgeTable ( ) ) ;
        } catch ( SQLException e ) {
            final String msg = "Error loading edges" ;
            throw new DatabaseError ( schema , msg , e ) ;
        }
        try {
            // associate annotations to statements
            jkl . loadStatementAnnotationMap ( p2pn . getStatementAnnotationMapTable ( ) ) ;
        } catch ( SQLException e ) {
            final String msg = "Error loading statement annotations" ;
            throw new DatabaseError ( schema , msg , e ) ;
        }
        // close loader dao
        jkl . terminate ( ) ;
    }
}
public class Infer {

    /**
     * Infer cyclic inference variables as described in JLS 15.12.2.8.
     * Variables whose upper bounds mention other variables in the cycle are
     * instantiated to fresh synthetic type variables; the remaining variables
     * get the glb of their upper bounds (or Object if unbounded).
     */
    private void instantiateAsUninferredVars ( List < Type > vars , InferenceContext inferenceContext ) {
        ListBuffer < Type > todo = new ListBuffer < > ( ) ;
        // step 1 - create fresh tvars
        for ( Type t : vars ) {
            UndetVar uv = ( UndetVar ) inferenceContext . asUndetVar ( t ) ;
            List < Type > upperBounds = uv . getBounds ( InferenceBound . UPPER ) ;
            if ( Type . containsAny ( upperBounds , vars ) ) {
                // Upper bound refers back into the cycle: break it with a
                // fresh synthetic type variable; fix up its bound in step 2.
                TypeSymbol fresh_tvar = new TypeVariableSymbol ( Flags . SYNTHETIC , uv . qtype . tsym . name , null , uv . qtype . tsym . owner ) ;
                fresh_tvar . type = new TypeVar ( fresh_tvar , types . makeIntersectionType ( uv . getBounds ( InferenceBound . UPPER ) ) , null ) ;
                todo . append ( uv ) ;
                uv . inst = fresh_tvar . type ;
            } else if ( upperBounds . nonEmpty ( ) ) {
                // Acyclic with bounds: instantiate to the glb of the uppers.
                uv . inst = types . glb ( upperBounds ) ;
            } else {
                // No upper bounds at all: instantiate to Object.
                uv . inst = syms . objectType ;
            }
        }
        // step 2 - replace fresh tvars in their bounds
        List < Type > formals = vars ;
        for ( Type t : todo ) {
            UndetVar uv = ( UndetVar ) t ;
            TypeVar ct = ( TypeVar ) uv . inst ;
            ct . bound = types . glb ( inferenceContext . asInstTypes ( types . getBounds ( ct ) ) ) ;
            if ( ct . bound . isErroneous ( ) ) {
                // report inference error if glb fails
                reportBoundError ( uv , BoundErrorKind . BAD_UPPER ) ;
            }
            formals = formals . tail ;
        }
    }
}
public class AbstractIcalObject {

    /**
     * Parse the ical object from the given ical content using the given schema.
     * Modifies the current object in place: sets the name, collects parameters,
     * then hands the content to the schema for value parsing.
     *
     * @param icalString a (possibly folded) ical content line
     * @param schema rules for processing individual parameters and body content.
     * @throws ParseException if the content cannot be parsed
     */
    protected void parse ( String icalString , IcalSchema schema ) throws ParseException {
        String paramText ;
        String content ;
        {
            // Unfold continuation lines, then split the content line into
            // NAME (group 1), parameter text (group 2) and value (group 3).
            String unfolded = IcalParseUtil . unfoldIcal ( icalString ) ;
            Matcher m = CONTENT_LINE_RE . matcher ( unfolded ) ;
            if ( ! m . matches ( ) ) {
                // NOTE(review): badContent() presumably throws; otherwise the
                // m.group() calls below would fail on an unmatched matcher.
                schema . badContent ( icalString ) ;
            }
            setName ( m . group ( 1 ) . toUpperCase ( ) ) ;
            paramText = m . group ( 2 ) ;
            if ( null == paramText ) {
                paramText = "" ;
            }
            content = m . group ( 3 ) ;
        }
        // parse parameters, consuming the parameter text one match at a time
        Map < String , String > params = new HashMap < String , String > ( ) ;
        String rest = paramText ;
        while ( ! "" . equals ( rest ) ) {
            Matcher m = PARAM_RE . matcher ( rest ) ;
            if ( ! m . find ( ) ) {
                schema . badPart ( rest , null ) ;
            }
            rest = rest . substring ( m . end ( 0 ) ) ;
            String k = m . group ( 1 ) . toUpperCase ( ) ;
            String v = m . group ( 2 ) ;
            if ( null == v ) {
                // The regex has two value alternatives (e.g. quoted/unquoted).
                v = m . group ( 3 ) ;
            }
            if ( params . containsKey ( k ) ) {
                // Duplicate parameter names are reported to the schema.
                schema . dupePart ( k ) ;
            }
            params . put ( k , v ) ;
        }
        // parse the content and individual attribute values
        schema . applyObjectSchema ( this . name , params , content , this ) ;
    }
}
public class MultipartUploadRequest {

    /**
     * Adds a file to this upload request.
     *
     * @param filePath path to the file that you want to upload
     * @param parameterName Name of the form parameter that will contain file's data
     * @param fileName File name seen by the server side script. If null, the original
     *        file name will be used
     * @param contentType Content type of the file. Set null or empty to auto-detect
     *        the mime type; {@code application/octet-stream} is the fallback
     * @throws FileNotFoundException if the file does not exist at the specified path
     * @throws IllegalArgumentException if one or more parameters are not valid
     * @return {@link MultipartUploadRequest} for chaining
     */
    public MultipartUploadRequest addFileToUpload ( String filePath , String parameterName , String fileName , String contentType ) throws FileNotFoundException , IllegalArgumentException {
        // UploadFile construction validates the path (throws FileNotFoundException).
        UploadFile file = new UploadFile ( filePath ) ;
        // Normalize to the resolved path for all subsequent messages.
        filePath = file . getPath ( ) ;
        if ( parameterName == null || "" . equals ( parameterName ) ) {
            throw new IllegalArgumentException ( "Please specify parameterName value for file: " + filePath ) ;
        }
        file . setProperty ( MultipartUploadTask . PROPERTY_PARAM_NAME , parameterName ) ;
        if ( contentType == null || contentType . isEmpty ( ) ) {
            // Auto-detect the MIME type from the file itself.
            contentType = file . getContentType ( context ) ;
            Logger . debug ( LOG_TAG , "Auto-detected MIME type for " + filePath + " is: " + contentType ) ;
        } else {
            Logger . debug ( LOG_TAG , "Content Type set for " + filePath + " is: " + contentType ) ;
        }
        file . setProperty ( MultipartUploadTask . PROPERTY_CONTENT_TYPE , contentType ) ;
        if ( fileName == null || "" . equals ( fileName ) ) {
            // Fall back to the file's own name when none was supplied.
            fileName = file . getName ( context ) ;
            Logger . debug ( LOG_TAG , "Using original file name: " + fileName ) ;
        } else {
            Logger . debug ( LOG_TAG , "Using custom file name: " + fileName ) ;
        }
        file . setProperty ( MultipartUploadTask . PROPERTY_REMOTE_FILE_NAME , fileName ) ;
        params . files . add ( file ) ;
        return this ;
    }
}
public class ServiceUpdater { /** * Add the requested post parameters to the Request . * @ param request Request to add post params to */ private void addPostParams ( final Request request ) { } }
if ( includeCredentials != null ) { request . addPostParam ( "IncludeCredentials" , includeCredentials . toString ( ) ) ; } if ( friendlyName != null ) { request . addPostParam ( "FriendlyName" , friendlyName ) ; }
public class SQLContextImpl { /** * / * ( non - Javadoc ) * @ see com . abubusoft . kripton . android . sqlite . SQLContext # sqlBuilder ( ) */ @ Override public StringBuilder sqlBuilder ( ) { } }
StringBuilder builder = this . sqlStringBuilder . get ( ) ; builder . delete ( 0 , builder . length ( ) ) ; return builder ;
public class Messages {

    /**
     * Generates a formatted text string given a source string containing "argument
     * markers" of the form "{argNum}" where each argNum must be in the range 0..9.
     * The result is produced by inserting the toString of each argument at the
     * indicated position. To emit a literal "{", escape it with a single backslash
     * ("\{"); "}" needs no escaping.
     *
     * @param format String the format to use when printing.
     * @param args Object[] the arguments to use.
     * @return String the formatted message.
     */
    public static String format ( String format , Object [ ] args ) {
        // Pre-render every argument once; null renders as "<null>".
        final String [ ] rendered = new String [ args . length ] ;
        for ( int a = 0 ; a < args . length ; a ++ ) {
            rendered [ a ] = ( args [ a ] == null ) ? "<null>" : args [ a ] . toString ( ) ; //$NON-NLS-1$
        }
        final StringBuilder out = new StringBuilder ( format . length ( ) + ( args . length * 20 ) ) ;
        int copied = 0 ; // index of the first character not yet copied to 'out'
        int brace = format . indexOf ( '{' , 0 ) ;
        while ( brace >= 0 ) {
            if ( brace != 0 && format . charAt ( brace - 1 ) == '\\' ) {
                // Escaped "\{": drop the backslash, emit a literal '{'.
                if ( brace != 1 ) {
                    out . append ( format . substring ( copied , brace - 1 ) ) ;
                }
                out . append ( '{' ) ;
                copied = brace + 1 ;
            } else if ( brace > format . length ( ) - 3 ) {
                // Too close to the end to hold "{n}" -- copy the rest verbatim.
                out . append ( format . substring ( copied , format . length ( ) ) ) ;
                copied = format . length ( ) ;
            } else {
                final int argnum = ( byte ) Character . digit ( format . charAt ( brace + 1 ) , 10 ) ;
                if ( argnum < 0 || format . charAt ( brace + 2 ) != '}' ) {
                    // Not a well-formed marker; copy through the '{' and move on.
                    out . append ( format . substring ( copied , brace + 1 ) ) ;
                    copied = brace + 1 ;
                } else {
                    // Well-formed marker: substitute the rendered argument.
                    out . append ( format . substring ( copied , brace ) ) ;
                    if ( argnum >= rendered . length )
                        out . append ( "<missing argument>" ) ; //$NON-NLS-1$
                    else
                        out . append ( rendered [ argnum ] ) ;
                    copied = brace + 3 ;
                }
            }
            brace = format . indexOf ( '{' , copied ) ;
        }
        if ( copied < format . length ( ) )
            out . append ( format . substring ( copied , format . length ( ) ) ) ;
        return out . toString ( ) ;
    }
}
public class NetworkTopology { /** * randomly choose one node from < i > scope < / i > * if scope starts with ~ , choose one from the all nodes except for the * ones in < i > scope < / i > ; otherwise , choose one from < i > scope < / i > * @ param scope range of nodes from which a node will be choosen * @ return the choosen node */ public Node chooseRandom ( String scope ) { } }
netlock . readLock ( ) . lock ( ) ; try { if ( scope . startsWith ( "~" ) ) { return chooseRandom ( NodeBase . ROOT , scope . substring ( 1 ) ) ; } else { return chooseRandom ( scope , null ) ; } } finally { netlock . readLock ( ) . unlock ( ) ; }
public class Queue {

    /**
     * Remove the given element from this queue.
     * Head and tail removals are delegated to removeHead()/removeTail();
     * interior elements are unlinked in place.
     */
    public void remove ( QueueElement qe ) {
        if ( head == qe ) {
            // Covers the single-element case too (head == tail == qe).
            removeHead ( ) ;
        } else if ( tail == qe ) {
            removeTail ( ) ;
        } else {
            // Interior element: splice neighbours together, then clear the
            // element's links so stale references cannot traverse the queue.
            // NOTE(review): assumes qe is actually linked into this queue;
            // a detached element would NPE on qe.previous here.
            qe . previous . next = qe . next ;
            qe . next . previous = qe . previous ;
            qe . previous = null ;
            qe . next = null ;
            qe . queue = null ;
            numElements -- ;
        }
    }
}
public class SharesInner {

    /**
     * Refreshes the share metadata with the data from the cloud.
     *
     * @param deviceName The device name.
     * @param name The share name.
     * @param resourceGroupName The resource group name.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable for the request
     */
    public Observable < Void > refreshAsync ( String deviceName , String name , String resourceGroupName ) {
        // Delegate to the full-response variant and unwrap the (Void) body
        // from the ServiceResponse envelope.
        return refreshWithServiceResponseAsync ( deviceName , name , resourceGroupName ) . map ( new Func1 < ServiceResponse < Void > , Void > ( ) {
            @ Override
            public Void call ( ServiceResponse < Void > response ) {
                return response . body ( ) ;
            }
        } ) ;
    }
}
public class CreateTransformJobRequest {

    /**
     * The environment variables to set in the Docker container. We support up to
     * 16 key and values entries in the map.
     *
     * @param environment The environment variables to set in the Docker container.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateTransformJobRequest withEnvironment ( java . util . Map < String , String > environment ) {
        // Fluent variant of setEnvironment: store and return this for chaining.
        setEnvironment ( environment ) ;
        return this ;
    }
}
public class FileMonitor { /** * Remove listener from this file monitor . * @ param fileListener Listener to remove . */ public void removeListener ( FileChangedListener fileListener ) { } }
for ( Iterator < WeakReference < FileChangedListener > > i = listeners_ . iterator ( ) ; i . hasNext ( ) ; ) { WeakReference < FileChangedListener > reference = i . next ( ) ; FileChangedListener listener = reference . get ( ) ; if ( listener == fileListener ) { i . remove ( ) ; break ; } }
public class Formats {

    /**
     * Returns a substring of the given string, representing the 'length'
     * most-right characters. If {@code length} exceeds the string length the
     * whole string is returned; if {@code length} is zero or negative an empty
     * string is returned (the original threw StringIndexOutOfBoundsException
     * for negative lengths).
     *
     * @param str the source string (must not be null)
     * @param length how many trailing characters to keep
     * @return the trailing substring
     */
    public static String rightStr ( String str , int length ) {
        if ( length <= 0 ) {
            // Robustness fix: a negative length previously made
            // str.length() - length exceed str.length() and throw.
            return "" ;
        }
        return str . substring ( Math . max ( 0 , str . length ( ) - length ) ) ;
    }
}
public class MwsUtl {

    /**
     * Computes an RFC 2104-compliant HMAC signature for the request parameters,
     * per the AWS signature spec: V0 signs Action+Timestamp, V1 signs the
     * case-insensitively sorted name/value concatenation, V2 signs the canonical
     * request (method, host, path, sorted URL-encoded query string).
     *
     * <p>Note: this method mutates the given parameter map, adding
     * "SignatureVersion" (and, for V2, "SignatureMethod").
     *
     * @param serviceUri including host, port, api name, and api version
     * @param signatureVersion "0", "1" or "2"
     * @param signatureMethod the HMAC algorithm to use for V2 (e.g. HmacSHA256)
     * @param parameters the request parameters to sign (modified in place)
     * @param aswSecretKey the AWS secret key
     * @return the base64 encoding of the signature
     * @throws IllegalArgumentException for an unknown signature version
     */
    static String signParameters ( URI serviceUri , String signatureVersion , String signatureMethod , Map < String , String > parameters , String aswSecretKey ) {
        // The version itself is part of what gets signed.
        parameters . put ( "SignatureVersion" , signatureVersion ) ;
        String algorithm = "HmacSHA1" ;
        String stringToSign = null ;
        if ( "0" . equals ( signatureVersion ) ) {
            stringToSign = calculateStringToSignV0 ( parameters ) ;
        } else if ( "1" . equals ( signatureVersion ) ) {
            stringToSign = calculateStringToSignV1 ( parameters ) ;
        } else if ( "2" . equals ( signatureVersion ) ) {
            // V2 lets the caller choose the algorithm, which must itself be
            // recorded in the signed parameters as SignatureMethod.
            algorithm = signatureMethod ;
            parameters . put ( "SignatureMethod" , algorithm ) ;
            stringToSign = calculateStringToSignV2 ( serviceUri , parameters ) ;
        } else {
            throw new IllegalArgumentException ( "Invalid Signature Version specified" ) ;
        }
        return sign ( stringToSign , aswSecretKey , algorithm ) ;
    }
}
public class AmazonPollyClient {

    /**
     * Returns a list of SpeechSynthesisTask objects ordered by their creation date.
     * This operation can filter the tasks by their status, for example, allowing
     * users to list only tasks that are completed.
     *
     * @param request the ListSpeechSynthesisTasks request
     * @return Result of the ListSpeechSynthesisTasks operation returned by the service.
     * @throws InvalidNextTokenException The NextToken is invalid.
     * @throws ServiceFailureException An unknown condition has caused a service failure.
     * @sample AmazonPolly.ListSpeechSynthesisTasks
     */
    @ Override
    public ListSpeechSynthesisTasksResult listSpeechSynthesisTasks ( ListSpeechSynthesisTasksRequest request ) {
        // Standard SDK pattern: run pre-execution handlers, then dispatch.
        request = beforeClientExecution ( request ) ;
        return executeListSpeechSynthesisTasks ( request ) ;
    }
}
public class AwsSecurityFindingFilters {

    /**
     * The creation date/time of the IAM access key related to a finding.
     *
     * @param resourceAwsIamAccessKeyCreatedAt The creation date/time of the IAM
     *        access key related to a finding; null clears the filter list.
     */
    public void setResourceAwsIamAccessKeyCreatedAt ( java . util . Collection < DateFilter > resourceAwsIamAccessKeyCreatedAt ) {
        if ( resourceAwsIamAccessKeyCreatedAt == null ) {
            this . resourceAwsIamAccessKeyCreatedAt = null ;
            return ;
        }
        // Store a defensive copy so later mutation of the caller's collection
        // does not affect this filter.
        this . resourceAwsIamAccessKeyCreatedAt = new java . util . ArrayList < DateFilter > ( resourceAwsIamAccessKeyCreatedAt ) ;
    }
}
public class Ifc4PackageImpl {

    /**
     * Returns the EClass for IfcColumn, resolving it lazily from the registered
     * Ifc4 package on first access (index 104 is the generated classifier
     * position of IfcColumn).
     *
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @ Override
    public EClass getIfcColumn ( ) {
        if ( ifcColumnEClass == null ) {
            ifcColumnEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( Ifc4Package . eNS_URI ) . getEClassifiers ( ) . get ( 104 ) ;
        }
        return ifcColumnEClass ;
    }
}
public class LayoutRefiner { /** * Bend select atoms around a provided pivot by the specified amount ( r ) . * @ param indexes array of atom indexes * @ param from start offset into the array ( inclusive ) * @ param to end offset into the array ( exclusive ) * @ param pivotAtm the point about which we are pivoting * @ param r radians to bend by */ private void bend ( int [ ] indexes , int from , int to , IAtom pivotAtm , double r ) { } }
double s = Math . sin ( r ) ; double c = Math . cos ( r ) ; Point2d pivot = pivotAtm . getPoint2d ( ) ; for ( int i = from ; i < to ; i ++ ) { Point2d p = mol . getAtom ( indexes [ i ] ) . getPoint2d ( ) ; double x = p . x - pivot . x ; double y = p . y - pivot . y ; double nx = x * c + y * s ; double ny = - x * s + y * c ; p . x = nx + pivot . x ; p . y = ny + pivot . y ; }
public class SpiderService { /** * Delete a batch of objects from the given table . All objects must have an ID * assigned . Deleting an already - deleted object is a no - op . * @ param tableDef Table containing objects to be deleted . * @ param batch { @ link DBObjectBatch } defining objects to be deleted . Only the * _ ID field of each DBObject is used . * @ return { @ link BatchResult } indicating results of the delete . */ public BatchResult deleteBatch ( TableDefinition tableDef , DBObjectBatch batch ) { } }
checkServiceState ( ) ; List < String > objIDs = new ArrayList < > ( ) ; for ( DBObject dbObj : batch . getObjects ( ) ) { Utils . require ( ! Utils . isEmpty ( dbObj . getObjectID ( ) ) , "All objects must have _ID defined" ) ; objIDs . add ( dbObj . getObjectID ( ) ) ; } BatchObjectUpdater batchUpdater = new BatchObjectUpdater ( tableDef ) ; return batchUpdater . deleteBatch ( objIDs ) ;
public class AbstractImmutableMapBuilder { /** * Create a delta consisting of updates . * @ param mutators * mutations to apply */ protected final MapDelta < K , V > genUpdates ( final Map < K , Function < ? super V , ? extends V > > mutators ) { } }
return mutators . isEmpty ( ) ? Nop . instance ( ) : new Update < > ( mutators ) ;
public class MmtfUtils { /** * Set the DSSP type based on a numerical index . * @ param dsspIndex the integer index of the type to set * @ return the instance of the SecStrucType object holding this secondary * structure type . */ public static SecStrucType getSecStructTypeFromDsspIndex ( int dsspIndex ) { } }
String dsspType = DsspType . dsspTypeFromInt ( dsspIndex ) . getDsspType ( ) ; for ( SecStrucType secStrucType : SecStrucType . values ( ) ) { if ( dsspType == secStrucType . name ) { return secStrucType ; } } // Return a null entry . return null ;
public class ListAuditFindingsRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( ListAuditFindingsRequest listAuditFindingsRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( listAuditFindingsRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( listAuditFindingsRequest . getTaskId ( ) , TASKID_BINDING ) ; protocolMarshaller . marshall ( listAuditFindingsRequest . getCheckName ( ) , CHECKNAME_BINDING ) ; protocolMarshaller . marshall ( listAuditFindingsRequest . getResourceIdentifier ( ) , RESOURCEIDENTIFIER_BINDING ) ; protocolMarshaller . marshall ( listAuditFindingsRequest . getMaxResults ( ) , MAXRESULTS_BINDING ) ; protocolMarshaller . marshall ( listAuditFindingsRequest . getNextToken ( ) , NEXTTOKEN_BINDING ) ; protocolMarshaller . marshall ( listAuditFindingsRequest . getStartTime ( ) , STARTTIME_BINDING ) ; protocolMarshaller . marshall ( listAuditFindingsRequest . getEndTime ( ) , ENDTIME_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class ICPImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ Override public Object eGet ( int featureID , boolean resolve , boolean coreType ) { } }
switch ( featureID ) { case AfplibPackage . ICP__XC_OSET : return getXCOset ( ) ; case AfplibPackage . ICP__YC_OSET : return getYCOset ( ) ; case AfplibPackage . ICP__XC_SIZE : return getXCSize ( ) ; case AfplibPackage . ICP__YC_SIZE : return getYCSize ( ) ; case AfplibPackage . ICP__XFIL_SIZE : return getXFilSize ( ) ; case AfplibPackage . ICP__YFIL_SIZE : return getYFilSize ( ) ; } return super . eGet ( featureID , resolve , coreType ) ;
public class PropertiesField { /** * Convert these java properties to a string . * @ param properties The java properties . * @ return The properties string . */ public static String propertiesToString ( Map < String , Object > map ) { } }
String strProperties = null ; Properties properties = Utility . mapToProperties ( map ) ; ByteArrayOutputStream baOut = new ByteArrayOutputStream ( ) ; try { properties . store ( baOut , PROPERTIES_COMMENT ) ; byte [ ] rgBytes = baOut . toByteArray ( ) ; ByteArrayInputStream baIn = new ByteArrayInputStream ( rgBytes ) ; InputStreamReader isIn = new InputStreamReader ( baIn ) ; // byte - > char char [ ] cbuf = new char [ rgBytes . length ] ; isIn . read ( cbuf , 0 , rgBytes . length ) ; if ( cbuf . length == rgBytes . length ) strProperties = new String ( cbuf ) ; } catch ( IOException ex ) { ex . printStackTrace ( ) ; } return strProperties ;
public class DBCleanService { /** * Resolves dialect which it is used in workspace configuration . First of all , * method will try to get parameter { @ link JDBCWorkspaceDataContainer # DB _ DIALECT } from * a configuration . And only then method will try to detect dialect using { @ link DialectDetecter } in case * if dialect is set as { @ link DialectConstants # DB _ DIALECT _ AUTO } . * @ param wsEntry * workspace configuration * @ return dialect * @ throws DBCleanException */ private static String resolveDialect ( Connection jdbcConn , WorkspaceEntry wsEntry ) throws DBCleanException { } }
String dialect = DBInitializerHelper . getDatabaseDialect ( wsEntry ) ; if ( dialect . startsWith ( DBConstants . DB_DIALECT_AUTO ) ) { try { dialect = DialectDetecter . detect ( jdbcConn . getMetaData ( ) ) ; } catch ( SQLException e ) { throw new DBCleanException ( e ) ; } } return dialect ;
public class DbPersistenceManager { /** * { @ inheritDoc } * This method uses shared < code > PreparedStatements < / code > , which must * be used strictly sequentially . Because this method synchronizes on the * persistence manager instance , there is no need to synchronize on the * shared statement . If the method would not be synchronized , the shared * statement must be synchronized . */ public synchronized void store ( NodeReferences refs ) throws ItemStateException { } }
if ( ! initialized ) { throw new IllegalStateException ( "not initialized" ) ; } // check if insert or update boolean update = existsReferencesTo ( refs . getTargetId ( ) ) ; String sql = ( update ) ? nodeReferenceUpdateSQL : nodeReferenceInsertSQL ; try { ByteArrayOutputStream out = new ByteArrayOutputStream ( INITIAL_BUFFER_SIZE ) ; // serialize references Serializer . serialize ( refs , out ) ; Object [ ] params = createParams ( refs . getTargetId ( ) , out . toByteArray ( ) , true ) ; conHelper . exec ( sql , params ) ; // there ' s no need to close a ByteArrayOutputStream // out . close ( ) ; } catch ( Exception e ) { String msg = "failed to write " + refs ; log . error ( msg , e ) ; throw new ItemStateException ( msg , e ) ; }
public class SchemaManager { /** * Creates a schema belonging to the given grantee . */ void createSchema ( HsqlName name , Grantee owner ) { } }
SqlInvariants . checkSchemaNameNotSystem ( name . name ) ; Schema schema = new Schema ( name , owner ) ; schemaMap . add ( name . name , schema ) ;
public class DefaultStreamTokenizer { /** * Checks , if underlying stream has any tokens left * @ return */ private boolean streamHasMoreTokens ( ) { } }
if ( streamTokenizer . ttype != StreamTokenizer . TT_EOF ) { try { streamTokenizer . nextToken ( ) ; } catch ( IOException e1 ) { throw new RuntimeException ( e1 ) ; } } return streamTokenizer . ttype != StreamTokenizer . TT_EOF && streamTokenizer . ttype != - 1 ;
public class IdType { /** * Returns a view of this ID as a fully qualified URL , given a server base and * resource name ( which will only be used if the ID does not already contain * those respective parts ) . Essentially , because IdType can contain either a * complete URL or a partial one ( or even jut a simple ID ) , this method may be * used to translate into a complete URL . * @ param theServerBase * The server base ( e . g . " http : / / example . com / fhir " ) * @ param theResourceType * The resource name ( e . g . " Patient " ) * @ return A fully qualified URL for this ID ( e . g . * " http : / / example . com / fhir / Patient / 1 " ) */ @ Override public IdType withServerBase ( String theServerBase , String theResourceType ) { } }
return new IdType ( theServerBase , theResourceType , getIdPart ( ) , getVersionIdPart ( ) ) ;
public class KamManager { /** * Static main method to launch the KAM Manager tool . * @ param args { @ link String String [ ] } the command - line arguments */ public static void main ( String [ ] args ) throws Exception { } }
KamManager app = new KamManager ( args ) ; app . run ( ) ;
public class IfdTags {
    /**
     * Adds a tag to the set and indexes it by numeric id and (when known) by name.
     *
     * @param tag the tag to add
     */
    public void addTag(TagValue tag) {
        // Tags are appended in arrival order (a previous sorted-insert was disabled).
        tags.add(tag);
        // Index by numeric id, keeping the FIRST tag seen for a given id.
        if (!hashTagsId.containsKey(tag.getId())) {
            hashTagsId.put(tag.getId(), tag);
        }
        // Resolve the tag's dictionary entry to also index it by name.
        Tag t = TiffTags.getTag(tag.getId());
        if (t != null) {
            // NOTE(review): this only REPLACES an existing name entry and never adds a
            // new one — the opposite of the id-index policy above, which adds-if-absent.
            // Looks suspicious (a new name can never enter the map); confirm whether
            // the condition should be negated or the check dropped entirely.
            if (hashTagsName.containsKey(t.getName())) {
                hashTagsName.put(t.getName(), tag);
            }
        }
    }
}
public class ThreadPoolExecutor {
    /**
     * Sets the core number of threads. If the new value is smaller than the
     * current value, excess idle workers are interrupted so they terminate
     * when they next become idle. If larger, new workers are started as
     * needed to run any queued tasks.
     *
     * @param corePoolSize the new core size
     * @throws IllegalArgumentException if {@code corePoolSize < 0}
     */
    public void setCorePoolSize(int corePoolSize) {
        if (corePoolSize < 0)
            throw new IllegalArgumentException();
        int delta = corePoolSize - this.corePoolSize;
        this.corePoolSize = corePoolSize;
        if (workerCountOf(ctl.get()) > corePoolSize)
            // shrinking: signal idle workers so the pool drains down to the new size
            interruptIdleWorkers();
        else if (delta > 0) {
            // We don't really know how many new threads are "needed".
            // As a heuristic, prestart enough new workers (up to new
            // core size) to handle the current number of tasks in
            // queue, but stop if queue becomes empty while doing so.
            int k = Math.min(delta, workQueue.size());
            while (k-- > 0 && addWorker(null, true)) {
                if (workQueue.isEmpty())
                    break;
            }
        }
    }
}