signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class FileUtil { /** * Adds folder to the archive . * @ param path path to the folder * @ param srcFolder folder name * @ param zip zip archive * @ throws IOException */ public static void addFolderToZip ( String path , String srcFolder , ZipOutputStream zip ) throws IOException { } }
File folder = new File ( srcFolder ) ; if ( folder . list ( ) . length == 0 ) { addFileToZip ( path , srcFolder , zip , true ) ; } else { for ( String fileName : folder . list ( ) ) { if ( path . equals ( "" ) ) { addFileToZip ( folder . getName ( ) , srcFolder + "/" + fileName , zip , false ) ; } else { addFileToZip ( path + "/" + folder . getName ( ) , srcFolder + "/" + fileName , zip , false ) ; } } }
public class ObjectMapperFactory {

    /**
     * Builds the core Jackson {@link ObjectMapper} from the optional configuration and {@link JsonFactory}.
     *
     * @param jacksonConfiguration The configuration (may be null; lenient defaults are applied)
     * @param jsonFactory The JSON factory (may be null; Jackson's default factory is used)
     * @return The {@link ObjectMapper}
     */
    @Singleton
    @Primary
    @BootstrapContextCompatible
    public ObjectMapper objectMapper(@Nullable JacksonConfiguration jacksonConfiguration,
                                     @Nullable JsonFactory jsonFactory) {
        ObjectMapper objectMapper = jsonFactory != null ? new ObjectMapper(jsonFactory) : new ObjectMapper();
        final boolean hasConfiguration = jacksonConfiguration != null;
        // Module scanning defaults to ON when no configuration was supplied.
        if (!hasConfiguration || jacksonConfiguration.isModuleScan()) {
            objectMapper.findAndRegisterModules();
        }
        objectMapper.registerModules(jacksonModules);
        SimpleModule module = new SimpleModule(MICRONAUT_MODULE);
        // Register injected serializers: an explicit @Type annotation wins; otherwise the
        // target type is resolved from the serializer's generic supertype, falling back to
        // Jackson's own type resolution when neither is available.
        for (JsonSerializer serializer : serializers) {
            Class<? extends JsonSerializer> type = serializer.getClass();
            Type annotation = type.getAnnotation(Type.class);
            if (annotation != null) {
                Class[] value = annotation.value();
                for (Class aClass : value) {
                    module.addSerializer(aClass, serializer);
                }
            } else {
                Optional<Class> targetType = GenericTypeUtils.resolveSuperGenericTypeArgument(type);
                if (targetType.isPresent()) {
                    module.addSerializer(targetType.get(), serializer);
                } else {
                    module.addSerializer(serializer);
                }
            }
        }
        // Same strategy for deserializers, except there is no untyped fallback registration.
        for (JsonDeserializer deserializer : deserializers) {
            Class<? extends JsonDeserializer> type = deserializer.getClass();
            Type annotation = type.getAnnotation(Type.class);
            if (annotation != null) {
                Class[] value = annotation.value();
                for (Class aClass : value) {
                    module.addDeserializer(aClass, deserializer);
                }
            } else {
                Optional<Class> targetType = GenericTypeUtils.resolveSuperGenericTypeArgument(type);
                targetType.ifPresent(aClass -> module.addDeserializer(aClass, deserializer));
            }
        }
        objectMapper.registerModule(module);
        for (BeanSerializerModifier beanSerializerModifier : beanSerializerModifiers) {
            objectMapper.setSerializerFactory(
                    objectMapper.getSerializerFactory().withSerializerModifier(beanSerializerModifier));
        }
        // Lenient defaults, applied before (and therefore overridable by) the explicit
        // per-feature settings from the configuration below.
        objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
        objectMapper.configure(DeserializationFeature.ACCEPT_SINGLE_VALUE_AS_ARRAY, true);
        objectMapper.configure(DeserializationFeature.UNWRAP_SINGLE_VALUE_ARRAYS, true);
        if (hasConfiguration) {
            // NOTE(review): enableDefaultTyping is deprecated in Jackson 2.10+ in favor of
            // activateDefaultTyping with a PolymorphicTypeValidator — confirm the Jackson
            // version in use before changing this call.
            ObjectMapper.DefaultTyping defaultTyping = jacksonConfiguration.getDefaultTyping();
            if (defaultTyping != null) {
                objectMapper.enableDefaultTyping(defaultTyping);
            }
            JsonInclude.Include include = jacksonConfiguration.getSerializationInclusion();
            if (include != null) {
                objectMapper.setSerializationInclusion(include);
            }
            String dateFormat = jacksonConfiguration.getDateFormat();
            if (dateFormat != null) {
                objectMapper.setDateFormat(new SimpleDateFormat(dateFormat));
            }
            Locale locale = jacksonConfiguration.getLocale();
            if (locale != null) {
                objectMapper.setLocale(locale);
            }
            TimeZone timeZone = jacksonConfiguration.getTimeZone();
            if (timeZone != null) {
                objectMapper.setTimeZone(timeZone);
            }
            PropertyNamingStrategy propertyNamingStrategy = jacksonConfiguration.getPropertyNamingStrategy();
            if (propertyNamingStrategy != null) {
                objectMapper.setPropertyNamingStrategy(propertyNamingStrategy);
            }
            jacksonConfiguration.getSerializationSettings().forEach(objectMapper::configure);
            jacksonConfiguration.getDeserializationSettings().forEach(objectMapper::configure);
            jacksonConfiguration.getMapperSettings().forEach(objectMapper::configure);
            jacksonConfiguration.getParserSettings().forEach(objectMapper::configure);
            jacksonConfiguration.getGeneratorSettings().forEach(objectMapper::configure);
        }
        return objectMapper;
    }
}
public class InternalChannelz { /** * Adds a server socket . */ public void addServerSocket ( InternalInstrumented < ServerStats > server , InternalInstrumented < SocketStats > socket ) { } }
ServerSocketMap serverSockets = perServerSockets . get ( id ( server ) ) ; assert serverSockets != null ; add ( serverSockets , socket ) ;
public class Context { /** * Set the current template . * @ param template - current template */ public Context setTemplate ( Template template ) { } }
checkThread ( ) ; if ( template != null ) { setEngine ( template . getEngine ( ) ) ; } this . template = template ; return this ;
public class HikariPool { /** * { @ inheritDoc } */ @ Override public synchronized void suspendPool ( ) { } }
if ( suspendResumeLock == SuspendResumeLock . FAUX_LOCK ) { throw new IllegalStateException ( poolName + " - is not suspendable" ) ; } else if ( poolState != POOL_SUSPENDED ) { suspendResumeLock . suspend ( ) ; poolState = POOL_SUSPENDED ; }
public class AmazonIdentityManagementClient { /** * Lists all managed policies that are attached to the specified IAM role . * An IAM role can also have inline policies embedded with it . To list the inline policies for a role , use the * < a > ListRolePolicies < / a > API . For information about policies , see < a * href = " https : / / docs . aws . amazon . com / IAM / latest / UserGuide / policies - managed - vs - inline . html " > Managed Policies and * Inline Policies < / a > in the < i > IAM User Guide < / i > . * You can paginate the results using the < code > MaxItems < / code > and < code > Marker < / code > parameters . You can use the * < code > PathPrefix < / code > parameter to limit the list of policies to only those matching the specified path prefix . * If there are no policies attached to the specified role ( or none that match the specified path prefix ) , the * operation returns an empty list . * @ param listAttachedRolePoliciesRequest * @ return Result of the ListAttachedRolePolicies operation returned by the service . * @ throws NoSuchEntityException * The request was rejected because it referenced a resource entity that does not exist . The error message * describes the resource . * @ throws InvalidInputException * The request was rejected because an invalid or out - of - range value was supplied for an input parameter . * @ throws ServiceFailureException * The request processing has failed because of an unknown error , exception or failure . * @ sample AmazonIdentityManagement . ListAttachedRolePolicies * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / iam - 2010-05-08 / ListAttachedRolePolicies " target = " _ top " > AWS * API Documentation < / a > */ @ Override public ListAttachedRolePoliciesResult listAttachedRolePolicies ( ListAttachedRolePoliciesRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeListAttachedRolePolicies ( request ) ;
public class DescribeTransitGatewayVpcAttachmentsResult { /** * Information about the VPC attachments . * @ return Information about the VPC attachments . */ public java . util . List < TransitGatewayVpcAttachment > getTransitGatewayVpcAttachments ( ) { } }
if ( transitGatewayVpcAttachments == null ) { transitGatewayVpcAttachments = new com . amazonaws . internal . SdkInternalList < TransitGatewayVpcAttachment > ( ) ; } return transitGatewayVpcAttachments ;
public class IdentityDescriptionMarshaller { /** * Marshall the given parameter object . */ public void marshall ( IdentityDescription identityDescription , ProtocolMarshaller protocolMarshaller ) { } }
if ( identityDescription == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( identityDescription . getIdentityId ( ) , IDENTITYID_BINDING ) ; protocolMarshaller . marshall ( identityDescription . getLogins ( ) , LOGINS_BINDING ) ; protocolMarshaller . marshall ( identityDescription . getCreationDate ( ) , CREATIONDATE_BINDING ) ; protocolMarshaller . marshall ( identityDescription . getLastModifiedDate ( ) , LASTMODIFIEDDATE_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class Neo4JVertex { /** * Generates a Cypher MATCH statement for the vertex , example : * MATCH ( alias ) WHERE alias . id = { id } AND ( alias : Label1 OR alias : Label2) * @ param alias The node alias . * @ param idParameterName The name of the parameter that contains the vertex id . * @ return the Cypher MATCH predicate or < code > null < / code > if not required to MATCH the vertex . */ public String matchStatement ( String alias , String idParameterName ) { } }
Objects . requireNonNull ( alias , "alias cannot be null" ) ; Objects . requireNonNull ( idParameterName , "idParameterName cannot be null" ) ; // create statement return "MATCH " + matchPattern ( alias ) + " WHERE " + matchPredicate ( alias , idParameterName ) ;
public class Filtering { /** * Creates an iterator yielding last n elements from the source iterable . * Consuming the resulting iterator yields an IllegalArgumentException if * not enough elements can be fetched . E . g : * < code > takeLast ( 2 , [ 1 , 2 , 3 ] ) - > [ 2 , 3 ] < / code > * @ param < E > the iterable element type * @ param howMany number of elements to be yielded * @ param from the source iterable * @ return the resulting iterator */ public static < E > Iterator < E > takeLast ( int howMany , Iterable < E > from ) { } }
dbc . precondition ( from != null , "cannot call last with a null iterable" ) ; return takeLast ( howMany , from . iterator ( ) ) ;
public class RepositoryApi { /** * Creates a tag on a particular ref of a given project . A message and a File instance containing the * release notes are optional . This method is the same as { @ link # createTag ( Object , String , String , String , String ) } , * but instead allows the release notes to be supplied in a file . * < pre > < code > GitLab Endpoint : POST / projects / : id / repository / tags < / code > < / pre > * @ param projectIdOrPath the project in the form of an Integer ( ID ) , String ( path ) , or Project instance * @ param tagName the name of the tag , must be unique for the project * @ param ref the git ref to place the tag on * @ param message the message to included with the tag ( optional ) * @ param releaseNotesFile a whose contents are the release notes ( optional ) * @ return a Tag instance containing info on the newly created tag * @ throws GitLabApiException if any exception occurs * @ deprecated Replaced by TagsApi . CreateTag ( Object , String , String , String , File ) */ public Tag createTag ( Object projectIdOrPath , String tagName , String ref , String message , File releaseNotesFile ) throws GitLabApiException { } }
String releaseNotes ; if ( releaseNotesFile != null ) { try { releaseNotes = FileUtils . readFileContents ( releaseNotesFile ) ; } catch ( IOException ioe ) { throw ( new GitLabApiException ( ioe ) ) ; } } else { releaseNotes = null ; } return ( createTag ( projectIdOrPath , tagName , ref , message , releaseNotes ) ) ;
public class RollbackBean { /** * First store a list of persons then we * store the article using a failure store * method in ArticleManager . * @ ejb : interface - method */ public void rollbackOtherBeanUsing_2 ( ArticleVO article , List persons ) { } }
log . info ( "rollbackOtherBeanUsing_2 method was called" ) ; ArticleManagerPBLocal am = getArticleManager ( ) ; PersonManagerPBLocal pm = getPersonManager ( ) ; pm . storePersons ( persons ) ; am . failureStore ( article ) ;
public class DirectoryFactory { /** * < p > < em > INTERNAL API , only ot be used in class hierarchy < / em > < / p > * Creates a new { @ link DispatchKey } based on the directory - key and * relative path specified , see { @ link Directory # addWatchedDirectory ( WatchedDirectory ) } for further information . * @ param pDirectoryKey Directory - key , must not be { @ code null } * @ param pRelativePath Relative path , must not be { @ code null } * @ return New file - key , never { @ code null } */ DispatchKey newKey ( final Object pDirectoryKey , final Path pRelativePath ) { } }
return fileKeyFactory . newKey ( pDirectoryKey , pRelativePath ) ;
public class FileIoUtil { /** * Reads a file from classpath to a list of String using default charset and log any exception . * @ param _ fileName * @ return file contents or empty list */ public static List < String > readFileFromClassPathAsList ( String _fileName ) { } }
List < String > result = readFileFromClassPathAsList ( _fileName , Charset . defaultCharset ( ) , false ) ; return result == null ? new ArrayList < String > ( ) : result ;
public class QrCode {

    /**
     * Returns true if the input is in the Alphanumeric set (see Table J.1):
     * digits, uppercase A-Z, and the nine punctuation characters " $%*+-./:".
     */
    private static boolean isAlpha(int c) {
        if (c >= '0' && c <= '9') {
            return true;
        }
        if (c >= 'A' && c <= 'Z') {
            return true;
        }
        // The remaining members of the alphanumeric set are exactly these nine characters.
        return " $%*+-./:".indexOf(c) >= 0;
    }
}
public class Ftp { /** * List data of a ftp connection * @ return FTPCLient * @ throws PageException * @ throws IOException */ private AFTPClient actionListDir ( ) throws PageException , IOException { } }
required ( "name" , name ) ; required ( "directory" , directory ) ; AFTPClient client = getClient ( ) ; FTPFile [ ] files = client . listFiles ( directory ) ; if ( files == null ) files = new FTPFile [ 0 ] ; pageContext . setVariable ( name , toQuery ( files , "ftp" , directory , client . getRemoteAddress ( ) . getHostName ( ) ) ) ; writeCfftp ( client ) ; return client ;
public class SqlExecutor { /** * 批量执行非查询语句 < br > * 语句包括 插入 、 更新 、 删除 < br > * 此方法不会关闭Connection * @ param conn 数据库连接对象 * @ param sqls SQL列表 * @ return 每个SQL执行影响的行数 * @ throws SQLException SQL执行异常 * @ since 4.5.6 */ public static int [ ] executeBatch ( Connection conn , Iterable < String > sqls ) throws SQLException { } }
Statement statement = null ; try { statement = conn . createStatement ( ) ; for ( String sql : sqls ) { statement . addBatch ( sql ) ; } return statement . executeBatch ( ) ; } finally { DbUtil . close ( statement ) ; }
public class Matchers { /** * Throwable matcher with only a type argument * @ param type the throwable type to match * @ param < T > type of the matched throwable * @ return the matcher */ public static < T extends Throwable > Matcher < T > isThrowable ( Class < ? > type ) { } }
return IsThrowable . isThrowable ( type ) ;
public class DateHelper {

    /**
     * Formats the given epoch-millisecond timestamp as a UTC date-time string in
     * {@code yyyy-MM-dd'T'HH:mm:ss.SSS'Z'} format.
     *
     * @param time epoch milliseconds
     * @return UTC date and time string
     */
    public static String getUTC(long time) {
        // A fresh formatter per call: SimpleDateFormat is not thread-safe, so it
        // must not be shared as a static instance.
        SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'", Locale.ENGLISH);
        formatter.setTimeZone(TimeZone.getTimeZone("UTC"));
        // DateFormat.format(Object) accepts a Number as epoch millis.
        return formatter.format(time);
    }
}
public class ExpressionTreeRewriter { /** * Invoke the default rewrite logic explicitly . Specifically , it skips the invocation of the expression rewriter for the provided node . */ @ SuppressWarnings ( "unchecked" ) public < T extends Expression > T defaultRewrite ( T node , C context ) { } }
return ( T ) visitor . process ( node , new Context < > ( context , true ) ) ;
public class PoolManager {

    /**
     * Reloads the allocations file if it hasn't been loaded in a while.
     *
     * @return true if the file was actually reloaded
     */
    public boolean reloadAllocsIfNecessary() {
        if (allocFile == null) {
            // A warning has been logged when allocFile is null.
            // We should just return here.
            return false;
        }
        long time = System.currentTimeMillis();
        boolean reloaded = false;
        // Throttle: attempt a reload at most once per ALLOC_RELOAD_INTERVAL.
        if (time > lastReloadAttempt + ALLOC_RELOAD_INTERVAL) {
            lastReloadAttempt = time;
            try {
                File file = new File(allocFile);
                long lastModified = file.lastModified();
                // Reload only when the file changed since the last successful reload AND
                // has been stable for ALLOC_RELOAD_WAIT (avoids reading a half-written file).
                if (lastModified > lastSuccessfulReload && time > lastModified + ALLOC_RELOAD_WAIT) {
                    reloadAllocs();
                    reloaded = true;
                    lastSuccessfulReload = time;
                    lastReloadAttemptFailed = false;
                }
            } catch (Exception e) {
                // Throwing the error further out here won't help - the RPC thread
                // will catch it and report it in a loop. Instead, just log it and
                // hope somebody will notice from the log.
                // We log the error only on the first failure so we don't fill up the
                // JobTracker's log with these messages.
                if (!lastReloadAttemptFailed) {
                    LOG.error("Failed to reload allocations file - " + "will use existing allocations.", e);
                }
                lastReloadAttemptFailed = true;
            }
        }
        return reloaded;
    }
}
public class LiveReloadServer {

    /**
     * Gracefully stops the livereload server: closes client connections, drains the
     * handler executor, closes the listening socket, then joins the accept thread.
     *
     * @throws IOException in case of I/O errors
     */
    public void stop() throws IOException {
        synchronized (this.monitor) {
            // listenThread == null means the server is not running (or already stopped).
            if (this.listenThread != null) {
                closeAllConnections();
                try {
                    // Let in-flight connection handlers finish before tearing down the socket.
                    this.executor.shutdown();
                    this.executor.awaitTermination(1, TimeUnit.MINUTES);
                } catch (InterruptedException ex) {
                    // Re-set the interrupt flag and keep shutting down.
                    Thread.currentThread().interrupt();
                }
                this.serverSocket.close();
                try {
                    // Closing the socket unblocks accept(); wait for the thread to exit.
                    this.listenThread.join();
                } catch (InterruptedException ex) {
                    Thread.currentThread().interrupt();
                }
                // Clear state so stop() is idempotent and start() can run again.
                this.listenThread = null;
                this.serverSocket = null;
            }
        }
    }
}
public class VariableAssignment {

    /**
     * Getter for variableName.
     *
     * @generated
     * @return value of the feature
     */
    public String getVariableName() {
        // UIMA-generated accessor: when feature checking is enabled, verify the feature
        // exists in the type system before reading it through the low-level CAS API.
        if (VariableAssignment_Type.featOkTst && ((VariableAssignment_Type) jcasType).casFeat_variableName == null)
            jcasType.jcas.throwFeatMissing("variableName", "ch.epfl.bbp.uima.types.VariableAssignment");
        return jcasType.ll_cas.ll_getStringValue(addr, ((VariableAssignment_Type) jcasType).casFeatCode_variableName);
    }
}
public class CommandRegistry { /** * if { @ link # allowCommandsWithUsername } is enabled , the username of the bot is removed from * the command * @ param command Command to simplify * @ return Simplified command */ private String removeUsernameFromCommandIfNeeded ( String command ) { } }
if ( allowCommandsWithUsername ) { return command . replace ( "@" + botUsername , "" ) . trim ( ) ; } return command ;
public class OperationParkerImpl { /** * invalidated waiting ops will removed from queue eventually by notifiers . */ public void onMemberLeft ( MemberImpl leftMember ) { } }
for ( WaitSet waitSet : waitSetMap . values ( ) ) { waitSet . invalidateAll ( leftMember . getUuid ( ) ) ; }
public class JarFileResource {

    /**
     * Returns true if the represented resource exists. Caches the positive result in
     * {@code _exists} and may populate {@code _entry}/{@code _directory} as side effects.
     */
    public boolean exists() {
        // Fast path: a previous call already proved existence.
        if (_exists) return true;
        // A URL ending in "!/" points at the jar itself; strip the "jar:" prefix and
        // trailing "!/" and delegate to a plain resource check on the jar file.
        if (_urlString.endsWith("!/")) {
            String file_url = _urlString.substring(4, _urlString.length() - 2);
            try {
                return newResource(file_url).exists();
            } catch (Exception e) {
                LogSupport.ignore(log, e);
                return false;
            }
        }
        boolean check = checkConnection();
        // Is this a root URL?
        if (_jarUrl != null && _path == null) {
            // Then if it exists it is a directory
            _directory = check;
            return true;
        } else {
            // Can we find a file for it?
            JarFile jarFile = null;
            if (check)
                // Yes
                jarFile = _jarFile;
            else {
                // No - so lets look if the root entry exists.
                try {
                    jarFile = ((JarURLConnection) ((new URL(_jarUrl)).openConnection())).getJarFile();
                } catch (Exception e) {
                    LogSupport.ignore(log, e);
                }
            }
            // Do we need to look more closely?
            if (jarFile != null && _entry == null && !_directory) {
                // OK - we have a JarFile, lets look at the entries for our path
                Enumeration e = jarFile.entries();
                while (e.hasMoreElements()) {
                    JarEntry entry = (JarEntry) e.nextElement();
                    // Normalize backslashes so comparisons work on all entry spellings.
                    String name = entry.getName().replace('\\', '/');
                    // Do we have a match
                    if (name.equals(_path)) {
                        _entry = entry;
                        // Is the match a directory
                        _directory = _path.endsWith("/");
                        break;
                    } else if (_path.endsWith("/")) {
                        // Explicit directory path: any entry under it proves it exists.
                        if (name.startsWith(_path)) {
                            _directory = true;
                            break;
                        }
                    } else if (name.startsWith(_path) && name.length() > _path.length() && name.charAt(_path.length()) == '/') {
                        // Implicit directory: an entry "path/child" exists without a "path/" entry.
                        _directory = true;
                        break;
                    }
                }
            }
        }
        // Cache the verdict: exists when a directory was detected or an entry was found.
        _exists = (_directory || _entry != null);
        return _exists;
    }
}
public class DiskUtils { /** * Returns directory name to save cache data in the external storage < p > * This method always returns path of external storage even if it does not exist . < br > * As such , make sure to call isExternalStorageMounted method as state - testing method and then call this function only if state - testing method returns true . * @ param context Context to get external storage information * @ return String containing cache directory name */ public static String getExternalDirPath ( Context context ) { } }
return Environment . getExternalStorageDirectory ( ) . getAbsolutePath ( ) + DATA_FOLDER + File . separator + context . getPackageName ( ) + CACHE_FOLDER ;
public class NodeReportsInner { /** * Retrieve the Dsc node report data by node id and report id . * @ param resourceGroupName Name of an Azure Resource group . * @ param automationAccountName The name of the automation account . * @ param nodeId The Dsc node id . * @ param reportId The report id . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the DscNodeReportInner object */ public Observable < DscNodeReportInner > getAsync ( String resourceGroupName , String automationAccountName , String nodeId , String reportId ) { } }
return getWithServiceResponseAsync ( resourceGroupName , automationAccountName , nodeId , reportId ) . map ( new Func1 < ServiceResponse < DscNodeReportInner > , DscNodeReportInner > ( ) { @ Override public DscNodeReportInner call ( ServiceResponse < DscNodeReportInner > response ) { return response . body ( ) ; } } ) ;
public class KeyVaultClientBaseImpl { /** * Creates or updates a new storage account . This operation requires the storage / set permission . * @ param vaultBaseUrl The vault name , for example https : / / myvault . vault . azure . net . * @ param storageAccountName The name of the storage account . * @ param resourceId Storage account resource id . * @ param activeKeyName Current active storage account key name . * @ param autoRegenerateKey whether keyvault should manage the storage account for the user . * @ param regenerationPeriod The key regeneration time duration specified in ISO - 8601 format . * @ param storageAccountAttributes The attributes of the storage account . * @ param tags Application specific metadata in the form of key - value pairs . * @ param serviceCallback the async ServiceCallback to handle successful and failed responses . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the { @ link ServiceFuture } object */ public ServiceFuture < StorageBundle > setStorageAccountAsync ( String vaultBaseUrl , String storageAccountName , String resourceId , String activeKeyName , boolean autoRegenerateKey , String regenerationPeriod , StorageAccountAttributes storageAccountAttributes , Map < String , String > tags , final ServiceCallback < StorageBundle > serviceCallback ) { } }
return ServiceFuture . fromResponse ( setStorageAccountWithServiceResponseAsync ( vaultBaseUrl , storageAccountName , resourceId , activeKeyName , autoRegenerateKey , regenerationPeriod , storageAccountAttributes , tags ) , serviceCallback ) ;
public class FlowQueueService {

    /**
     * Returns the flows currently listed in the given {@link Flow.Status}.
     *
     * @param cluster  The cluster where flows have run
     * @param status   The flows' status
     * @param limit    Return up to this many Flow instances
     * @param user     Filter flows returned to only this user (if present)
     * @param startRow Start results at this key; combine with {@code limit} to paginate.
     * @return a list of up to {@code limit} Flows
     * @throws IOException in the case of an error retrieving the data
     */
    public List<Flow> getFlowsForStatus(String cluster, Flow.Status status, int limit, String user, byte[] startRow) throws IOException {
        // Row keys are cluster + SEP + status-code + SEP; this prefix bounds the scan.
        byte[] rowPrefix = ByteUtil.join(Constants.SEP_BYTES, Bytes.toBytes(cluster), status.code(), Constants.EMPTY_BYTES);
        if (startRow == null) {
            startRow = rowPrefix;
        }
        Scan scan = new Scan(startRow);
        FilterList filters = new FilterList(FilterList.Operator.MUST_PASS_ALL);
        // early out when prefix ends
        filters.addFilter(new WhileMatchFilter(new PrefixFilter(rowPrefix)));
        if (user != null) {
            // Restrict to rows whose user column matches; rows missing the column are dropped.
            SingleColumnValueFilter userFilter = new SingleColumnValueFilter(Constants.INFO_FAM_BYTES, USER_NAME_COL_BYTES, CompareFilter.CompareOp.EQUAL, Bytes.toBytes(user));
            userFilter.setFilterIfMissing(true);
            filters.addFilter(userFilter);
        }
        scan.setFilter(filters);
        // TODO: need to constrain this by timerange as well to prevent unlimited
        // scans
        // get back the results in a single response
        scan.setCaching(limit);
        List<Flow> results = new ArrayList<Flow>(limit);
        ResultScanner scanner = null;
        Table flowQueueTable = null;
        try {
            flowQueueTable = hbaseConnection.getTable(TableName.valueOf(Constants.FLOW_QUEUE_TABLE));
            scanner = flowQueueTable.getScanner(scan);
            int cnt = 0;
            for (Result r : scanner) {
                Flow flow = createFlowFromResult(r);
                // createFlowFromResult may return null for unparseable rows; skip those.
                if (flow != null) {
                    cnt++;
                    results.add(flow);
                }
                if (cnt >= limit) {
                    break;
                }
            }
        } finally {
            // Close scanner first, then the table, even if the scanner close throws.
            try {
                if (scanner != null) {
                    scanner.close();
                }
            } finally {
                if (flowQueueTable != null) {
                    flowQueueTable.close();
                }
            }
        }
        return results;
    }
}
public class UpdateUserDefinedFunctionRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( UpdateUserDefinedFunctionRequest updateUserDefinedFunctionRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( updateUserDefinedFunctionRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( updateUserDefinedFunctionRequest . getCatalogId ( ) , CATALOGID_BINDING ) ; protocolMarshaller . marshall ( updateUserDefinedFunctionRequest . getDatabaseName ( ) , DATABASENAME_BINDING ) ; protocolMarshaller . marshall ( updateUserDefinedFunctionRequest . getFunctionName ( ) , FUNCTIONNAME_BINDING ) ; protocolMarshaller . marshall ( updateUserDefinedFunctionRequest . getFunctionInput ( ) , FUNCTIONINPUT_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class Channel {

    /**
     * Takes the next element, first recording the calling thread in {@code waiting}.
     * TODO add other methods to secure closing behavior
     *
     * @return the next element
     * @throws InterruptedException if the channel is already closed, or the thread
     *         is interrupted while blocked in the underlying take
     */
    @Override
    public E take() throws InterruptedException {
        // Register BEFORE blocking — presumably a close operation uses this set to
        // interrupt threads parked in the underlying take; TODO confirm with close().
        synchronized (waiting) {
            if (closed)
                throw new InterruptedException("Channel closed");
            waiting.add(Thread.currentThread());
        }
        E e = super.take();
        // Deregister only after the (possibly blocking) take has completed.
        synchronized (waiting) {
            waiting.remove(Thread.currentThread());
        }
        return e;
    }
}
public class ObjectFactory { /** * Create an instance of { @ link JAXBElement } { @ code < } { @ link ArcByBulgeType } { @ code > } * @ param value * Java instance representing xml element ' s value . * @ return * the new instance of { @ link JAXBElement } { @ code < } { @ link ArcByBulgeType } { @ code > } */ @ XmlElementDecl ( namespace = "http://www.opengis.net/gml" , name = "ArcByBulge" , substitutionHeadNamespace = "http://www.opengis.net/gml" , substitutionHeadName = "ArcStringByBulge" ) public JAXBElement < ArcByBulgeType > createArcByBulge ( ArcByBulgeType value ) { } }
return new JAXBElement < ArcByBulgeType > ( _ArcByBulge_QNAME , ArcByBulgeType . class , null , value ) ;
public class PermDAO {

    /**
     * Adds a single Permission to the Role's Permission Collection.
     *
     * @param trans        the authorization transaction context
     * @param perm         the permission whose roles collection is updated
     * @param roleFullName the fully-qualified role name to add
     * @return Result.ok() on success, or a backend error Result on driver/API/IO failure
     */
    public Result<Void> addRole(AuthzTrans trans, PermDAO.Data perm, String roleFullName) {
        // Note: Prepared Statements for Collection updates aren't supported
        // NOTE(review): this CQL is built by direct string concatenation of roleFullName
        // and perm fields — an injection risk if those values are not strictly validated
        // upstream. Prepared statements are unavailable here per the note above, so
        // confirm input validation at the call sites.
        // ResultSet rv =
        try {
            getSession(trans).execute(UPDATE_SP + TABLE + " SET roles = roles + {'" + roleFullName + "'} "
                    + "WHERE " + "ns = '" + perm.ns + "' AND " + "type = '" + perm.type + "' AND "
                    + "instance = '" + perm.instance + "' AND " + "action = '" + perm.action + "';");
        } catch (DriverException | APIException | IOException e) {
            // Backend failure: report (possibly resetting the session) and map to a Result error.
            reportPerhapsReset(trans, e);
            return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);
        }
        wasModified(trans, CRUD.update, perm, "Added role " + roleFullName + " to perm " + perm.ns + '.' + perm.type + '|' + perm.instance + '|' + perm.action);
        return Result.ok();
    }
}
public class ShowCumulatedProducersAction { /** * { @ inheritDoc } */ @ Override protected String getLinkToCurrentPage ( HttpServletRequest req ) { } }
return "mskShowCumulatedProducers" + '?' + PARAM_DECORATOR + '=' + req . getParameter ( PARAM_DECORATOR ) + ( ! StringUtils . isEmpty ( req . getParameter ( PARAM_CATEGORY ) ) ? ( '&' + PARAM_CATEGORY + '=' + req . getParameter ( PARAM_CATEGORY ) ) : "" ) + ( ! StringUtils . isEmpty ( req . getParameter ( PARAM_SUBSYSTEM ) ) ? ( '&' + PARAM_SUBSYSTEM + '=' + req . getParameter ( PARAM_SUBSYSTEM ) ) : "" ) ;
public class UserGroup { /** * MakeScreen Method . */ public ScreenParent makeScreen ( ScreenLoc itsLocation , ComponentParent parentScreen , int iDocMode , Map < String , Object > properties ) { } }
ScreenParent screen = null ; if ( ( iDocMode & ScreenConstants . DOC_MODE_MASK ) == ScreenConstants . DETAIL_MODE ) screen = Record . makeNewScreen ( UserPermissionModel . USER_PERMISSION_GRID_SCREEN_CLASS , itsLocation , parentScreen , iDocMode | ScreenConstants . DONT_DISPLAY_FIELD_DESC , properties , this , true ) ; else if ( ( iDocMode & ScreenConstants . DOC_MODE_MASK ) == UserGroup . USER_INFO_DETAIL_SCREEN ) screen = Record . makeNewScreen ( UserInfoModel . USER_INFO_GRID_SCREEN_CLASS , itsLocation , parentScreen , iDocMode | ScreenConstants . DONT_DISPLAY_FIELD_DESC , properties , this , true ) ; else if ( ( iDocMode & ScreenConstants . MAINT_MODE ) == ScreenConstants . MAINT_MODE ) screen = Record . makeNewScreen ( USER_GROUP_SCREEN_CLASS , itsLocation , parentScreen , iDocMode | ScreenConstants . DONT_DISPLAY_FIELD_DESC , properties , this , true ) ; else screen = Record . makeNewScreen ( USER_GROUP_GRID_SCREEN_CLASS , itsLocation , parentScreen , iDocMode | ScreenConstants . DONT_DISPLAY_FIELD_DESC , properties , this , true ) ; return screen ;
public class ToStringStyle {

    /**
     * <p>Append to the <code>toString</code> a summary of an <code>int</code> array.</p>
     *
     * @param buffer    the <code>StringBuilder</code> to populate
     * @param fieldName the field name, typically not used as already appended
     * @param array     the array to add to the <code>toString</code>,
     *                  not <code>null</code> (NPE otherwise)
     */
    protected void appendSummary(StringBuilder buffer, String fieldName, int[] array) {
        // Delegates to the shared size-summary writer; only the length is reported.
        appendSummarySize(buffer, fieldName, array.length);
    }
}
public class LocIterator { /** * Get next window of specified size , then increment position by specified amount . * @ return Location of next window . * @ param windowSize Size of window to get . * @ param increment Amount by which to shift position . If increment is positive , the position is shifted * toward the end of the bounding location ; if increment is negative , the position is shifted toward * the beginning of the bounding location . * @ throws IndexOutOfBoundsException The next window was not within the bounding location . * @ throws IllegalArgumentException The increment was zero , or windowSize was not positive . */ public Location next ( int windowSize , int increment ) { } }
// Forward windows start at the current offset (suffix then prefix); backward windows end at the current
// offset, with mPosition == 0 treated specially as "take the window from the end of the bounds".
// Any range failure from prefix/suffix is converted to IndexOutOfBoundsException per the documented
// contract; note the original cause is flattened to a string (IndexOutOfBoundsException cannot chain it).
// mPosition is only advanced when the window was obtained successfully.
if ( windowSize <= 0 ) { throw new IllegalArgumentException ( "Window size must be positive." ) ; } if ( increment == 0 ) { throw new IllegalArgumentException ( "Increment must be non-zero." ) ; } Location r ; try { if ( increment > 0 ) { r = mBounds . suffix ( mPosition ) . prefix ( windowSize ) ; } else { if ( mPosition == 0 ) { r = mBounds . suffix ( - windowSize ) ; } else { r = mBounds . prefix ( mPosition ) . suffix ( - windowSize ) ; } } mPosition += increment ; } catch ( Exception e ) { throw new IndexOutOfBoundsException ( e . toString ( ) ) ; } return r ;
public class JvmTypeParameterDeclaratorImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ Override public void eUnset ( int featureID ) { } }
switch ( featureID ) { case TypesPackage . JVM_TYPE_PARAMETER_DECLARATOR__TYPE_PARAMETERS : getTypeParameters ( ) . clear ( ) ; return ; } super . eUnset ( featureID ) ;
public class MiniSat { /** * Returns a new Glucose solver . * @ param f the formula factory * @ return the solver */ public static MiniSat glucose ( final FormulaFactory f ) { } }
// Builds the solver with default MiniSat and Glucose configurations (both builders left unconfigured).
return new MiniSat ( f , SolverStyle . GLUCOSE , new MiniSatConfig . Builder ( ) . build ( ) , new GlucoseConfig . Builder ( ) . build ( ) ) ;
public class DistributionPointFetcher { /** * Download CRL from given URI . */ private static X509CRL getCRL ( URIName name ) throws CertStoreException { } }
URI uri = name . getURI ( ) ; if ( debug != null ) { debug . println ( "Trying to fetch CRL from DP " + uri ) ; } CertStore ucs = null ; try { ucs = URICertStore . getInstance ( new URICertStore . URICertStoreParameters ( uri ) ) ; } catch ( InvalidAlgorithmParameterException | NoSuchAlgorithmException e ) { if ( debug != null ) { debug . println ( "Can't create URICertStore: " + e . getMessage ( ) ) ; } return null ; } Collection < ? extends CRL > crls = ucs . getCRLs ( null ) ; if ( crls . isEmpty ( ) ) { return null ; } else { return ( X509CRL ) crls . iterator ( ) . next ( ) ; }
public class DuffyModel { /** * Duffy function evaluation . * @ param input * initial condition values for every link . The structure is : * < br > * < ul > * < li > link1 initial discharge < / li > * < li > link2 initial discharge < / li > * < li > link3 initial discharge < / li > * < li > . . . < / li > * < li > linkn initial discharge < / li > * < li > link1 subsurface and baseflow < / li > * < li > link2 subsurface and baseflow < / li > * < li > . . . < / li > * < li > linkn subsurface and baseflow < / li > * < li > link1 water storage in non saturated zone of the hillslope < / li > * < li > link2 water storage in non saturated zone of the hillslope < / li > * < li > . . . < / li > * < li > linkn water storage in non saturated zone of the hillslope < / li > * < li > link1 water storage in saturated zone of the hillslope < / li > * < li > link2 water storage in saturated zone of the hillslope < / li > * < li > . . . < / li > * < li > linkn water storage in saturated zone of the hillslope < / li > * < / ul > * @ param rainArray * the array of precipitation ( in mm / h ) for each hillslope * centroid ( to be ordered in a consistent way with the * linksList ) * @ param etpArray * @ param timeinMinutes * the time * @ return */ public double [ ] eval ( double currentTimeInMinutes , double [ ] input , double [ ] rainArray , double [ ] etpArray , boolean isAtFinalSubtimestep ) { } }
// Right-hand side of the Duffy ODE system, evaluated per hillslope from the most downstream
// element backwards. The state vector is laid out in four banks of linksNum entries:
//   [0..n)   surface discharge, [n..2n) subsurface/base flow,
//   [2n..3n) unsaturated storage S1, [3n..4n) saturated storage S2.
// For each hillslope: clamp state to physical minima, compute infiltration/runoff/ET/recharge
// fluxes, optionally delay discharge through a distributor when the area exceeds
// THRESHOLD_AREA, accumulate tributary contributions (possibly overridden by hydrometer/dam
// contributors), route through K_Q and fill the matching four output banks.
// NOTE(review): input[] is clamped in place, so the caller's state array is mutated -- confirm
// the solver (DiffEqSolver.RKF per the inline note) expects that.
// NOTE(review): units in comments (m3phr, mm/h) are taken from the original author's inline
// annotations, not independently verified.
// the input ' s length is twice the number of links . . . the first half // corresponds to links // discharge and the second to hillslopes storage // System . out . println ( input . length ) ; // define the month long currentTimeInMillis = ( long ) ( currentTimeInMinutes * 60.0 * 1000.0 ) ; int linksNum = orderedHillslopes . size ( ) ; // linksConectionStruct . headsArray . length ; // double mstold = 0.0; double [ ] output = new double [ input . length ] ; for ( int i = linksNum - 1 ; i >= 0 ; i -- ) { // start from the last pieces HillSlopeDuffy currentHillslope = ( HillSlopeDuffy ) orderedHillslopes . get ( i ) ; Parameters parameters = currentHillslope . getParameters ( ) ; /* * NOTE : Initial conditions are . . . input [ i ] for link discharge * input [ i + nLi ] for link base flow input [ i + 2 * nLi ] for unsaturated * hillslope S1 input [ i + 3 * nLi ] for saturated hillslope S2 . input [ ] * is updated for each time step in DiffEqSolver . RKF . */ double prec_mphr = rainArray [ i ] / 1000.0 ; // input precipitation is in mm / h double area_m2 = currentHillslope . getHillslopeArea ( ) ; // automatically in m2 from the features /* * Added some check for phisic consistency of the parameters */ // if ( input [ i + 3 * linksNum ] ! = input [ i + 3 * linksNum ] ) { // System . out . println ( ) ; double minsupdischarge = parameters . getqqsupmin ( ) * currentHillslope . getUpstreamArea ( null ) / 1E6 ; if ( input [ i ] < minsupdischarge ) { input [ i ] = minsupdischarge ; // System . out // . println ( // " Current superficial discharge is less than the minimum value , setted to it for the basin " // + currentHillslope . getHillslopeId ( ) ) ; } double minsubdischarge = parameters . getqqsubmin ( ) * currentHillslope . getUpstreamArea ( null ) / 1E6 ; if ( input [ i + linksNum ] < minsubdischarge ) { input [ i + linksNum ] = minsubdischarge ; // System . out // . 
println ( // " Current subsuperficial discharge is less than the minimum value , setted to it for the basin " // + currentHillslope . getHillslopeId ( ) ) ; } if ( input [ i + 2 * linksNum ] < parameters . getS1residual ( ) ) { input [ i + 2 * linksNum ] = parameters . getS1residual ( ) ; // System . out // . println ( // " Current S1 parameter is less than the minimum value , setted to it for the basin " // + currentHillslope . getHillslopeId ( ) ) ; } if ( input [ i + 3 * linksNum ] < parameters . getS2residual ( ) ) { input [ i + 3 * linksNum ] = parameters . getS2residual ( ) ; // System . out // . println ( // " Current S2 parameter is less than the minimum value , setted to it for the basin " // + currentHillslope . getHillslopeId ( ) ) ; } /* HILLSLOPE FLUX CONDITIONS */ satsurf = parameters . getS2Param ( ) * ( input [ i + 3 * linksNum ] ) ; // dimless // double areasat = satsurf * area _ m2; mst = ( input [ i + 2 * linksNum ] ) / ( parameters . getS2max ( ) - ( input [ i + 3 * linksNum ] ) ) ; // dimless if ( Double . isInfinite ( mst ) ) { mst = MSTMAX ; } // if ( ( mst - mstold ) > 0.01 ) { // System . out . println ( " mst " + mst + " mstold " + mstold ) ; // mstold = mst ; // Ku = hillSlopesInfo . Ks ( currentHillslope ) // * ( Math . pow ( mst , hillSlopesInfo . MstExp ( currentHillslope ) ) ) ; / / // mphr /* HILLSLOPE S1 - SURFACE FLUX VALUES */ if ( prec_mphr < parameters . getKs ( ) ) { inf = ( 1.0 - satsurf ) * area_m2 * prec_mphr ; // m3phr qdh = 0.0 ; // m3phr } else { inf = ( 1.0 - satsurf ) * area_m2 * parameters . getKs ( ) ; // m3phr qdh = ( 1.0 - satsurf ) * area_m2 * ( prec_mphr - parameters . getKs ( ) ) ; // m3phr } Double eTrate = parameters . getETrate ( ) ; if ( etpArray != null ) { qe1 = etpArray [ i ] ; } else { if ( input [ i + 2 * linksNum ] > parameters . 
getS1residual ( ) ) { qe1 = eTrate * area_m2 * ( 1.0 - satsurf ) * mst ; // m3phr } else { qe1 = 0.0 ; } } /* HILLSLOPE S1 - S2 FLUX VALUE */ // re = 1100.0 // * ( input [ i + 2 * linksNum ] / parameters . getS2max ( ) ) // + 300.0 // * ( ( input [ i + 2 * linksNum ] / parameters . getS2max ( ) ) + 5) // * Math . pow ( ( input [ i + 3 * linksNum ] / parameters . getS2max ( ) ) , // 2.0 ) ; re = parameters . getKs ( ) * area_m2 * ( 1.0 - satsurf ) * ( Math . pow ( mst , parameters . getMstExp ( ) ) ) ; // m3phr /* HILLSLOPE S2 - SURFACE FLUX VALUES */ qds = satsurf * area_m2 * prec_mphr ; // m3phr if ( etpArray != null ) { qe2 = etpArray [ i ] ; } else { qe2 = eTrate * area_m2 * satsurf ; // m3phr , } qs = parameters . getRecParam ( ) * ( input [ i + 3 * linksNum ] ) ; // m3phr /* HILLSLOPE DIRECT RUNOFF ( TOTAL ) FLUXES */ // System . out . println ( " qdh = " + qdh ) ; // System . out . println ( " qds = " + qds ) ; qd = qdh + qds ; // m3phr if ( Double . isNaN ( qs ) || Double . isNaN ( qd ) ) { if ( Double . isNaN ( qs ) ) { throw new ModelsIllegalargumentException ( "Subsuperficial discharge for the hillslope " + currentHillslope . getHillslopeId ( ) + " " + i + " is NaN" , this . getClass ( ) . getSimpleName ( ) , pm ) ; } else { throw new ModelsIllegalargumentException ( "Timestep " + currentTimeInMinutes + "Superficial discharge for the hillslope " + currentHillslope . getHillslopeId ( ) + " " + i + " is NaN" + "\nValue of qdh " + qdh + "\nValue of qds " + qds + "\nPrecipitation " + prec_mphr + "\nSatsurf " + satsurf , this . getClass ( ) . getSimpleName ( ) , pm ) ; } } if ( isAtFinalSubtimestep ) { pm . message ( "timeinmin = " + currentTimeInMinutes + "\tbacino: " + i + "\tqdh = " + qdh + "\tqds = " + qds + "\tre = " + re + "\tqs = " + qs + "\tmst = " + mst + "\tinf = " + inf + "\tqe1 = " + qe1 + "\tqe2 = " + qe2 ) ; } /* * if the area is > 0.1 km2 , we consider the delay effect * of the hillslope . 
*/ if ( area_m2 > THRESHOLD_AREA ) { // distribute the discharge int hillslopeId = currentHillslope . getHillslopeId ( ) ; ADischargeDistributor dischargeDistributor = hillslopeId2DischargeDistributor . get ( hillslopeId ) ; qs = dischargeDistributor . calculateSubsuperficialDischarge ( qs , satsurf , currentTimeInMillis ) ; qd = dischargeDistributor . calculateSuperficialDischarge ( qd , satsurf , currentTimeInMillis ) ; } /* LINK FLUX ( Q ) */ /* * Below , i = link # , j = id of connecting links , Array [ i ] [ j ] = link # for * connecting link */ /* LINK FLUX ( Q SUBSURFACE , BASE FLOW ) */ /* * Below , i = link # , j = id of connecting links , Array [ i ] [ j ] = link # for * connecting link */ Q_trib = 0.0D ; Qs_trib = 0.0D ; List < IHillSlope > connectedUpstreamHillSlopes = currentHillslope . getConnectedUpstreamElements ( ) ; if ( connectedUpstreamHillSlopes != null ) { for ( IHillSlope hillSlope : connectedUpstreamHillSlopes ) { PfafstetterNumber pNum = hillSlope . getPfafstetterNumber ( ) ; int index = orderedHillslopes . indexOf ( hillSlope ) ; boolean doCalculate = true ; for ( IDischargeContributor dContributor : dischargeContributorList ) { Double contributedDischarge = dContributor . getDischarge ( pNum . toString ( ) ) ; contributedDischarge = dContributor . mergeWithDischarge ( contributedDischarge , input [ index ] ) ; if ( ! isNovalue ( contributedDischarge ) ) { if ( doLog && doPrint ) { pm . message ( "----> For hillslope " + currentHillslope . getPfafstetterNumber ( ) + " using hydrometer/dams data in pfafstetter: " + pNum . 
toString ( ) + "(meaning added " + contributedDischarge + " instead of " + input [ index ] + ")" ) ; } double dischargeRatio = 0.3 ; // input [ index ] / ( input [ index ] + // input [ index + linksNum ] ) ; Q_trib = dischargeRatio * contributedDischarge ; // units m ^ 3 / s Qs_trib = contributedDischarge - Q_trib ; // units m ^ 3 / s doCalculate = false ; } } if ( doCalculate ) { // at the same position we can query the input array Q_trib += input [ index ] ; // units m ^ 3 / s Qs_trib += input [ index + linksNum ] ; // units m ^ 3 / s } } } double K_Q = AdigeUtilities . doRouting ( input [ i ] , currentHillslope , routingType ) ; /* * if ( i = = 62 ) { System . out . println ( " WD ratio = " + * linksHydraulicInfo . Width ( i ) / flowdepth ) ; System . out . println ( " * Mannings v ( m / s ) = " + * ( Math . pow ( hydrad , 2 . / 3 . ) * Math . pow ( linksHydraulicInfo . Slope ( i ) , 1/2 . ) / mannings _ n ) ) ; * System . out . println ( " K _ Q = " + * ( Math . pow ( hydrad , 2 . / 3 . ) * Math . pow ( linksHydraulicInfo . Slope ( i ) , 1/2 . ) / mannings _ n ) * * Math . pow ( linksHydraulicInfo . Length ( i ) , - 1 ) ) ; } */ if ( input [ i ] == 0.0D ) K_Q = 1e-10 ; if ( Double . isNaN ( qs ) || Double . isNaN ( qd ) ) { pm . errorMessage ( "Problems in basin: " + currentHillslope . getHillslopeId ( ) + " " + i ) ; // $ NON - NLS - 1 $ / / $ NON - NLS - 2 $ if ( area_m2 < THRESHOLD_AREA ) { qd = 0.0 ; qs = 0.0 ; inf = 0.0 ; qe1 = 0.0 ; qe2 = 0.0 ; re = 0.0 ; System . out . println ( "All the contributes are set to zero." ) ; } } /* OUTPUT */ if ( area_m2 > THRESHOLD_AREA ) { // LINK dQ / dt ; big ( ) term is m ^ 3 / s , 60 * K _ Q is 1 / min output [ i ] = 60.0D * K_Q * ( ( 1.0D / 3600. ) * qd + Q_trib - input [ i ] ) ; // 60.0 * K _ Q * ( Q _ trib - input [ i ] ) + ( 1.0 / 3600.0 ) * qd / deltaTinMinutes ; // LINK dQs / dt - > ( m ^ 3 / s ) / min output [ i + linksNum ] = 60.0 * K_Q * ( Qs_trib - input [ i + linksNum ] ) + 60.0 * K_Q * ( 1.0 / 3600. 
) * ( qs ) ; // HILLSLOPE dS1 / dt - > m3 / min output [ i + ( 2 * linksNum ) ] = ( 1.0 / 60.0 ) * ( inf - re - qe1 ) ; // HILLSLOPE dS2 / dt - > m3 / min output [ i + ( 3 * linksNum ) ] = ( 1.0 / 60.0 ) * ( re - qs - qe2 ) ; } else { output [ i ] = 60.0D * K_Q * ( ( 1.0D / 3600. ) * qd + Q_trib - input [ i ] ) ; output [ i + linksNum ] = 60.0D * K_Q * ( ( 1.0D / 3600. ) * ( qs ) + Qs_trib - input [ i + linksNum ] ) ; output [ i + ( 2 * linksNum ) ] = ( 1.0D / 60.0 ) * ( inf - re - qe1 ) ; if ( output [ i + ( 2 * linksNum ) ] != output [ i + ( 2 * linksNum ) ] || output [ i + ( 2 * linksNum ) ] == 0.0 ) { throw new ModelsIllegalargumentException ( "Invalid value of S1, please check the parameters." + output [ i + ( 2 * linksNum ) ] , this , pm ) ; } output [ i + ( 3 * linksNum ) ] = ( 1.0D / 60.0 ) * ( re - qs - qe2 ) ; } if ( output [ i + ( 3 * linksNum ) ] != output [ i + ( 3 * linksNum ) ] || output [ i + ( 2 * linksNum ) ] == 0. ) { throw new ModelsIllegalargumentException ( "Invalid value of S2, please check the parameters." , this . getClass ( ) . getSimpleName ( ) , pm ) ; } } doPrint = false ; return output ;
public class RequestHttp1 { /** * Fills an ArrayList with the header values matching the key . * @ param values ArrayList which will contain the maching values . * @ param key the header key to select . */ @ Override public void getHeaderBuffers ( String key , ArrayList < CharSegment > values ) { } }
int i = - 1 ; while ( ( i = matchNextHeader ( i + 1 , key ) ) >= 0 ) { values . add ( _headerValues [ i ] ) ; }
public class MavenArtifactProvisionOption { /** * { @ inheritDoc } */ public MavenArtifactProvisionOption groupId ( final String groupId ) { } }
// Delegates to the wrapped artifact and invalidates the cached update flag; returns itself() for fluent chaining.
m_artifact . groupId ( groupId ) ; m_updateUsed = false ; return itself ( ) ;
public class ComponentType { /** * { @ inheritDoc } Get the type of a particular ( named ) property */ public Type getPropertyType ( String propertyName ) { } }
Type type = ( Type ) propertyTypes . get ( propertyName ) ; if ( null == type ) { Class < ? > propertyType = PropertyUtils . getPropertyType ( componentClass , propertyName ) ; if ( null != propertyType ) { return new IdentifierType ( propertyType ) ; } } return type ;
public class AbstractIntSet { /** * { @ inheritDoc } */ @ Override public void fill ( int from , int to ) { } }
if ( from > to ) throw new IndexOutOfBoundsException ( "from: " + from + " > to: " + to ) ; for ( int e = from ; e <= to ; e ++ ) add ( e ) ;
public class RDFDatabaseReader { /** * Iterates through the list of Seqs read in adding the parent node and * then adding children which belong to it . * @ param db The database to add nodes to . * @ param descriptionMap The list of RDF : Description nodes . * @ param seqMap The list of RDF : Seq nodes , which contain RDF : li nodes . * @ throws Exception on unrecoverable error . */ private void createParentChildRelationships ( Database db , HashMap < String , Account > descriptionMap , HashMap < String , ArrayList < String > > seqMap ) throws Exception { } }
// Iterative breadth-first walk over the Seq graph starting at the root account URI; an explicit
// work list (parentIdStack) is used instead of recursion. Parents with no resolvable Description
// or no parent of their own are logged and dropped (parentAccount set to null skips their children).
// Children are linked under their parent once, marked as folders when they have a Seq entry of
// their own, and queued for processing. Duplicate and unresolvable children are logged and skipped.
// NOTE(review): non-root parents found absent from the db are only validated here, never inserted --
// confirm insertion happens elsewhere.
// List of ID ' s used to avoid recursion ArrayList < String > parentIdStack = new ArrayList < String > ( ) ; // Verify the root node exists if ( ! seqMap . containsKey ( Account . ROOT_ACCOUNT_URI ) ) throw new Exception ( "File does not contain the root account, '" + Account . ROOT_ACCOUNT_URI + "'" ) ; parentIdStack . add ( Account . ROOT_ACCOUNT_URI ) ; // Until we run out of parent nodes . . . while ( parentIdStack . size ( ) > 0 ) { String parentId = parentIdStack . get ( 0 ) ; Account parentAccount = descriptionMap . get ( parentId ) ; parentIdStack . remove ( 0 ) ; // Attempt to add the parent node if it ' s not the root . Root already exists // in the database by default . if ( parentId . compareTo ( Account . ROOT_ACCOUNT_URI ) != 0 ) { if ( parentAccount != null ) { // If the parent node is not already in the db , add it if ( db . findAccountById ( parentId ) == null ) { Account parentParentAccount = db . findParent ( parentAccount ) ; if ( parentParentAccount == null ) { logger . warning ( "SeqNode[" + parentId + "] does not have a parent, will be dropped" ) ; parentAccount = null ; } } } else { logger . warning ( "SeqNode[" + parentId + "] does not have a matching RDF:Description node, it will be dropped" ) ; } } else { parentAccount = db . getRootAccount ( ) ; } // Now add the children if ( parentAccount != null ) { for ( String childId : seqMap . get ( parentId ) ) { Account childAccount = descriptionMap . get ( childId ) ; if ( childAccount != null ) { if ( ! parentAccount . hasChild ( childAccount ) ) { parentAccount . getChildren ( ) . add ( childAccount ) ; // If the child has children , add it to the parentIdStack for later processing , also mark // it as a folder ( which should have been done already based on it not having an algorithm . if ( seqMap . containsKey ( childAccount . getId ( ) ) ) { parentIdStack . add ( childId ) ; childAccount . setIsFolder ( true ) ; } } else { logger . 
warning ( "Duplicate child '" + childId + "' found of parent '" + parentAccount . getId ( ) + "'" ) ; } } else { logger . warning ( "Cannot find RDF:Description for '" + childId + "', it will be dropped" ) ; } } } }
public class ItemReferenceLink { /** * / * ( non - Javadoc ) * @ see com . ibm . ws . sib . msgstore . cache . xalist . Task # postAbort ( com . ibm . ws . sib . msgstore . Transaction ) */ public final void abortAdd ( final PersistentTransaction transaction ) throws SevereMessageStoreException { } }
// The referred item's ID is captured BEFORE super.abortAdd() runs, then -- if it is a real ID --
// the referenced ItemLink's reference count increment is rolled back. A missing membership for the
// referred ID is only a debug-trace event, not an error. Entry/exit tracing brackets the whole method.
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( this , tc , "abortAdd" , transaction ) ; long _referredID = getTuple ( ) . getReferredID ( ) ; super . abortAdd ( transaction ) ; if ( AbstractItem . NO_ID != _referredID ) { MessageStoreImpl msi = getMessageStoreImpl ( ) ; ItemLink itemLink = ( ItemLink ) msi . getMembership ( _referredID ) ; if ( null != itemLink ) { itemLink . rollbackIncrementReferenceCount ( transaction ) ; } else { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { SibTr . debug ( this , tc , "reference to non-existent item: " + _referredID ) ; } } } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( this , tc , "abortAdd" ) ;
public class SynchroLogger { /** * Log a byte array as a hex dump . * @ param data byte array */ public static void log ( byte [ ] data ) { } }
if ( LOG != null ) { LOG . println ( ByteArrayHelper . hexdump ( data , true , 16 , "" ) ) ; LOG . flush ( ) ; }
public class PageFlowController { /** * Store this object in the user session , in the appropriate place . Used by the framework ; normally should not be * called directly . */ public void persistInSession ( HttpServletRequest request , HttpServletResponse response ) { } }
// Installing this flow implicitly destroys the previously-current one, so when a destroyable
// current flow exists (non-null and not parked on the nesting stack) the swap is done while
// holding its monitor to keep the destroy atomic against other request threads.
PageFlowController currentPageFlow = PageFlowUtils . getCurrentPageFlow ( request , getServletContext ( ) ) ; // This code implicitly destroys the current page flow . In order to prevent multiple threads // from executing inside the JPF at once , synchronize on it in order to complete the " destroy " // atomically . if ( currentPageFlow != null && ! currentPageFlow . isOnNestingStack ( ) ) { synchronized ( currentPageFlow ) { InternalUtils . setCurrentPageFlow ( this , request , getServletContext ( ) ) ; } } // Here , there is no previous page flow to synchronize upon before destruction else { InternalUtils . setCurrentPageFlow ( this , request , getServletContext ( ) ) ; }
public class AutoJsonRpcClientProxyCreator { /** * Appends the base path to the path found in the interface . */ private String appendBasePath ( String path ) { } }
// Resolves path against baseUrl via java.net.URL's context constructor; an unparseable
// combination is surfaced as IllegalArgumentException with the original MalformedURLException as cause.
try { return new URL ( baseUrl , path ) . toString ( ) ; } catch ( MalformedURLException e ) { throw new IllegalArgumentException ( format ( "Cannot combine URLs '%s' and '%s' to valid URL." , baseUrl , path ) , e ) ; }
public class RestTemplate { /** * DELETE */ public void delete ( String url , Object ... urlVariables ) throws RestClientException { } }
// Issues a DELETE with no request callback and no response extractor (response body is discarded).
execute ( url , HttpMethod . DELETE , null , null , urlVariables ) ;
public class Section508Compliance { /** * implements the visitor to reset the stack * @ param obj * the context object for the currently visited code block */ @ Override public void visitCode ( Code obj ) { } }
// Resets per-method detector state, then lets the superclass drive the bytecode scan; every label
// left in localLabels afterwards (i.e. never passed to setLabelFor) is reported as a
// S508C_NO_SETLABELFOR bug, with a source line attached when one was recorded for it.
stack . resetForMethodEntry ( this ) ; localLabels . clear ( ) ; super . visitCode ( obj ) ; for ( SourceLineAnnotation sla : localLabels . values ( ) ) { BugInstance bug = new BugInstance ( this , BugType . S508C_NO_SETLABELFOR . name ( ) , NORMAL_PRIORITY ) . addClass ( this ) . addMethod ( this ) ; if ( sla != null ) { bug . addSourceLine ( sla ) ; } bugReporter . reportBug ( bug ) ; }
public class DTDEntityAttr { /** * Method called by the validator object * to ask attribute to verify that the default it has ( if any ) is * valid for such type . */ @ Override public void validateDefault ( InputProblemReporter rep , boolean normalize ) throws XMLStreamException { } }
// Validates the default as an XML name (optionally writing back the normalized form), then checks
// that the name refers to a declared entity via the reporter, which -- as the original author noted --
// is downcast to MinimalDTDReader to reach findEntity(); checkEntity reports any violation.
String normStr = validateDefaultName ( rep , normalize ) ; if ( normalize ) { mDefValue . setValue ( normStr ) ; } // Ok , but was it declared ? /* 03 - Dec - 2004 , TSa : This is rather ugly - - need to know we * actually really get a DTD reader , and DTD reader needs * to expose a special method . . . but it gets things done . */ EntityDecl ent = ( ( MinimalDTDReader ) rep ) . findEntity ( normStr ) ; checkEntity ( rep , normStr , ent ) ;
public class NavMesh { /** * / it does not validate the reference . */ Tupple2 < MeshTile , Poly > getTileAndPolyByRefUnsafe ( long ref ) { } }
int [ ] saltitip = decodePolyId ( ref ) ; int it = saltitip [ 1 ] ; int ip = saltitip [ 2 ] ; return new Tupple2 < > ( m_tiles [ it ] , m_tiles [ it ] . data . polys [ ip ] ) ;
public class PreDestroyMonitor { /** * allows late - binding of scopes to PreDestroyMonitor , useful if more than one * Injector contributes scope bindings * @ param bindings additional annotation - to - scope bindings to add */ public void addScopeBindings ( Map < Class < ? extends Annotation > , Scope > bindings ) { } }
// NOTE(review): bindings are silently ignored when the scope cleaner is no longer running --
// confirm callers expect this no-op after shutdown rather than an error.
if ( scopeCleaner . isRunning ( ) ) { scopeBindings . putAll ( bindings ) ; }
public class MultiMap { /** * Stores a { @ code key } - { @ code value } pair . */ public void put ( K key , V value ) { } }
Set < V > set = storage . get ( key ) ; if ( set == null ) { set = new HashSet < > ( ) ; storage . put ( key , set ) ; } set . add ( value ) ;
public class MpScheduler { /** * Just using " put " on the dup counter map is unsafe . * It won ' t detect the case where keys collide from two different transactions . */ void safeAddToDuplicateCounterMap ( long dpKey , DuplicateCounter counter ) { } }
DuplicateCounter existingDC = m_duplicateCounters . get ( dpKey ) ; if ( existingDC != null ) { // this is a collision and is bad existingDC . logWithCollidingDuplicateCounters ( counter ) ; VoltDB . crashGlobalVoltDB ( "DUPLICATE COUNTER MISMATCH: two duplicate counter keys collided." , true , null ) ; } else { m_duplicateCounters . put ( dpKey , counter ) ; }
public class UserResource { /** * User confirm changing email . * The token is verified and the temporary stored email will not be permanently saved as the users email . * @ param userId unique email * @ return 204 if success */ @ POST @ Path ( "{id}/email/confirm" ) @ PermitAll public Response confirmChangeEmail ( @ PathParam ( "id" ) Long userId , EmailRequest request ) { } }
checkNotNull ( userId ) ; checkNotNull ( request . getToken ( ) ) ; boolean isSuccess = userService . confirmEmailAddressChangeUsingToken ( userId , request . getToken ( ) ) ; return isSuccess ? Response . noContent ( ) . build ( ) : Response . status ( Response . Status . BAD_REQUEST ) . build ( ) ;
public class BoxFile { /** * Used to retrieve all collaborations associated with the item . * @ param fields the optional fields to retrieve . * @ return An iterable of metadata instances associated with the item . */ public BoxResourceIterable < BoxCollaboration . Info > getAllFileCollaborations ( String ... fields ) { } }
// Delegates to the static BoxCollaboration helper, paging results at GET_COLLABORATORS_PAGE_SIZE.
return BoxCollaboration . getAllFileCollaborations ( this . getAPI ( ) , this . getID ( ) , GET_COLLABORATORS_PAGE_SIZE , fields ) ;
public class CmsDriverManager { /** * Returns the project driver for a given DB context . < p > * @ param dbc the database context * @ return the project driver for the database context */ public I_CmsProjectDriver getProjectDriver ( CmsDbContext dbc ) { } }
if ( ( dbc == null ) || ( dbc . getProjectId ( ) == null ) || dbc . getProjectId ( ) . isNullUUID ( ) ) { return m_projectDriver ; } I_CmsProjectDriver driver = dbc . getProjectDriver ( dbc . getProjectId ( ) ) ; return driver != null ? driver : m_projectDriver ;
public class FnFunc { /** * Builds a function that will execute the specified function < tt > thenFunction < / tt > * only if the result of executing < tt > condition < / tt > on the target object is true , * and will execute the specified function < tt > elseFunction < / tt > otherwise . * The built function can effectively change the target type ( receive < tt > T < / tt > and * return < tt > R < / tt > ) if both < tt > thenFunction < / tt > and < tt > elseFunction < / tt > return * the same type , and this is different than the target type < tt > T < / tt > . * @ param targetType the target type * @ param condition the condition to be executed on the target object * @ param thenFunction the function to be executed on the target object if condition is true * @ param elseFunction the function to be executed on the target object otherwise * @ return a function that executes the " thenFunction " if " condition " is true and " elseFunction " otherwise . */ public static final < T , R > Function < T , R > ifTrueThenElse ( final Type < T > targetType , final IFunction < ? super T , Boolean > condition , final IFunction < ? super T , R > thenFunction , final IFunction < ? super T , R > elseFunction ) { } }
// The leading 'true' selects the branch-on-true variant of IfThenElse; all composition logic lives there.
return new IfThenElse < T , R > ( true , targetType , condition , thenFunction , elseFunction ) ;
public class UrlStringBuilder { /** * Sets a URL parameter , replacing any existing parameter with the same name . * @ param name Parameter name , can not be null * @ param values Values for the parameter , null is valid * @ return this */ public UrlStringBuilder setParameter ( String name , String ... values ) { } }
this . setParameter ( name , values != null ? Arrays . asList ( values ) : null ) ; return this ;
public class RebindConfiguration { /** * Return a { @ link MapperInstance } instantiating the deserializer for the given type * @ param type a { @ link com . google . gwt . core . ext . typeinfo . JType } object . * @ return a { @ link com . google . gwt . thirdparty . guava . common . base . Optional } object . */ public Optional < MapperInstance > getDeserializer ( JType type ) { } }
// Deserializers are keyed by the type's fully-qualified source name; absent entries yield Optional.absent().
return Optional . fromNullable ( deserializers . get ( type . getQualifiedSourceName ( ) ) ) ;
public class FluentValidator { /** * 在某个数组对象上通过 { @ link com . baidu . unbiz . fluentvalidator . annotation . FluentValidate } 注解方式的验证 , * 需要保证 { @ link # configure ( Registry ) } 已经先执行配置完毕 < code > Registry < / code > * 注 : 当数组为空时 , 则会跳过 * @ param t 待验证对象 * @ return FluentValidator */ public < T > FluentValidator onEach ( T [ ] t ) { } }
if ( ArrayUtil . isEmpty ( t ) ) { lastAddCount = 0 ; return this ; } return onEach ( Arrays . asList ( t ) ) ;
public class CommitsApi { /** * Get a list of repository commits in a project . * < pre > < code > GitLab Endpoint : GET / projects / : id / repository / commits < / code > < / pre > * @ param projectIdOrPath the project in the form of an Integer ( ID ) , String ( path ) , or Project instance * @ param ref the name of a repository branch or tag or if not given the default branch * @ param since only commits after or on this date will be returned * @ param until only commits before or on this date will be returned * @ param path the path to file of a project * @ return a list containing the commits for the specified project ID * @ throws GitLabApiException GitLabApiException if any exception occurs during execution */ public List < Commit > getCommits ( Object projectIdOrPath , String ref , Date since , Date until , String path ) throws GitLabApiException { } }
// Delegates to the paged overload at the configured default page size and materializes every page via all().
return ( getCommits ( projectIdOrPath , ref , since , until , path , getDefaultPerPage ( ) ) . all ( ) ) ;
public class ZProxy { /** * Sends a command message to the proxy actor . * Can be useful for programmatic interfaces . * Does not works with commands { @ link Command # CONFIG CONFIG } and { @ link Command # RESTART RESTART } . * @ param command the command to execute . * @ param sync true to read the status in synchronous way , false for asynchronous mode * @ return the read state */ public State command ( Command command , boolean sync ) { } }
// Forwards the command by name to the string-based overload and parses the returned status into the enum.
return State . valueOf ( command ( command . name ( ) , sync ) ) ;
public class MessageFieldDesc { /** * Move the correct fields from this record to the map . * If this method is used , is must be overidden to move the correct fields . */ public int getRawRecordData ( Rec record ) { } }
Field fieldInfo = record . getField ( this . getKey ( ) ) ; if ( fieldInfo != null ) return this . getRawFieldData ( fieldInfo ) ; else return Constant . NORMAL_RETURN ;
public class CacheProviderWrapper { /** * Returns the value to which this map maps the specified cache id . Returns * < tt > null < / tt > if the map contains no mapping for this key . * @ param id cache id whose associated value is to be returned . * @ param template template name associated with cache id ( No effect on CoreCache ) * @ param askPermission True implies that execution must ask the coordinating CacheUnit for permission ( No effect on CoreCache ) . * @ param ignoreCounting True implies that no counting for PMI and cache statistics ( No effect on CoreCache ) * @ return the value to which this map maps the specified cache id , or * < tt > null < / tt > if the map contains no mapping for this cache id . */ @ Override public Object getValue ( Object id , String template , boolean askPermission , boolean ignoreCounting ) { } }
final String methodName = "getValue()" ; Object value = null ; com . ibm . websphere . cache . CacheEntry ce = this . coreCache . get ( id ) ; if ( ce != null ) { value = ce . getValue ( ) ; } if ( tc . isDebugEnabled ( ) ) { Tr . debug ( tc , methodName + " cacheName=" + cacheName + " id=" + id + " value=" + value ) ; } return value ;
public class URLUtil { /** * 获得URL , 常用于使用绝对路径时的情况 * @ param file URL对应的文件对象 * @ return URL * @ exception UtilException MalformedURLException */ public static URL getURL ( File file ) { } }
// Gets the URL for a file; commonly used when working with absolute paths.
// Conversion goes through file.toURI() (which percent-escapes special characters);
// a MalformedURLException is wrapped in the project's unchecked UtilException with its cause preserved.
Assert . notNull ( file , "File is null !" ) ; try { return file . toURI ( ) . toURL ( ) ; } catch ( MalformedURLException e ) { throw new UtilException ( e , "Error occured when get URL!" ) ; }
public class OptionsMethodProcessor { /** * < p > getSupportProduceMediaType . < / p > * @ param containerRequestContext a { @ link javax . ws . rs . container . ContainerRequestContext } object . * @ return a { @ link javax . ws . rs . core . MediaType } object . */ protected static MediaType getSupportProduceMediaType ( ContainerRequestContext containerRequestContext ) { } }
for ( MediaType mediaType : containerRequestContext . getAcceptableMediaTypes ( ) ) { if ( mediaType . isCompatible ( TEXT_PLAIN_TYPE ) || ( mediaType . getType ( ) . equalsIgnoreCase ( "application" ) && ! mediaType . getSubtype ( ) . toLowerCase ( ) . contains ( "html" ) ) || mediaType . getSubtype ( ) . equalsIgnoreCase ( "json" ) || mediaType . getSubtype ( ) . equalsIgnoreCase ( "xml" ) || mediaType . getSubtype ( ) . toLowerCase ( ) . endsWith ( "+json" ) || mediaType . getSubtype ( ) . toLowerCase ( ) . endsWith ( "+xml" ) ) { return mediaType ; } } return null ;
public class MiscUtils { /** * Uses androids android . util . Patterns . WEB _ URL to check if an url is valid . * @ param url Address to check * @ return true if the < code > url < / code > is a valid web address . */ public final static boolean isValidURL ( String url ) { } }
if ( url == null ) { return false ; } else { return Patterns . WEB_URL . matcher ( url ) . matches ( ) ; }
public class KunderaCriteriaBuilder { /** * ( non - Javadoc ) * @ see * javax . persistence . criteria . CriteriaBuilder # avg ( javax . persistence . criteria * . Expression ) */ @ Override public < N extends Number > Expression < Double > avg ( Expression < N > arg0 ) { } }
// TODO Auto - generated method stub return null ;
public class DateTimeField { /** * Set to the min or max value . * @ param iAreaDesc END _ SELECT _ KEY means set to largest value , others mean smallest . */ public void setToLimit ( int iAreaDesc ) // Set this field to the largest or smallest value { } }
// By default compare as ASCII strings m_calendar . set ( DBConstants . FIRST_YEAR , Calendar . JANUARY , 1 , 0 , 0 , 0 ) ; if ( iAreaDesc == DBConstants . END_SELECT_KEY ) m_calendar . set ( DBConstants . LAST_YEAR , Calendar . DECEMBER , 31 , 23 , 59 , 59 ) ; // Highest value java . util . Date time = m_calendar . getTime ( ) ; // Lowest value this . doSetData ( time , DBConstants . DONT_DISPLAY , DBConstants . SCREEN_MOVE ) ;
public class HtmlTool { /** * Finds a set of elements through a CSS selector and wraps them with the * received wrapper element . * @ param root * root element for the selection * @ param selector * CSS selector for the elements to wrap * @ param wrapper * HTML to use for wrapping the selected elements */ public final void wrap ( final Element root , final String selector , final String wrapper ) { } }
final Iterable < Element > elements ; // Selected elements checkNotNull ( root , "Received a null pointer as root element" ) ; checkNotNull ( selector , "Received a null pointer as selector" ) ; checkNotNull ( wrapper , "Received a null pointer as HTML wrap" ) ; // Selects and iterates over the elements elements = root . select ( selector ) ; for ( final Element element : elements ) { element . wrap ( wrapper ) ; }
public class VdmDropAdapterAssistent { /** * Opens an error dialog if necessary . Takes care of complex rules necessary * for making the error dialog look nice . */ private void openError ( IStatus status ) { } }
if ( status == null ) { return ; } String genericTitle = WorkbenchNavigatorMessages . DropAdapter_title ; int codes = IStatus . ERROR | IStatus . WARNING ; // simple case : one error , not a multistatus if ( ! status . isMultiStatus ( ) ) { ErrorDialog . openError ( getShell ( ) , genericTitle , null , status , codes ) ; return ; } // one error , single child of multistatus IStatus [ ] children = status . getChildren ( ) ; if ( children . length == 1 ) { ErrorDialog . openError ( getShell ( ) , status . getMessage ( ) , null , children [ 0 ] , codes ) ; return ; } // several problems ErrorDialog . openError ( getShell ( ) , genericTitle , null , status , codes ) ;
public class Jsoup {
    /**
     * Parse a fragment of HTML, with the assumption that it forms the
     * {@code body} of the HTML.
     *
     * @param bodyHtml body HTML fragment.
     * @param baseUri URL to resolve relative URLs against.
     * @return sane HTML document.
     * @see Document#body()
     */
    public static Document parseBodyFragment(String bodyHtml, String baseUri) {
        // Delegates straight to the parser's body-fragment entry point.
        return Parser.parseBodyFragment(bodyHtml, baseUri);
    }
}
public class MicroMetaDao {
    /**
     * Runs a conditional query against the given table and returns the matching
     * rows as a list of column-name to value maps.
     * (Original javadoc was mojibake; replaced with an English summary.)
     *
     * @param tableName table to select from.
     * @param condition SQL WHERE clause body (without the "where" keyword).
     * @param cols comma-separated column list for the SELECT.
     * @param orders SQL ORDER BY clause body.
     * @param paramArray bind values for the "?" placeholders in {@code condition}.
     * @param typeArray java.sql.Types codes matching {@code paramArray}.
     * @return the result rows; key casing is normalized via {@code ignoreKeyCase}.
     */
    public List<Map<String, Object>> queryObjByCondition(String tableName, String condition, String cols, String orders, Object[] paramArray, int[] typeArray) {
        // String tableName = changeTableNameCase ( otableName ) ;
        /* JdbcTemplate jdbcTemplate = ( JdbcTemplate ) MicroDbHolder . getDbSource ( dbName ) ; */
        JdbcTemplate jdbcTemplate = getMicroJdbcTemplate();
        // NOTE(review): tableName/cols/condition/orders are concatenated directly into
        // the SQL text; only the "?" placeholders in condition are parameterized.
        // This is SQL-injection prone if any of these come from user input —
        // TODO confirm all callers pass trusted identifiers/clauses only.
        String sql = "select " + cols + " from " + tableName + " where " + condition + " order by " + orders;
        logger.debug(sql);
        logger.debug(Arrays.toString(paramArray));
        List<Map<String, Object>> retList0 = jdbcTemplate.queryForList(sql, paramArray, typeArray);
        // add 201807 ning
        // List < Map < String , Object > > retList = changeOutKeyCase4List ( retList0 ) ;
        // add 201902 ning
        List<Map<String, Object>> retList = ignoreKeyCase((List) retList0);
        return retList;
    }
}
public class FontDescriptorSpecificationImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ Override public void eSet ( int featureID , Object newValue ) { } }
switch ( featureID ) { case AfplibPackage . FONT_DESCRIPTOR_SPECIFICATION__FT_WT_CLASS : setFtWtClass ( ( Integer ) newValue ) ; return ; case AfplibPackage . FONT_DESCRIPTOR_SPECIFICATION__FT_WD_CLASS : setFtWdClass ( ( Integer ) newValue ) ; return ; case AfplibPackage . FONT_DESCRIPTOR_SPECIFICATION__FT_HEIGHT : setFtHeight ( ( Integer ) newValue ) ; return ; case AfplibPackage . FONT_DESCRIPTOR_SPECIFICATION__FT_WIDTH : setFtWidth ( ( Integer ) newValue ) ; return ; case AfplibPackage . FONT_DESCRIPTOR_SPECIFICATION__FT_DS_FLAGS : setFtDsFlags ( ( Integer ) newValue ) ; return ; case AfplibPackage . FONT_DESCRIPTOR_SPECIFICATION__FT_US_FLAGS : setFtUsFlags ( ( Integer ) newValue ) ; return ; } super . eSet ( featureID , newValue ) ;
public class ChangesHolder { /** * Returns the store parameter extracted from the flags . * @ param flags the flags of the Lucene field . * @ return the store parameter corresponding to the given flags . */ private static Field . Store getStoreParameter ( int flags ) { } }
if ( ( flags & STORED_FLAG ) > 0 ) { return Field . Store . YES ; } else { return Field . Store . NO ; }
public class ListComplianceSummariesResult { /** * A list of compliant and non - compliant summary counts based on compliance types . For example , this call returns * State Manager associations , patches , or custom compliance types according to the filter criteria that you * specified . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setComplianceSummaryItems ( java . util . Collection ) } or * { @ link # withComplianceSummaryItems ( java . util . Collection ) } if you want to override the existing values . * @ param complianceSummaryItems * A list of compliant and non - compliant summary counts based on compliance types . For example , this call * returns State Manager associations , patches , or custom compliance types according to the filter criteria * that you specified . * @ return Returns a reference to this object so that method calls can be chained together . */ public ListComplianceSummariesResult withComplianceSummaryItems ( ComplianceSummaryItem ... complianceSummaryItems ) { } }
if ( this . complianceSummaryItems == null ) { setComplianceSummaryItems ( new com . amazonaws . internal . SdkInternalList < ComplianceSummaryItem > ( complianceSummaryItems . length ) ) ; } for ( ComplianceSummaryItem ele : complianceSummaryItems ) { this . complianceSummaryItems . add ( ele ) ; } return this ;
public class ReflectionUtil { /** * 获取所有包含指定 < code > Annotation < / code > 的 < code > Field < / code > 数组 * @ param clazz 查找类 * @ param annotationClass 注解类名 * @ return < code > Field < / code > 数组 */ public static Field [ ] getAnnotationFields ( Class < ? > clazz , Class < ? extends Annotation > annotationClass ) { } }
if ( clazz == null || annotationClass == null ) { return null ; } List < Field > fields = getAllFieldsOfClass0 ( clazz ) ; if ( CollectionUtil . isEmpty ( fields ) ) { return null ; } List < Field > list = CollectionUtil . createArrayList ( ) ; for ( Field field : fields ) { if ( null != field . getAnnotation ( annotationClass ) ) { list . add ( field ) ; field . setAccessible ( true ) ; } } return list . toArray ( new Field [ 0 ] ) ;
public class FileUtil { /** * Loads bytes of the given file . * @ return Bytes of the given file . */ public static byte [ ] readFile ( File file ) throws IOException { } }
// Open file RandomAccessFile f = new RandomAccessFile ( file , "r" ) ; try { // Get and check length long longlength = f . length ( ) ; int length = ( int ) longlength ; if ( length != longlength ) { throw new IOException ( "File size >= 2 GB" ) ; } // Read file and return data byte [ ] data = new byte [ length ] ; f . readFully ( data ) ; return data ; } finally { // Close file f . close ( ) ; }
public class Serializer { /** * Reads a serializable object . * @ param id The serializable type ID . * @ param buffer The buffer from which to read the object . * @ param < T > The object type . * @ return The read object . */ @ SuppressWarnings ( "unchecked" ) private < T > T readById ( int id , BufferInput < ? > buffer ) { } }
Class < T > type = ( Class < T > ) registry . type ( id ) ; if ( type == null ) throw new SerializationException ( "cannot deserialize: unknown type" ) ; TypeSerializer < T > serializer = getSerializer ( type ) ; if ( serializer == null ) throw new SerializationException ( "cannot deserialize: unknown type" ) ; return serializer . read ( type , buffer , this ) ;
public class DefaultTemplatedURLFormatter { /** * Format the given URL using a URL template , if defined in a URL * template config file . The { @ link org . apache . beehive . netui . core . urls . URIContext } * encapsulates some additional data needed to write out the string form . * E . g . It defines if the & quot ; & amp ; amp ; & quot ; entity or the * ' & amp ; ' character should be used to separate quary parameters . * @ param servletContext the current ServletContext . * @ param request the current ServletRequest . * @ param uri the MutableURI to be formatted into a String . * @ param key key for the URL template type to use for formatting the URI * @ param uriContext data required to write out the string form . * @ return the URL as a < code > String < / code > */ public String getTemplatedURL ( ServletContext servletContext , ServletRequest request , MutableURI uri , String key , URIContext uriContext ) { } }
// Look for the template config and get the right template . // If it is found , apply the value to the template . String result = null ; URLTemplate template = null ; URLTemplatesFactory factory = URLTemplatesFactory . getURLTemplatesFactory ( request ) ; if ( factory != null ) { String templateName = factory . getTemplateNameByRef ( DEFAULT_TEMPLATE_REF , key ) ; if ( templateName != null ) { template = factory . getURLTemplate ( templateName ) ; } } if ( template != null ) { result = formatURIWithTemplate ( request , uri , uriContext , template ) ; } else { // no template found , just return the uri as a String . . . result = uri . getURIString ( uriContext ) ; } return result ;
public class JsonEscape {
    /**
     * Performs a (configurable) JSON <strong>escape</strong> operation on a
     * {@code Reader} input, writing results to a {@code Writer}, according to
     * the specified {@link JsonEscapeType} and {@link JsonEscapeLevel}.
     * All other Reader/Writer-based {@code escapeJson(...)} methods call this
     * one with preconfigured type and level values.
     * This method is <strong>thread-safe</strong>.
     *
     * @param reader the Reader supplying the text to be escaped; a null reader
     *        results in nothing being written.
     * @param writer the Writer receiving the escaped result; must not be null.
     * @param type the type of escape operation to be performed.
     * @param level the escape level to be applied.
     * @throws IOException if an input/output exception occurs.
     * @throws IllegalArgumentException if writer, type or level is null
     *         (checked in that order).
     * @since 1.1.2
     */
    public static void escapeJson(final Reader reader, final Writer writer, final JsonEscapeType type, final JsonEscapeLevel level) throws IOException {
        // Validate arguments before delegating; note reader may legitimately be null.
        if (writer == null) {
            throw new IllegalArgumentException("Argument 'writer' cannot be null");
        }
        if (type == null) {
            throw new IllegalArgumentException("The 'type' argument cannot be null");
        }
        if (level == null) {
            throw new IllegalArgumentException("The 'level' argument cannot be null");
        }
        JsonEscapeUtil.escape(reader, writer, type, level);
    }
}
public class SingletonActivationStrategy {
    /**
     * Internal method used by subclasses to activate a bean: finds the bean in
     * the cache or creates and inserts it, enlists it in the transaction, and
     * adjusts cache pin counts. On failure the bean is destroyed and (if newly
     * created) removed from the cache before the exception propagates.
     *
     * @param threadData per-thread EJB data; the activated bean is pushed as
     *        its callback BeanO.
     * @param tx the containing transaction.
     * @param beanId identity of the bean to activate.
     * @param takeInvocationRef true to keep an extra cache reference for the
     *        duration of the invocation.
     * @return the activated (or cached) bean.
     * @throws RemoteException if creation/activation fails.
     */
    protected BeanO doActivation(EJBThreadData threadData, ContainerTx tx, BeanId beanId, boolean takeInvocationRef) throws RemoteException {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
            Tr.entry(tc, "doActivation", new Object[] { tx, beanId, new Boolean(takeInvocationRef) });
        }
        BeanO bean = null;
        Throwable exception = null;
        MasterKey key = new MasterKey(beanId);
        boolean activate = false;          // true only when we created the bean here
        boolean pushedCallbackBeanO = false; // tracks whether finally must pop it on failure
        try {
            // Per-key lock serializes find/create for the same bean identity.
            synchronized (locks.getLock(key)) {
                if ((bean = (BeanO) cache.find(key)) == null) {
                    if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                        Tr.debug(tc, "Bean not in cache");
                    // createBeanO also pushes the callback BeanO (d630940).
                    bean = beanId.getHome().createBeanO(threadData, tx, beanId);
                    pushedCallbackBeanO = true;
                    cache.insert(key, bean);
                    bean.ivCacheKey = key; // d199233
                    activate = true;
                } else {
                    if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                        Tr.debug(tc, "Found bean in cache");
                    // Set the found BeanO as the 'Callback' BeanO, as this is the
                    // BeanO that is becoming the active beanO for the thread.
                    // This will allow methods called by customer code (like Timer
                    // methods) to determine the state of the BeanO that is making
                    // the call. d168509
                    threadData.pushCallbackBeanO(bean); // d630940
                    pushedCallbackBeanO = true;
                }
            }
            boolean pin = false;
            if (activate) {
                bean.activate(beanId, tx); // d114677
            }
            pin = bean.enlist(tx); // d114677
            if (takeInvocationRef && pin) {
                // We need to take an additional reference.
                cache.pin(key);
            } else if (!takeInvocationRef && !pin) {
                // Need to drop reference taken by find or insert.
                cache.unpin(key);
            }
        } catch (RemoteException e) {
            FFDCFilter.processException(e, CLASS_NAME + ".doActivation", "123", this);
            exception = e;
            throw e;
        } catch (RuntimeException e) {
            FFDCFilter.processException(e, CLASS_NAME + ".doActivation", "129", this);
            exception = e;
            throw e;
        } finally {
            if (exception != null) {
                if (TraceComponent.isAnyTracingEnabled() && tc.isEventEnabled())
                    Tr.event(tc, "doActivation: exception raised", exception);
            }
            // Failure cleanup: undo the callback push, destroy the bean, and
            // remove it from the cache if it was created by this call.
            if (exception != null && bean != null) {
                if (pushedCallbackBeanO) {
                    threadData.popCallbackBeanO();
                }
                bean.destroy();
                if (activate) {
                    // Synchronize to insure that a temp pin obtained by getBean
                    // doesn't cause the remove to fail due to too many pins. PQ53065
                    synchronized (locks.getLock(key)) {
                        cache.remove(key, true);
                        bean.ivCacheKey = null; // d199233
                    }
                }
            }
            if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
                Tr.exit(tc, "doActivation", bean);
        }
        return bean;
    }
}
public class SaverDef {
    /**
     * <pre>
     * The name of the tensor in which to specify the filename when saving or
     * restoring a model checkpoint.
     * </pre>
     *
     * <code>optional string filename_tensor_name = 1;</code>
     *
     * Standard generated protobuf accessor: lazily converts the cached String
     * form of the field to a ByteString and caches that conversion.
     */
    public com.google.protobuf.ByteString getFilenameTensorNameBytes() {
        java.lang.Object ref = filenameTensorName_;
        if (ref instanceof java.lang.String) {
            // First bytes access: convert and cache so later calls are cheap.
            com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
            filenameTensorName_ = b;
            return b;
        } else {
            // Already cached as a ByteString.
            return (com.google.protobuf.ByteString) ref;
        }
    }
}
public class RuntimeModelIo {
    /**
     * Loads an application from a project directory: graph, instances, commands,
     * autonomic rules, misplaced/unreachable file checks, and final validation.
     * Errors are accumulated in {@code result.loadErrors} rather than thrown.
     *
     * @param projectDirectory the project directory.
     * @param appDescriptor the application's descriptor (may be null; most
     *        sections are then skipped).
     * @param result the result to populate.
     * @return the populated load result (never null).
     */
    private static ApplicationLoadResult loadApplication(File projectDirectory, ApplicationTemplateDescriptor appDescriptor, ApplicationLoadResult result) {
        ApplicationTemplate app = result.applicationTemplate;
        result.applicationTemplate.setDirectory(projectDirectory);

        // Load the graph. The labeled block lets error paths skip the rest of
        // the graph handling without nesting.
        File graphDirectory = new File(projectDirectory, Constants.PROJECT_DIR_GRAPH);
        GRAPH: if (!graphDirectory.exists()) {
            RoboconfError error = new RoboconfError(ErrorCode.PROJ_NO_GRAPH_DIR, directory(projectDirectory));
            result.loadErrors.add(error);
        } else if (appDescriptor != null && !Utils.isEmptyOrWhitespaces(appDescriptor.getGraphEntryPoint())) {
            File mainGraphFile = new File(graphDirectory, appDescriptor.getGraphEntryPoint());
            if (!mainGraphFile.exists()) {
                RoboconfError error = new RoboconfError(ErrorCode.PROJ_MISSING_GRAPH_EP, expected(mainGraphFile.getAbsolutePath()));
                result.loadErrors.add(error);
                break GRAPH;
            }
            Graphs graphs = loadGraph(mainGraphFile, graphDirectory, result);
            app.setGraphs(graphs);
        }

        // Load the instances (requires a successfully built graph).
        File instDirectory = new File(projectDirectory, Constants.PROJECT_DIR_INSTANCES);
        INST: if (appDescriptor != null && instDirectory.exists()) {
            if (app.getGraphs() == null) {
                result.loadErrors.add(new RoboconfError(ErrorCode.CO_GRAPH_COULD_NOT_BE_BUILT));
                break INST;
            }
            if (Utils.isEmptyOrWhitespaces(appDescriptor.getInstanceEntryPoint()))
                break INST;
            File mainInstFile = new File(instDirectory, appDescriptor.getInstanceEntryPoint());
            InstancesLoadResult ilr = loadInstances(mainInstFile, instDirectory, app.getGraphs(), app.getName());
            result.getParsedFiles().addAll(ilr.getParsedFiles());
            result.objectToSource.putAll(ilr.getObjectToSource());
            result.loadErrors.addAll(ilr.getLoadErrors());
            app.getRootInstances().addAll(ilr.getRootInstances());
        }

        // Commands: parse each *.commands file and remember its name for the
        // autonomic-rule cross-checks below.
        File commandsDirectory = new File(projectDirectory, Constants.PROJECT_DIR_COMMANDS);
        List<String> commandNames = new ArrayList<>();
        if (app.getGraphs() != null && commandsDirectory.exists()) {
            for (File f : Utils.listAllFiles(commandsDirectory)) {
                if (!f.getName().endsWith(Constants.FILE_EXT_COMMANDS)) {
                    result.loadErrors.add(new RoboconfError(ErrorCode.PROJ_INVALID_COMMAND_EXT));
                } else {
                    CommandsParser parser = new CommandsParser(app, f);
                    result.loadErrors.addAll(parser.getParsingErrors());
                    commandNames.add(f.getName().replace(Constants.FILE_EXT_COMMANDS, ""));
                }
            }
        }

        // Autonomic rules: parse and verify every invoked command exists.
        File autonomicRulesDirectory = new File(projectDirectory, Constants.PROJECT_DIR_RULES_AUTONOMIC);
        if (app.getGraphs() != null && autonomicRulesDirectory.exists()) {
            for (File f : Utils.listAllFiles(autonomicRulesDirectory)) {
                if (!f.getName().endsWith(Constants.FILE_EXT_RULE)) {
                    result.loadErrors.add(new RoboconfError(ErrorCode.PROJ_INVALID_RULE_EXT));
                } else {
                    // Parsing errors
                    RuleParser parser = new RuleParser(f);
                    result.loadErrors.addAll(parser.getParsingErrors());
                    // Invalid references to commands?
                    List<String> coll = new ArrayList<>(parser.getRule().getCommandsToInvoke());
                    coll.removeAll(commandNames);
                    for (String commandName : coll)
                        result.loadErrors.add(new RoboconfError(ErrorCode.RULE_UNKNOWN_COMMAND, name(commandName)));
                }
            }
        }

        // Check for files that are not reachable or not in the right directories.
        if (projectDirectory.isDirectory()) {
            String[] exts = { Constants.FILE_EXT_GRAPH, Constants.FILE_EXT_INSTANCES };
            File[] directories = { graphDirectory, instDirectory };
            for (int i = 0; i < exts.length; i++) {
                List<File> files = Utils.listAllFiles(projectDirectory, exts[i]);
                List<File> filesWithInvalidLocation = new ArrayList<>();
                for (File f : files) {
                    if (!Utils.isAncestor(directories[i], f)) {
                        result.loadErrors.add(new ParsingError(ErrorCode.PROJ_INVALID_FILE_LOCATION, f, 1));
                        filesWithInvalidLocation.add(f);
                    }
                }
                // Whatever remains was neither parsed nor misplaced: unreachable.
                files.removeAll(result.getParsedFiles());
                files.removeAll(filesWithInvalidLocation);
                for (File f : files)
                    result.loadErrors.add(new ParsingError(ErrorCode.PROJ_UNREACHABLE_FILE, f, 1));
            }
        }

        // Validate the entire application, unless critical errors already occurred.
        if (!RoboconfErrorHelpers.containsCriticalErrors(result.loadErrors)) {
            Collection<ModelError> errors = RuntimeModelValidator.validate(app);
            result.loadErrors.addAll(errors);
        }
        return result;
    }
}
public class EarDescriptorBuilder { /** * Wrires any module ( generates element ) . * @ return element to write data to */ private Element writeModule ( ) { } }
Element element = doc . createElement ( "module" ) ; rootElement . appendChild ( element ) ; return element ;
public class BuildDataHelper { /** * Gets SHA1 from the build . * @ param build * @ return SHA1 of the las * @ throws IOException Cannot get the info about commit ID */ @ Nonnull public static ObjectId getCommitSHA1 ( @ Nonnull Run < ? , ? > build ) throws IOException { } }
List < BuildData > buildDataList = build . getActions ( BuildData . class ) ; Job < ? , ? > parent = build . getParent ( ) ; BuildData buildData = calculateBuildData ( parent . getName ( ) , parent . getFullName ( ) , buildDataList ) ; if ( buildData == null ) { throw new IOException ( Messages . BuildDataHelper_NoBuildDataError ( ) ) ; } // buildData ? . lastBuild ? . marked and fall back to . revision with null check everywhere to be defensive Build b = buildData . lastBuild ; if ( b != null ) { Revision r = b . marked ; if ( r == null ) { r = b . revision ; } if ( r != null ) { return r . getSha1 ( ) ; } } // Nowhere to report = > fail the build throw new IOException ( Messages . BuildDataHelper_NoLastRevisionError ( ) ) ;
public class ModbusTCPTransaction { /** * incrementTransactionID - - Increment the transaction ID for the next * transaction . Note that the caller must get the new transaction ID with * getTransactionID ( ) . This is only done validity checking is enabled so * that dumb slaves don ' t cause problems . The original request will have its * transaction ID incremented as well so that sending the same transaction * again won ' t cause problems . */ private synchronized void incrementTransactionID ( ) { } }
if ( isCheckingValidity ( ) ) { if ( transactionID >= Modbus . MAX_TRANSACTION_ID ) { transactionID = Modbus . DEFAULT_TRANSACTION_ID ; } else { transactionID ++ ; } } request . setTransactionID ( getTransactionID ( ) ) ;
public class LongHashMap { /** * Resize the table to hold given capacity . * @ param newCapacity the new capacity , must be a power of two . */ private void resize ( int newCapacity ) { } }
// assert ( newCapacity & - newCapacity ) = = newCapacity ; / / power of 2 int newLength = newCapacity * 2 ; long [ ] oldTable = table ; int oldLength = oldTable . length ; if ( oldLength == 2 * MAXIMUM_CAPACITY ) { // can ' t expand any further if ( threshold == MAXIMUM_CAPACITY - 1 ) throw new IllegalStateException ( "Capacity exhausted." ) ; threshold = MAXIMUM_CAPACITY - 1 ; // Gigantic map ! return ; } if ( oldLength >= newLength ) return ; long [ ] newTable = new long [ newLength ] ; threshold = ( newCapacity * 3 ) / 4 ; for ( int j = 0 ; j < oldLength ; j += 2 ) { long key = oldTable [ j ] ; if ( key != 0 ) { long value = oldTable [ j + 1 ] ; int i = hash ( key , newLength ) ; while ( newTable [ i ] != 0 ) i = nextKeyIndex ( i , newLength ) ; newTable [ i ] = key ; newTable [ i + 1 ] = value ; } } table = newTable ;
public class SnapshotClient { /** * Sets the access control policy on the specified resource . Replaces any existing policy . * < p > Sample code : * < pre > < code > * try ( SnapshotClient snapshotClient = SnapshotClient . create ( ) ) { * ProjectGlobalSnapshotResourceName resource = ProjectGlobalSnapshotResourceName . of ( " [ PROJECT ] " , " [ RESOURCE ] " ) ; * GlobalSetPolicyRequest globalSetPolicyRequestResource = GlobalSetPolicyRequest . newBuilder ( ) . build ( ) ; * Policy response = snapshotClient . setIamPolicySnapshot ( resource , globalSetPolicyRequestResource ) ; * < / code > < / pre > * @ param resource Name or id of the resource for this request . * @ param globalSetPolicyRequestResource * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ @ BetaApi public final Policy setIamPolicySnapshot ( ProjectGlobalSnapshotResourceName resource , GlobalSetPolicyRequest globalSetPolicyRequestResource ) { } }
SetIamPolicySnapshotHttpRequest request = SetIamPolicySnapshotHttpRequest . newBuilder ( ) . setResource ( resource == null ? null : resource . toString ( ) ) . setGlobalSetPolicyRequestResource ( globalSetPolicyRequestResource ) . build ( ) ; return setIamPolicySnapshot ( request ) ;
public class MapSchema { /** * Validates a given value against the schema and configured validation rules . * @ param path a dot notation path to the value . * @ param value a value to be validated . * @ param results a list with validation results to add new results . */ @ SuppressWarnings ( "unchecked" ) @ Override protected void performValidation ( String path , Object value , List < ValidationResult > results ) { } }
String name = path != null ? path : "value" ; value = ObjectReader . getValue ( value ) ; super . performValidation ( path , value , results ) ; if ( value == null ) return ; if ( value instanceof Map < ? , ? > ) { Map < Object , Object > map = ( Map < Object , Object > ) value ; for ( Object key : map . keySet ( ) ) { String elementPath = path == null || path . length ( ) == 0 ? key . toString ( ) : path + "." + key ; performTypeValidation ( elementPath , _keyType , key , results ) ; performTypeValidation ( elementPath , _valueType , map . get ( key ) , results ) ; } } else { results . add ( new ValidationResult ( path , ValidationResultType . Error , "VALUE_ISNOT_MAP" , name + " type must be Map" , "Map" , value . getClass ( ) ) ) ; }
public class Strings {
    /**
     * Appends a HTML-escaped copy of the given characters to the buffer. The
     * five HTML special characters become entities, alphabetic characters are
     * copied verbatim, and every other character is written as a numeric
     * character reference.
     *
     * @param buffer the {@linkplain StringBuilder} to encode into.
     * @param chars the {@linkplain CharSequence} to encode.
     * @return the buffer, for chaining.
     */
    public static StringBuilder encodeHtml(StringBuilder buffer, CharSequence chars) {
        for (int i = 0; i < chars.length(); i++) {
            char c = chars.charAt(i);
            if (c == '<') {
                buffer.append("&lt;");
            } else if (c == '>') {
                buffer.append("&gt;");
            } else if (c == '&') {
                buffer.append("&amp;");
            } else if (c == '"') {
                buffer.append("&quot;");
            } else if (c == '\'') {
                buffer.append("&#39;");
            } else if (Character.isAlphabetic(c)) {
                buffer.append(c);
            } else {
                // Anything non-alphabetic (digits, spaces, punctuation, ...)
                // is emitted as a numeric character reference.
                buffer.append("&#").append((int) c).append(';');
            }
        }
        return buffer;
    }
}
public class LogFactory { /** * Load log provider from Java services and return the first instance found . It is expected to have only one log service * provider deployed on runtime ; it more found just blindly select the first found . * Returns { @ link DefaultLogProvider } if no log provider service found . * @ return log provider instance . */ private static LogProvider provider ( ) { } }
Iterable < LogProvider > providers = ServiceLoader . load ( LogProvider . class ) ; for ( LogProvider provider : providers ) { // for now ignore multiple implementations and choose blindly the first one return provider ; } return new DefaultLogProvider ( ) ;
public class HTTPChunkedInputStream { /** * Read a line of text from the given Stream and return it * as a String . Assumes lines end in CRLF . */ private String readLine ( InputStream in ) throws IOException { } }
StringBuffer buf = new StringBuffer ( ) ; int c , length = 0 ; while ( true ) { c = in . read ( ) ; if ( c == - 1 || c == '\n' || length > 512 ) { break ; } else if ( c == '\r' ) { in . read ( ) ; return buf . toString ( ) ; } else { buf . append ( ( char ) c ) ; length ++ ; } } return buf . toString ( ) ;
public class KeyTools { /** * 构建RSA密钥对 * @ param keySize keySize ( 必须大于等于512) * @ return RSA密钥对 */ public static KeyHolder buildRSAKey ( int keySize ) { } }
try { KeyPairGenerator generator = KeyPairGenerator . getInstance ( "RSA" ) ; generator . initialize ( keySize ) ; KeyPair keyPair = generator . generateKeyPair ( ) ; PrivateKey privateKey = keyPair . getPrivate ( ) ; PublicKey publicKey = keyPair . getPublic ( ) ; return new KeyHolder ( privateKey , publicKey ) ; } catch ( NoSuchAlgorithmException e ) { throw new SecureException ( "当前系统没有提供生成RSA密钥对的算法" , e ) ; }
public class PrimitivePropertyEditor { /** * According the editor type , this methods checks if value equals * to one of the unknown values defined under { @ link org . biopax . paxtools . model . BioPAXElement } * or is an empty set or set of " unknown " values . * @ param value the value to be checked if it is unknown * @ return true , if value equals to the predefined unknown value */ @ Override public boolean isUnknown ( Object value ) { } }
return ( value instanceof Set ) ? emptySetOrContainsOnlyUnknowns ( ( Set ) value ) : emptySetOrContainsOnlyUnknowns ( Collections . singleton ( value ) ) ;