signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class LinkClustering { /** * Computes the similarity of the two edges as the Jaccard index of the * neighbors of two impost nodes . The impost nodes are the two nodes the * edges do not have in common . Subclasses may override this method to * define a new method for computing edge similarity . * < p > < i > Implementation Note < / i > : Subclasses that wish to override this * behavior should be aware that this method is likely to be called by * multiple threads and therefor should make provisions to be thread safe . * In addition , this method may be called more than once per edge pair if * the similarity matrix is being computed on - the - fly . * @ param sm a matrix containing the connections between edges . A non - zero * value in location ( i , j ) indicates a node < i > i < / i > is connected to * node < i > j < / i > by an edge . * @ param e1 an edge to be compared with { @ code e2} * @ param e2 an edge to be compared with { @ code e1} * @ return the similarity of the edges . a */ protected < E extends Edge > double getConnectionSimilarity ( Graph < E > graph , int keystone , int impost1 , int impost2 ) { } }
IntSet n1 = graph . getNeighbors ( impost1 ) ; IntSet n2 = graph . getNeighbors ( impost2 ) ; int n1size = n1 . size ( ) ; int n2size = n2 . size ( ) ; // Swap based on size prior to searching for which vertices are in // common if ( n1size > n2size ) { IntSet tmp = n2 ; n2 = n1 ; n1 = tmp ; int t = impost1 ; impost1 = impost2 ; impost2 = t ; } int inCommon = 0 ; IntIterator it = n1 . iterator ( ) ; while ( it . hasNext ( ) ) { int v = it . nextInt ( ) ; if ( n2 . contains ( v ) ) inCommon ++ ; } if ( n2 . contains ( impost1 ) ) inCommon ++ ; if ( n1 . contains ( impost2 ) ) inCommon ++ ; // NOTE : it doesn ' t matter that n1 and n2 ' s sizes might be potentually // switched since we ' re doing a commutative operation return ( double ) inCommon / ( n1size + n2size + 2 - inCommon ) ;
public class TagTypeImpl { /** * Returns all < code > attribute < / code > elements * @ return list of < code > attribute < / code > */ public List < TldAttributeType < TagType < T > > > getAllAttribute ( ) { } }
List < TldAttributeType < TagType < T > > > list = new ArrayList < TldAttributeType < TagType < T > > > ( ) ; List < Node > nodeList = childNode . get ( "attribute" ) ; for ( Node node : nodeList ) { TldAttributeType < TagType < T > > type = new TldAttributeTypeImpl < TagType < T > > ( this , "attribute" , childNode , node ) ; list . add ( type ) ; } return list ;
public class DynamicByteBuffer { /** * Write medium short array . * @ param values the values */ public void writeMediumShortArray ( short [ ] values ) { } }
int byteSize = values . length * 2 + 2 ; this . addUnsignedShort ( values . length ) ; doWriteShortArray ( values , byteSize ) ;
public class br_upgrade { /** * < pre > * Use this operation to upgrade Repeater Instances . * < / pre > */ public static br_upgrade upgrade ( nitro_service client , br_upgrade resource ) throws Exception { } }
return ( ( br_upgrade [ ] ) resource . perform_operation ( client , "upgrade" ) ) [ 0 ] ;
public class ContentsDao { /** * Get or create a Tile Matrix DAO * @ return tile matrix dao * @ throws SQLException * upon dao creation failure */ private TileMatrixDao getTileMatrixDao ( ) throws SQLException { } }
if ( tileMatrixDao == null ) { tileMatrixDao = DaoManager . createDao ( connectionSource , TileMatrix . class ) ; } return tileMatrixDao ;
public class MethodHandleUtil {

    /**
     * Looks up a method handle for a public instance method on the target
     * object's class. The returned handle takes the receiver as its leading
     * argument, as with {@code Lookup.findVirtual}.
     *
     * @param target the object whose class declares the method
     * @param methodName the method name
     * @param ptypes the method's parameter types
     * @return the method handle, or {@code null} if the method cannot be
     *         found or accessed
     */
    public static MethodHandle findMethodHandle(Object target, String methodName, Class<?>... ptypes) {
        // Preserve the original null-on-failure contract (a null target
        // previously triggered an NPE that was swallowed and became null).
        if (target == null || methodName == null) {
            return null;
        }
        try {
            // BUG FIX: the previous implementation built a MethodType whose
            // RETURN type was target.getClass(), so the lookup failed (and
            // null was silently returned) for any method not returning the
            // receiver's own type. Resolving the java.lang.reflect.Method
            // first uses the method's real return type.
            return MethodHandles.lookup().unreflect(target.getClass().getMethod(methodName, ptypes));
        } catch (Throwable e) {
            // Lookup failure is reported as null, matching the old behavior.
            return null;
        }
    }
}
public class RelationalJMapper { /** * This method returns a new instance of Configured Class with this setting : * < table summary = " " > * < tr > * < td > < code > NullPointerControl < / code > < / td > < td > < code > SOURCE < / code > < / td > * < / tr > < tr > * < td > < code > MappingType < / code > of Destination < / td > < td > < code > ALL _ FIELDS < / code > < / td > * < / tr > < tr > * < td > < code > MappingType < / code > of Source < / td > < td > < code > ALL _ FIELDS < / code > < / td > * < / tr > * < / table > * @ param source instance of Target Class type that contains the data * @ return new instance of Configured Class * @ see NullPointerControl * @ see MappingType */ public < S > T manyToOne ( final S source ) { } }
try { return this . < T , S > getJMapper ( relationalManyToOneMapper , source ) . getDestination ( source ) ; } catch ( Exception e ) { return logAndReturnNull ( e ) ; }
public class AbstractMojo { /** * Creates the { @ link StoreConfiguration } . This is a copy of the { @ link # store } * enriched by default values and additional command line parameters . * @ param rootModule * The root module . * @ return The directory . */ private StoreConfiguration getStoreConfiguration ( MavenProject rootModule ) { } }
StoreConfiguration . StoreConfigurationBuilder builder = StoreConfiguration . builder ( ) ; if ( store . getUri ( ) == null ) { File storeDirectory = OptionHelper . selectValue ( new File ( rootModule . getBuild ( ) . getDirectory ( ) , STORE_DIRECTORY ) , this . storeDirectory ) ; storeDirectory . getParentFile ( ) . mkdirs ( ) ; URI uri = new File ( storeDirectory , "/" ) . toURI ( ) ; builder . uri ( uri ) ; } else { builder . uri ( store . getUri ( ) ) ; builder . username ( store . getUsername ( ) ) ; builder . password ( store . getPassword ( ) ) ; builder . encryptionLevel ( store . getEncryptionLevel ( ) ) ; } builder . properties ( store . getProperties ( ) ) ; builder . embedded ( getEmbeddedNeo4jConfiguration ( ) ) ; StoreConfiguration storeConfiguration = builder . build ( ) ; getLog ( ) . debug ( "Using store configuration " + storeConfiguration ) ; return storeConfiguration ;
public class DefaultSystemFailureMapper { /** * This method converts a java Throwable into a " user friendly " error message . * @ param throwable the Throwable to convert * @ return A { @ link Message } containing the hard coded description " The system is currently unavailable . " */ @ Override public Message toMessage ( final Throwable throwable ) { } }
LOG . error ( "The system is currently unavailable" , throwable ) ; return new Message ( Message . ERROR_MESSAGE , InternalMessages . DEFAULT_SYSTEM_ERROR ) ;
public class OutputDataConfigMarshaller { /** * Marshall the given parameter object . */ public void marshall ( OutputDataConfig outputDataConfig , ProtocolMarshaller protocolMarshaller ) { } }
if ( outputDataConfig == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( outputDataConfig . getS3Uri ( ) , S3URI_BINDING ) ; protocolMarshaller . marshall ( outputDataConfig . getKmsKeyId ( ) , KMSKEYID_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class EventProcessor { /** * Returns the deserialized event object contained in the { @ code eventEnvelope } */ private Object getEvent ( EventEnvelope eventEnvelope ) { } }
Object event = eventEnvelope . getEvent ( ) ; if ( event instanceof Data ) { event = eventService . nodeEngine . toObject ( event ) ; } return event ;
public class AbstractAmazonKinesisFirehoseDelivery { /** * Method to return the bucket ARN . * @ param bucketName the bucket name to be formulated as ARN * @ return the bucket ARN * @ throws IllegalArgumentException */ protected static String getBucketARN ( String bucketName ) throws IllegalArgumentException { } }
return new StringBuilder ( ) . append ( S3_ARN_PREFIX ) . append ( bucketName ) . toString ( ) ;
public class Config { /** * 得到一个位置的所有特征 * @ param list * @ param index * @ return KeyValue ( 词语 , featureLength * tagNum ) */ public char [ ] [ ] makeFeatureArr ( List < Element > list , int index ) { } }
char [ ] [ ] result = new char [ template . length ] [ ] ; char [ ] chars = null ; int len = 0 ; int i = 0 ; for ( ; i < template . length ; i ++ ) { if ( template [ i ] . length == 0 ) { continue ; } chars = new char [ template [ i ] . length + 1 ] ; len = chars . length - 1 ; for ( int j = 0 ; j < len ; j ++ ) { chars [ j ] = getNameIfOutArr ( list , index + template [ i ] [ j ] ) ; } chars [ len ] = ( char ) ( FEATURE_BEGIN + i ) ; result [ i ] = chars ; } return result ;
public class AgentProperties { /** * Creates a new bean from raw properties that will be parsed . * @ param rawProperties a non - null string * @ param logger a logger ( not null ) * @ return a non - null bean * @ throws IOException if there were files that failed to be written */ public static AgentProperties readIaasProperties ( String rawProperties , Logger logger ) throws IOException { } }
Properties props = Utils . readPropertiesQuietly ( rawProperties , logger ) ; return readIaasProperties ( props ) ;
public class CookieCacheData { /** * Search the cache for an existing Cookie that matches the input name . * @ param name * @ return HttpCookie - - null if this cookie is not in the cache */ public HttpCookie getCookie ( String name ) { } }
if ( null == name || 0 == this . parsedList . size ( ) ) { return null ; } for ( HttpCookie cookie : this . parsedList ) { if ( cookie . getName ( ) . equals ( name ) ) { return cookie ; } } return null ;
public class Chr { /** * / * End universal methods . */ private static char [ ] getCharsFromStringBuilder ( StringBuilder sbuf ) { } }
final int length = sbuf . length ( ) ; char [ ] array2 = new char [ length ] ; sbuf . getChars ( 0 , length , array2 , 0 ) ; return array2 ;
public class KeyUtils { /** * Generates a { @ link RangeHashFunction } . * @ param min * the minimal value to expect * @ param max * the maximal value to expect * @ param buckets * an array with the names of the buckets * @ param suffix * the suffix for all files * @ param prefix * a prefix for all files * @ return String representation of the the RangeHashFunction * @ throws Exception */ public static String generateRangeHashFunction ( byte [ ] min , byte [ ] max , String [ ] buckets , String suffix , String prefix ) throws Exception { } }
if ( compareKey ( min , max ) > 0 ) { throw new Exception ( "The given min is not larger than the max. Buckets could not be determined" ) ; } byte [ ] [ ] ranges = getMaxValsPerRange ( min , max , buckets . length ) ; StringBuilder sb = new StringBuilder ( ) ; for ( int i = 0 ; i < min . length ; i ++ ) { sb . append ( "b" ) . append ( "\t" ) ; } sb . append ( "filename" ) . append ( "\n" ) ; for ( int i = 0 ; i < buckets . length ; ++ i ) { byte [ ] val = ranges [ i ] ; for ( int j = 0 ; j < val . length ; j ++ ) { int k = val [ j ] & 0xff ; sb . append ( k + "\t" ) ; } sb . append ( prefix + buckets [ i ] + suffix + "\n" ) ; } return sb . toString ( ) ;
public class XMLSerializer { /** * Returns an the first index from the input string that should be escaped * if escaping is needed , otherwise { @ code - 1 } . * @ param pString the input string that might need escaping . * @ return the first index from the input string that should be escaped , * or { @ code - 1 } . */ private static int needsEscapeElement ( final String pString ) { } }
for ( int i = 0 ; i < pString . length ( ) ; i ++ ) { switch ( pString . charAt ( i ) ) { case '&' : case '<' : case '>' : // case ' \ ' ' : // case ' " ' : return i ; default : } } return - 1 ;
public class KuduDBClientFactory { /** * ( non - Javadoc ) * @ see * com . impetus . kundera . loader . ClientFactory # getSchemaManager ( java . util . Map ) */ @ Override public SchemaManager getSchemaManager ( Map < String , Object > puProperties ) { } }
if ( schemaManager == null ) { initializePropertyReader ( ) ; setExternalProperties ( puProperties ) ; schemaManager = new KuduDBSchemaManager ( KuduDBClientFactory . class . getName ( ) , puProperties , kunderaMetadata ) ; } return schemaManager ;
public class AbstractGitFlowMojo { /** * Fetches and checkouts from remote if local branch doesn ' t exist . * @ param branchName * Branch name to check . * @ throws MojoFailureException * @ throws CommandLineException */ protected void gitFetchRemoteAndCreate ( final String branchName ) throws MojoFailureException , CommandLineException { } }
if ( ! gitCheckBranchExists ( branchName ) ) { getLog ( ) . info ( "Local branch '" + branchName + "' doesn't exist. Trying to fetch and check it out from '" + gitFlowConfig . getOrigin ( ) + "'." ) ; gitFetchRemote ( branchName ) ; gitCreateAndCheckout ( branchName , gitFlowConfig . getOrigin ( ) + "/" + branchName ) ; }
public class ListGroupResourcesResult { /** * A list of < code > QueryError < / code > objects . Each error is an object that contains < code > ErrorCode < / code > and * < code > Message < / code > structures . Possible values for < code > ErrorCode < / code > are * < code > CLOUDFORMATION _ STACK _ INACTIVE < / code > and < code > CLOUDFORMATION _ STACK _ NOT _ EXISTING < / code > . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setQueryErrors ( java . util . Collection ) } or { @ link # withQueryErrors ( java . util . Collection ) } if you want to * override the existing values . * @ param queryErrors * A list of < code > QueryError < / code > objects . Each error is an object that contains < code > ErrorCode < / code > * and < code > Message < / code > structures . Possible values for < code > ErrorCode < / code > are * < code > CLOUDFORMATION _ STACK _ INACTIVE < / code > and < code > CLOUDFORMATION _ STACK _ NOT _ EXISTING < / code > . * @ return Returns a reference to this object so that method calls can be chained together . */ public ListGroupResourcesResult withQueryErrors ( QueryError ... queryErrors ) { } }
if ( this . queryErrors == null ) { setQueryErrors ( new java . util . ArrayList < QueryError > ( queryErrors . length ) ) ; } for ( QueryError ele : queryErrors ) { this . queryErrors . add ( ele ) ; } return this ;
public class DatabaseSpec { /** * Check that an elasticsearch index contains a specific document * @ param indexName * @ param columnName * @ param columnValue */ @ Then ( "^The Elasticsearch index named '(.+?)' and mapping '(.+?)' contains a column named '(.+?)' with the value '(.+?)'$" ) public void elasticSearchIndexContainsDocument ( String indexName , String mappingName , String columnName , String columnValue ) throws Exception { } }
Assertions . assertThat ( ( commonspec . getElasticSearchClient ( ) . searchSimpleFilterElasticsearchQuery ( indexName , mappingName , columnName , columnValue , "equals" ) . size ( ) ) > 0 ) . isTrue ( ) . withFailMessage ( "The index does not contain that document" ) ;
public class AbstractSettings { /** * / * ( non - Javadoc ) * @ see nyla . solutions . core . util . Settings # getProperty ( java . lang . String ) */ public String getProperty ( String key , boolean throwRequiredException , boolean checkSecured ) { } }
String retval = this . getRawProperty ( key ) ; if ( retval == null || retval . length ( ) == 0 ) { if ( ! throwRequiredException ) return null ; String configSourceLocation = this . getLocation ( ) ; if ( configSourceLocation == null ) throw new MissingConfigPropertiesException ( key ) ; throw new ConfigException ( "Configuration property \"" + key + "\" not found in environment variable, system properties or keys " + " file:" + configSourceLocation ) ; } if ( checkSecured ) { if ( ! retval . startsWith ( Cryption . CRYPTION_PREFIX ) ) { throw new ConfigException ( "Configuration key \"" + key + "\" must be encypted" ) ; } } return Cryption . interpret ( retval ) ;
public class FunctionTable { /** * Obtain a function ID from a given function name * @ param key the function name in a java . lang . String format . * @ return a function ID , which may correspond to one of the FUNC _ XXX values * found in { @ link org . apache . xpath . compiler . FunctionTable } , but may be a * value installed by an external module . */ Object getFunctionID ( String key ) { } }
Object id = m_functionID_customer . get ( key ) ; if ( null == id ) id = m_functionID . get ( key ) ; return id ;
public class CmsDefaultUserSettings { /** * Adds a preference . < p > * @ param name the name of the preference * @ param value the default value * @ param widget the widget to use for the preference * @ param widgetConfig the widget configuration * @ param niceName the nice name of the preference * @ param description the description of the preference * @ param ruleRegex the regex used for validation * @ param error the validation error message * @ param tab the tab to display the preference on */ public void addPreference ( String name , String value , String widget , String widgetConfig , String niceName , String description , String ruleRegex , String error , String tab ) { } }
CmsXmlContentProperty prop = new CmsXmlContentProperty ( name , "string" , widget , widgetConfig , ruleRegex , null , null , niceName , description , error , null ) ; CmsPreferenceData pref = new CmsPreferenceData ( name , value , prop , tab ) ; m_preferenceData . add ( pref ) ;
public class BAMInputFormat { /** * Converts an interval in SimpleInterval format into an htsjdk QueryInterval . * In doing so , a header lookup is performed to convert from contig name to index * @ param interval interval to convert * @ param sequenceDictionary sequence dictionary used to perform the conversion * @ return an equivalent interval in QueryInterval format */ private static QueryInterval convertSimpleIntervalToQueryInterval ( final Interval interval , final SAMSequenceDictionary sequenceDictionary ) { } }
if ( interval == null ) { throw new IllegalArgumentException ( "interval may not be null" ) ; } if ( sequenceDictionary == null ) { throw new IllegalArgumentException ( "sequence dictionary may not be null" ) ; } final int contigIndex = sequenceDictionary . getSequenceIndex ( interval . getContig ( ) ) ; if ( contigIndex == - 1 ) { throw new IllegalArgumentException ( "Contig " + interval . getContig ( ) + " not present in reads sequence " + "dictionary" ) ; } return new QueryInterval ( contigIndex , interval . getStart ( ) , interval . getEnd ( ) ) ;
public class CustomEventHook { /** * Helper method to get the FastAdapter from this ViewHolder * @ param viewHolder * @ return */ @ Nullable public FastAdapter < Item > getFastAdapter ( RecyclerView . ViewHolder viewHolder ) { } }
Object tag = viewHolder . itemView . getTag ( R . id . fastadapter_item_adapter ) ; if ( tag instanceof FastAdapter ) { return ( FastAdapter < Item > ) tag ; } return null ;
public class LeapSeconds {

    /**
     * Converts the UTC value to a UNIX timestamp without leap seconds.
     *
     * <p>This method is the inverse of {@code enhance()}. Note that the
     * mapping is not bijective, i.e. sometimes
     * {@code enhance(strip(val)) != val}.</p>
     *
     * @param utc elapsed SI-seconds relative to UTC epoch
     *        [1972-01-01T00:00:00Z] including leap seconds
     * @return elapsed time in seconds relative to UNIX epoch
     *         [1970-01-01T00:00:00Z] without leap seconds
     * @see #enhance(long)
     */
    public long strip(long utc) {
        // Values at or before the UTC epoch carry no leap seconds: shift only.
        if (utc <= 0) {
            return utc + UNIX_OFFSET;
        }
        // Linear search is better than binary search here because lookups are
        // usually for current dates (near the head of the descending list).
        final ExtendedLSE[] events = this.getEventsInDescendingOrder();
        boolean snls = this.supportsNegativeLS;
        for (int i = 0; i < events.length; i++) {
            ExtendedLSE lse = events[i];
            // Find the most recent leap-second event preceding `utc`; the
            // second clause additionally matches negative leap seconds when
            // they are supported.
            if ((lse.utc() - lse.getShift() < utc)
                    || (snls && (lse.getShift() < 0) && (lse.utc() < utc))) {
                // Remove the accumulated leap seconds up to that event.
                utc = Math.addExact(utc, lse.raw() - lse.utc());
                break;
            }
        }
        return utc + UNIX_OFFSET;
    }
}
public class EntityGroupImpl {

    /**
     * Delegates the member update to the factory, then invalidates cache
     * entries for every member whose relationship with this group changed.
     */
    @Override
    public void updateMembers() throws GroupsException {
        // Snapshot both added and removed members before the update clears
        // the pending sets.
        Set<IGroupMember> changed = new HashSet<>();
        changed.addAll(getAddedMembers().values());
        changed.addAll(getRemovedMembers().values());
        getLocalGroupService().updateGroupMembers(this);
        clearPendingUpdates();
        // Invalidate objects that changed their relationship with us.
        this.invalidateInParentGroupsCache(changed);
    }
}
public class XHTMLText { /** * Appends a tag that indicates that an inlined quote section begins . * @ param style the style of the inlined quote * @ return this . */ public XHTMLText appendOpenInlinedQuoteTag ( String style ) { } }
text . halfOpenElement ( Q ) ; text . optAttribute ( STYLE , style ) ; text . rightAngleBracket ( ) ; return this ;
public class LBoolFunctionBuilder { /** * One of ways of creating builder . This might be the only way ( considering all _ functional _ builders ) that might be utilize to specify generic params only once . */ @ Nonnull public static < R > LBoolFunctionBuilder < R > boolFunction ( Consumer < LBoolFunction < R > > consumer ) { } }
return new LBoolFunctionBuilder ( consumer ) ;
public class JawrWicketLinkTagHandler { /** * Analyze the tag . If return value = = true , a jawr component will be * created . * @ param tag * The current tag being parsed * @ return If true , tag will become auto - component */ protected boolean analyzeAutolinkCondition ( final ComponentTag tag ) { } }
if ( tag . getId ( ) == null ) { if ( checkRef ( tag ) ) { return true ; } } return false ;
public class RepositoryCache {

    /**
     * Creates a new workspace in this repository, if the repository is
     * appropriately configured. If the repository already contains a
     * workspace with the supplied name, this method simply returns that
     * workspace's cache.
     *
     * @param name the workspace name
     * @return the workspace cache for the new (or existing) workspace; never
     *         null
     * @throws UnsupportedOperationException if this repository was not
     *         configured to allow
     *         {@link RepositoryConfiguration#isCreatingWorkspacesAllowed()
     *         creation of workspaces}
     */
    public WorkspaceCache createWorkspace(String name) {
        if (!workspaceNames.contains(name)) {
            if (!configuration.isCreatingWorkspacesAllowed()) {
                throw new UnsupportedOperationException(
                        JcrI18n.creatingWorkspacesIsNotAllowedInRepository.text(getName()));
            }
            // Otherwise, create the workspace and persist it ...
            this.workspaceNames.add(name);
            refreshRepositoryMetadata(true);
            // Now make sure that the "/jcr:system" node is a child of the
            // root node ...
            SessionCache session = createSession(context, name, false);
            MutableCachedNode root = session.mutable(session.getRootKey());
            ChildReference ref = root.getChildReferences(session).getChild(JcrLexicon.SYSTEM);
            if (ref == null) {
                // Link the shared system node into this workspace and save.
                root.linkChild(session, systemKey, JcrLexicon.SYSTEM);
                session.save();
            }
            // And notify the others ...
            String userId = context.getSecurityContext().getUserName();
            Map<String, String> userData = context.getData();
            DateTime timestamp = context.getValueFactories().getDateFactory().create();
            RecordingChanges changes = new RecordingChanges(context.getId(), context.getProcessId(),
                    this.getKey(), null, repositoryEnvironment.journalId());
            changes.workspaceAdded(name);
            changes.freeze(userId, userData, timestamp);
            this.changeBus.notify(changes);
        }
        return workspace(name);
    }
}
public class system { /** * Quick toast method with short duration * @ param message toast content */ public static void toast ( String message ) { } }
Toast toast = Toast . makeText ( QuickUtils . getContext ( ) , message , Toast . LENGTH_SHORT ) ; toast . show ( ) ;
public class BaseExtension { /** * Determine if the GeoPackage has the extension * @ param extensionName * extension name * @ return true if has extension */ protected boolean has ( String extensionName ) { } }
List < Extensions > extensions = getExtensions ( extensionName ) ; return extensions != null && ! extensions . isEmpty ( ) ;
public class Image { /** * The applications associated with the image . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setApplications ( java . util . Collection ) } or { @ link # withApplications ( java . util . Collection ) } if you want to * override the existing values . * @ param applications * The applications associated with the image . * @ return Returns a reference to this object so that method calls can be chained together . */ public Image withApplications ( Application ... applications ) { } }
if ( this . applications == null ) { setApplications ( new java . util . ArrayList < Application > ( applications . length ) ) ; } for ( Application ele : applications ) { this . applications . add ( ele ) ; } return this ;
public class AbstractSailthruClient { /** * HTTP GET Request with Map * @ param action API action * @ param data Parameter data * @ throws IOException */ public JsonResponse apiGet ( ApiAction action , Map < String , Object > data ) throws IOException { } }
return httpRequestJson ( action , HttpRequestMethod . GET , data ) ;
public class TaskClient { /** * Perform a batch poll for tasks by task type . Batch size is configurable by count . * @ param taskType Type of task to poll for * @ param workerId Name of the client worker . Used for logging . * @ param count Maximum number of tasks to be returned . Actual number of tasks returned can be less than this number . * @ param timeoutInMillisecond Long poll wait timeout . * @ return List of tasks awaiting to be executed . */ public List < Task > batchPollTasksByTaskType ( String taskType , String workerId , int count , int timeoutInMillisecond ) { } }
Preconditions . checkArgument ( StringUtils . isNotBlank ( taskType ) , "Task type cannot be blank" ) ; Preconditions . checkArgument ( StringUtils . isNotBlank ( workerId ) , "Worker id cannot be blank" ) ; Preconditions . checkArgument ( count > 0 , "Count must be greater than 0" ) ; Object [ ] params = new Object [ ] { "workerid" , workerId , "count" , count , "timeout" , timeoutInMillisecond } ; List < Task > tasks = getForEntity ( "tasks/poll/batch/{taskType}" , params , taskList , taskType ) ; tasks . forEach ( this :: populateTaskInput ) ; return tasks ;
public class Sql {

    /**
     * Caches the connection used while the closure is active. If the closure
     * takes a single argument, it will be called with the connection,
     * otherwise it will be called with no arguments.
     *
     * @param closure the given closure
     * @throws SQLException if a database error occurs
     */
    public void cacheConnection(Closure closure) throws SQLException {
        // Remember the caller's caching mode so nested calls restore it.
        boolean savedCacheConnection = cacheConnection;
        cacheConnection = true;
        Connection connection = null;
        try {
            connection = createConnection();
            callClosurePossiblyWithConnection(closure, connection);
        } finally {
            // Temporarily disable caching so closeResources actually closes
            // the connection, then restore the caller's mode.
            cacheConnection = false;
            closeResources(connection, null);
            cacheConnection = savedCacheConnection;
            // Drop the cached connection when pooling and no longer caching.
            if (dataSource != null && !cacheConnection) {
                useConnection = null;
            }
        }
    }
}
public class ElementType { /** * Parses a header line starting an element description . * @ param elementLine Header line . * @ return ElementType without properties . * @ throws IOException if the header line has an invalid format . */ static HeaderEntry parse ( final String elementLine ) throws IOException { } }
if ( ! elementLine . startsWith ( "element " ) ) { throw new IOException ( "not an element: '" + elementLine + "'" ) ; } String definition = elementLine . substring ( "element " . length ( ) ) ; String [ ] parts = definition . split ( " +" , 2 ) ; if ( parts . length != 2 ) { throw new IOException ( "Expected two parts in element definition: '" + elementLine + "'" ) ; } String name = parts [ 0 ] ; String countStr = parts [ 1 ] ; int count ; try { count = Integer . parseInt ( countStr ) ; } catch ( NumberFormatException e ) { throw new IOException ( "Invalid element entry. Not an integer: '" + countStr + "'." ) ; } return new HeaderEntry ( name , count ) ;
public class DOMHelper {

    /**
     * Returns the namespace of the given node. Differs from simply getting
     * the node's prefix and using getNamespaceForPrefix in that it attempts
     * to cache some of the data in NSInfo objects, to avoid repeated lookup.
     * TODO: Should we consider moving that logic into getNamespaceForPrefix?
     *
     * @param n Node to be examined.
     * @return String containing the Namespace Name (uri) for this node.
     *         Note that this is undefined for any nodes other than Elements
     *         and Attributes.
     */
    public String getNamespaceOfNode(Node n) {
        String namespaceOfPrefix;
        boolean hasProcessedNS;
        NSInfo nsInfo;
        short ntype = n.getNodeType();
        // Only non-attribute nodes participate in the NSInfo cache.
        if (Node.ATTRIBUTE_NODE != ntype) {
            Object nsObj = m_NSInfos.get(n); // return value
            nsInfo = (nsObj == null) ? null : (NSInfo) nsObj;
            hasProcessedNS = (nsInfo == null) ? false : nsInfo.m_hasProcessedNS;
        } else {
            hasProcessedNS = false;
            nsInfo = null;
        }
        if (hasProcessedNS) {
            // Cache hit: the namespace was resolved on a previous call.
            namespaceOfPrefix = nsInfo.m_namespace;
        } else {
            namespaceOfPrefix = null;
            String nodeName = n.getNodeName();
            int indexOfNSSep = nodeName.indexOf(':');
            String prefix;
            if (Node.ATTRIBUTE_NODE == ntype) {
                if (indexOfNSSep > 0) {
                    prefix = nodeName.substring(0, indexOfNSSep);
                } else {
                    // Attributes don't use the default namespace, so if
                    // there isn't a prefix, we're done.
                    return namespaceOfPrefix;
                }
            } else {
                prefix = (indexOfNSSep >= 0) ? nodeName.substring(0, indexOfNSSep) : "";
            }
            boolean ancestorsHaveXMLNS = false;
            boolean nHasXMLNS = false;
            if (prefix.equals("xml")) {
                // The "xml" prefix is permanently bound to the XML namespace.
                namespaceOfPrefix = QName.S_XMLNAMESPACEURI;
            } else {
                int parentType;
                Node parent = n;
                // Walk up the ancestor chain until the prefix is resolved or
                // a cached NSInfo tells us no ancestor declares namespaces.
                while ((null != parent) && (null == namespaceOfPrefix)) {
                    if ((null != nsInfo)
                            && (nsInfo.m_ancestorHasXMLNSAttrs == NSInfo.ANCESTORNOXMLNS)) {
                        break;
                    }
                    parentType = parent.getNodeType();
                    if ((null == nsInfo) || nsInfo.m_hasXMLNSAttrs) {
                        boolean elementHasXMLNS = false;
                        if (parentType == Node.ELEMENT_NODE) {
                            // Scan this element's attributes for xmlns /
                            // xmlns:* declarations matching our prefix.
                            NamedNodeMap nnm = parent.getAttributes();
                            for (int i = 0; i < nnm.getLength(); i++) {
                                Node attr = nnm.item(i);
                                String aname = attr.getNodeName();
                                // Cheap first-character filter before the
                                // more expensive string comparisons.
                                if (aname.charAt(0) == 'x') {
                                    boolean isPrefix = aname.startsWith("xmlns:");
                                    if (aname.equals("xmlns") || isPrefix) {
                                        if (n == parent)
                                            nHasXMLNS = true;
                                        elementHasXMLNS = true;
                                        ancestorsHaveXMLNS = true;
                                        String p = isPrefix ? aname.substring(6) : "";
                                        if (p.equals(prefix)) {
                                            namespaceOfPrefix = attr.getNodeValue();
                                            break;
                                        }
                                    }
                                }
                            }
                        }
                        // Cache whether this ancestor carries xmlns attrs so
                        // later lookups can skip the attribute scan.
                        if ((Node.ATTRIBUTE_NODE != parentType) && (null == nsInfo)
                                && (n != parent)) {
                            nsInfo = elementHasXMLNS
                                    ? m_NSInfoUnProcWithXMLNS : m_NSInfoUnProcWithoutXMLNS;
                            m_NSInfos.put(parent, nsInfo);
                        }
                    }
                    if (Node.ATTRIBUTE_NODE == parentType) {
                        parent = getParentOfNode(parent);
                    } else {
                        // Remember candidates whose cached info may be
                        // upgraded to "no ancestor has xmlns" below.
                        m_candidateNoAncestorXMLNS.addElement(parent);
                        m_candidateNoAncestorXMLNS.addElement(nsInfo);
                        parent = parent.getParentNode();
                    }
                    if (null != parent) {
                        Object nsObj = m_NSInfos.get(parent); // return value
                        nsInfo = (nsObj == null) ? null : (NSInfo) nsObj;
                    }
                }
                int nCandidates = m_candidateNoAncestorXMLNS.size();
                if (nCandidates > 0) {
                    // If we reached the root without seeing any xmlns attrs,
                    // record that fact for every candidate on the path.
                    if ((false == ancestorsHaveXMLNS) && (null == parent)) {
                        for (int i = 0; i < nCandidates; i += 2) {
                            Object candidateInfo = m_candidateNoAncestorXMLNS.elementAt(i + 1);
                            if (candidateInfo == m_NSInfoUnProcWithoutXMLNS) {
                                m_NSInfos.put(m_candidateNoAncestorXMLNS.elementAt(i),
                                        m_NSInfoUnProcNoAncestorXMLNS);
                            } else if (candidateInfo == m_NSInfoNullWithoutXMLNS) {
                                m_NSInfos.put(m_candidateNoAncestorXMLNS.elementAt(i),
                                        m_NSInfoNullNoAncestorXMLNS);
                            }
                        }
                    }
                    m_candidateNoAncestorXMLNS.removeAllElements();
                }
            }
            // Cache the outcome for this node (attributes are never cached).
            if (Node.ATTRIBUTE_NODE != ntype) {
                if (null == namespaceOfPrefix) {
                    if (ancestorsHaveXMLNS) {
                        if (nHasXMLNS)
                            m_NSInfos.put(n, m_NSInfoNullWithXMLNS);
                        else
                            m_NSInfos.put(n, m_NSInfoNullWithoutXMLNS);
                    } else {
                        m_NSInfos.put(n, m_NSInfoNullNoAncestorXMLNS);
                    }
                } else {
                    m_NSInfos.put(n, new NSInfo(namespaceOfPrefix, nHasXMLNS));
                }
            }
        }
        return namespaceOfPrefix;
    }
}
public class DefaultSentryClientFactory { /** * How long to wait between attempts to flush the disk buffer , in milliseconds . * @ param dsn Sentry server DSN which may contain options . * @ return ow long to wait between attempts to flush the disk buffer , in milliseconds . */ protected long getBufferFlushtime ( Dsn dsn ) { } }
return Util . parseLong ( Lookup . lookup ( BUFFER_FLUSHTIME_OPTION , dsn ) , BUFFER_FLUSHTIME_DEFAULT ) ;
public class PollingUtils { /** * Reset polling * @ param config * the polling config * @ param attributePropertiesManager * the attribute properties manager * @ throws DevFailed */ public static void resetPolling ( final PolledObjectConfig config , final AttributePropertiesManager attributePropertiesManager ) throws DevFailed { } }
config . setPolled ( false ) ; config . setPollingPeriod ( 0 ) ; attributePropertiesManager . setAttributePropertyInDB ( config . getName ( ) , Constants . IS_POLLED , "false" ) ; attributePropertiesManager . setAttributePropertyInDB ( config . getName ( ) , Constants . POLLING_PERIOD , "0" ) ;
public class SimpleBlas {
    /**
     * Compute x^T * y (dot product) over the full length of both matrices.
     *
     * @param x left operand
     * @param y right operand
     * @return the scalar dot product
     */
    public static double dot(DoubleMatrix x, DoubleMatrix y) {
        // Delegates to the pure-Java BLAS kernel (stride 1, offset 0).
        return JavaBlas.rdot(x.length, x.data, 0, 1, y.data, 0, 1);
    }
}
public class BindUploader {
    /**
     * Upload binds from local file to stage.
     * Executes a PUT statement up to PUT_RETRY_COUNT times, checking the status
     * column of each result; stops on the first UPLOADED status.
     *
     * @throws BindException if uploading the binds fails after all retries
     */
    private void putBinds() throws BindException {
        createStageIfNeeded();
        String putStatement = getPutStmt(bindDir.toString(), stagePath);
        for (int i = 0; i < PUT_RETRY_COUNT; i++) {
            try {
                SFStatement statement = new SFStatement(session);
                SFBaseResultSet putResult = statement.execute(putStatement, null, null);
                putResult.next();
                // metadata is 0-based, result set is 1-based
                int column = putResult.getMetaData()
                        .getColumnIndex(SnowflakeFileTransferAgent.UploadColumns.status.name()) + 1;
                String status = putResult.getString(column);
                if (SnowflakeFileTransferAgent.ResultStatus.UPLOADED.name().equals(status)) {
                    return; // success!
                }
                logger.debug("PUT statement failed. The response had status %s.", status);
            } catch (SFException | SQLException ex) {
                // Log and fall through to the next retry attempt.
                logger.debug("Exception encountered during PUT operation. ", ex);
            }
        }
        // if we haven't returned (on success), throw exception
        throw new BindException("Failed to PUT files to stage.", BindException.Type.UPLOAD);
    }
}
public class Util { /** * Determine the full identifier of the current element on the AST . The identifier is built from DOT and IDENT * elements found directly below the specified element . Other elements encountered are ignored . * @ param pAst an AST * @ return the full identifier constructed from either the first encountered IDENT or DOT ; < code > null < / code > if no * identifier could be constructed */ @ CheckForNull public static String getFullIdent ( @ Nonnull final DetailAST pAst ) { } }
String result = null ; DetailAST ast = checkTokens ( pAst . getFirstChild ( ) , TokenTypes . DOT , TokenTypes . IDENT ) ; if ( ast != null ) { StringBuilder sb = new StringBuilder ( ) ; if ( getFullIdentInternal ( ast , sb ) ) { result = sb . toString ( ) ; } } return result ;
public class Calendar { /** * Sets this Calendar ' s current time from the given long value . * An IllegalIcuArgumentException is thrown when millis is outside the range permitted * by a Calendar object when in strict mode . * When in lenient mode the out of range values are pinned to their respective min / max . * @ param millis the new time in UTC milliseconds from the epoch . */ public void setTimeInMillis ( long millis ) { } }
if ( millis > MAX_MILLIS ) { if ( isLenient ( ) ) { millis = MAX_MILLIS ; } else { throw new IllegalArgumentException ( "millis value greater than upper bounds for a Calendar : " + millis ) ; } } else if ( millis < MIN_MILLIS ) { if ( isLenient ( ) ) { millis = MIN_MILLIS ; } else { throw new IllegalArgumentException ( "millis value less than lower bounds for a Calendar : " + millis ) ; } } time = millis ; areFieldsSet = areAllFieldsSet = false ; isTimeSet = areFieldsVirtuallySet = true ; for ( int i = 0 ; i < fields . length ; ++ i ) { fields [ i ] = stamp [ i ] = 0 ; // UNSET = = 0 }
public class LayoutParser { /** * Parse the layout from an XML document . * @ param document An XML document . * @ return The root layout element . */ public static Layout parseDocument ( Document document ) { } }
return new Layout ( instance . parseChildren ( document , null , Tag . LAYOUT ) ) ;
public class Event { /** * Fire the event ( call the listeners ) . */ public void fire ( T event ) { } }
ArrayList < Listener < T > > list1 ; ArrayList < Runnable > list2 ; synchronized ( this ) { if ( listeners == null ) list1 = null ; else list1 = new ArrayList < > ( listeners ) ; if ( listenersRunnable == null ) list2 = null ; else list2 = new ArrayList < > ( listenersRunnable ) ; } if ( list1 != null ) for ( int i = 0 ; i < list1 . size ( ) ; ++ i ) try { list1 . get ( i ) . fire ( event ) ; } catch ( Throwable t ) { LCCore . getApplication ( ) . getDefaultLogger ( ) . error ( "Event listener error: " + list1 . get ( i ) , t ) ; } if ( list2 != null ) for ( int i = 0 ; i < list2 . size ( ) ; ++ i ) try { list2 . get ( i ) . run ( ) ; } catch ( Throwable t ) { LCCore . getApplication ( ) . getDefaultLogger ( ) . error ( "Event listener error: " + list2 . get ( i ) , t ) ; }
public class MLLibUtil { /** * Converts JavaRDD labeled points to JavaRDD DataSets . * @ param data JavaRDD LabeledPoints * @ param numPossibleLabels number of possible labels * @ param preCache boolean pre - cache rdd before operation * @ return */ public static JavaRDD < DataSet > fromLabeledPoint ( JavaRDD < LabeledPoint > data , final long numPossibleLabels , boolean preCache ) { } }
if ( preCache && ! data . getStorageLevel ( ) . useMemory ( ) ) { data . cache ( ) ; } return data . map ( new Function < LabeledPoint , DataSet > ( ) { @ Override public DataSet call ( LabeledPoint lp ) { return fromLabeledPoint ( lp , numPossibleLabels ) ; } } ) ;
public class MarketplaceAgreementsInner { /** * Sign marketplace terms . * @ param publisherId Publisher identifier string of image being deployed . * @ param offerId Offer identifier string of image being deployed . * @ param planId Plan identifier string of image being deployed . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the AgreementTermsInner object */ public Observable < AgreementTermsInner > signAsync ( String publisherId , String offerId , String planId ) { } }
return signWithServiceResponseAsync ( publisherId , offerId , planId ) . map ( new Func1 < ServiceResponse < AgreementTermsInner > , AgreementTermsInner > ( ) { @ Override public AgreementTermsInner call ( ServiceResponse < AgreementTermsInner > response ) { return response . body ( ) ; } } ) ;
public class VdmLaunchShortcut { /** * Prompts the user to select a type from the given types . * @ param types * the types to choose from * @ param title * the selection dialog title * @ return the selected type or < code > null < / code > if none . */ protected INode chooseType ( INode [ ] types , String title ) { } }
try { DebugTypeSelectionDialog mmsd = new DebugTypeSelectionDialog ( VdmDebugPlugin . getActiveWorkbenchShell ( ) , types , title , project ) ; if ( mmsd . open ( ) == Window . OK ) { return ( INode ) mmsd . getResult ( ) [ 0 ] ; } } catch ( Exception e ) { e . printStackTrace ( ) ; } return null ;
public class OnBindViewHolderListenerImpl { /** * is called in onViewRecycled to unbind the data on the ViewHolder * @ param viewHolder the viewHolder for the type at this position * @ param position the position of this viewHolder */ @ Override public void unBindViewHolder ( RecyclerView . ViewHolder viewHolder , int position ) { } }
IItem item = FastAdapter . getHolderAdapterItemTag ( viewHolder ) ; if ( item != null ) { item . unbindView ( viewHolder ) ; if ( viewHolder instanceof FastAdapter . ViewHolder ) { ( ( FastAdapter . ViewHolder ) viewHolder ) . unbindView ( item ) ; } // remove set tag ' s viewHolder . itemView . setTag ( R . id . fastadapter_item , null ) ; viewHolder . itemView . setTag ( R . id . fastadapter_item_adapter , null ) ; } else { Log . e ( "FastAdapter" , "The bindView method of this item should set the `Tag` on its itemView (https://github.com/mikepenz/FastAdapter/blob/develop/library-core/src/main/java/com/mikepenz/fastadapter/items/AbstractItem.java#L189)" ) ; }
public class SibRaConsumerSession { /** * ( non - Javadoc ) * Will throw SibRaNotSupportedException * @ see com . ibm . wsspi . sib . core . ConsumerSession # registerStoppableAsynchConsumerCallback ( * com . ibm . wsspi . sib . core . AsynchConsumerCallback , * int , long , int , com . ibm . wsspi . sib . core . OrderingContext ) */ public void registerStoppableAsynchConsumerCallback ( StoppableAsynchConsumerCallback callback , int maxActiveMessages , long messageLockExpiry , int maxBatchSize , OrderingContext extendedMessageOrderingContext , int maxSequentialFailures , long failingMessageDelay ) throws SISessionUnavailableException , SISessionDroppedException , SIConnectionUnavailableException , SIConnectionDroppedException , SIIncorrectCallException { } }
throw new SibRaNotSupportedException ( NLS . getString ( "ASYNCHRONOUS_METHOD_CWSIV0250" ) ) ;
public class CSVLoader {
    /**
     * Loads all CSV entries contained in the given ZIP archive into the given
     * table. Entries whose name does not end with ".csv" (case-insensitive)
     * are skipped with a warning.
     *
     * @param tableDef definition of the table the CSV rows are loaded into
     * @param file     the ZIP archive to read
     * @throws IOException if the archive or one of its entries cannot be read
     */
    private void loadZIPFile(TableDefinition tableDef, File file) throws IOException {
        try (ZipFile zipFile = new ZipFile(file)) {
            Enumeration<? extends ZipEntry> zipEntries = zipFile.entries();
            while (zipEntries.hasMoreElements()) {
                ZipEntry zipEntry = zipEntries.nextElement();
                if (!zipEntry.getName().toLowerCase().endsWith(".csv")) {
                    m_logger.warn("Skipping zip file entry: " + zipEntry.getName());
                    continue;
                }
                // The entry stream is closed per-entry; the reader wraps it and
                // does not need separate closing.
                try (InputStream zipEntryStream = zipFile.getInputStream(zipEntry)) {
                    BufferedReader reader = new BufferedReader(
                            new InputStreamReader(zipEntryStream, Utils.UTF8_CHARSET));
                    loadCSVFromReader(tableDef, zipEntry.getName(), zipEntry.getSize(), reader);
                }
            }
        }
    }
}
public class CreateCompanies {
    /**
     * Runs the example: creates one advertiser and one agency company.
     *
     * @param adManagerServices the services factory.
     * @param session the session.
     * @throws ApiException if the API request failed with one or more service errors.
     * @throws RemoteException if the API request failed due to other errors.
     */
    public static void runExample(AdManagerServices adManagerServices, AdManagerSession session)
            throws RemoteException {
        // Get the CompanyService.
        CompanyServiceInterface companyService =
                adManagerServices.get(session, CompanyServiceInterface.class);
        Random random = new Random();
        // Create an advertiser with a unique display name.
        Company advertiser = new Company();
        advertiser.setName("Advertiser #" + random.nextInt(Integer.MAX_VALUE));
        advertiser.setType(CompanyType.ADVERTISER);
        // Create an agency with a unique display name.
        Company agency = new Company();
        agency.setName("Agency #" + random.nextInt(Integer.MAX_VALUE));
        agency.setType(CompanyType.AGENCY);
        // Create the companies on the server.
        Company[] created = companyService.createCompanies(new Company[] { advertiser, agency });
        for (Company company : created) {
            System.out.printf("A company with ID %d, name '%s', and type '%s' was created.%n",
                    company.getId(), company.getName(), company.getType());
        }
    }
}
public class LinkCellule { /** * Return a byte [ ] representation of the LinkCellule . The returned bytes can be * used to create a new LinkCellule object representing the same LinkCellule . * UTF - 8 encoding is used to construct the byte array . */ public byte [ ] getBytes ( ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "getBytes" ) ; byte [ ] b = null ; try { b = string ( ) . getBytes ( "UTF-8" ) ; } catch ( UnsupportedEncodingException e ) { FFDCFilter . processException ( e , className + ".getBytes" , "2" , this ) ; } byte [ ] c = new byte [ b . length + 1 ] ; c [ 0 ] = Cellule . LINKCELLULE ; for ( int i = 0 ; i < b . length ; i ++ ) { c [ i + 1 ] = b [ i ] ; } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "getBytes" , c ) ; return c ;
public class LazyCsvAnnotationBeanWriter {
    /**
     * Writes all of the record data.
     * <p>The header row is handled automatically. On second and subsequent
     * invocations the header information is not written again.</p>
     *
     * @param sources the records to write.
     * @param continueOnError whether to continue when a
     *        {@link SuperCsvBindingException} occurs while processing a record.
     *        When {@code true}, processing moves on to the next record even if
     *        an exception occurs.
     * @throws NullPointerException sources is null.
     * @throws IOException if writing a record fails.
     * @throws SuperCsvBindingException if there is a problem with a cell value.
     * @throws SuperCsvException if there is any other problem, such as configuration.
     */
    public void writeAll(final Collection<T> sources, final boolean continueOnError) throws IOException {
        Objects.requireNonNull(sources, "sources should not be null.");
        if (!initialized) {
            init();
        }
        // Write the header only once, before the very first record.
        if (beanMappingCache.getOriginal().isHeader() && getLineNumber() == 0) {
            writeHeader();
        }
        for (T record : sources) {
            try {
                write(record);
            } catch (SuperCsvBindingException e) {
                // Bind errors are only fatal when the caller did not opt in to
                // best-effort processing.
                if (!continueOnError) {
                    throw e;
                }
            }
        }
        super.flush();
    }
}
public class WebFragmentDescriptorImpl { /** * Returns all < code > context - param < / code > elements * @ return list of < code > context - param < / code > */ public List < ParamValueType < WebFragmentDescriptor > > getAllContextParam ( ) { } }
List < ParamValueType < WebFragmentDescriptor > > list = new ArrayList < ParamValueType < WebFragmentDescriptor > > ( ) ; List < Node > nodeList = model . get ( "context-param" ) ; for ( Node node : nodeList ) { ParamValueType < WebFragmentDescriptor > type = new ParamValueTypeImpl < WebFragmentDescriptor > ( this , "context-param" , model , node ) ; list . add ( type ) ; } return list ;
public class TaskExecutorManager { /** * Adds the given tasks to the task updates data structure . If there is already an update for the * specified task , it is not changed . * @ param tasks the tasks to restore */ public synchronized void restoreTaskUpdates ( List < TaskInfo > tasks ) { } }
for ( TaskInfo task : tasks ) { Pair < Long , Integer > id = new Pair < > ( task . getJobId ( ) , task . getTaskId ( ) ) ; if ( ! mTaskUpdates . containsKey ( id ) ) { mTaskUpdates . put ( id , task ) ; } }
public class DownloadCallable {
    /**
     * This method is called only if it is a resumed download.
     * Adjust the range of the get request, and the expected (ie current) file
     * length of the destination file to append to.
     *
     * @param req the GET request whose start byte is shifted forward by the
     *            number of bytes already present in the destination file
     */
    private void adjustRequest(GetObjectRequest req) {
        long[] range = req.getRange();
        long lastByte = range[1];
        long totalBytesToDownload = lastByte - this.origStartingByte + 1;
        if (dstfile.exists()) {
            // Lock the partial file while reading its length so a concurrent
            // writer cannot change it mid-adjustment.
            if (!FileLocks.lock(dstfile)) {
                throw new FileLockException("Fail to lock " + dstfile + " for range adjustment");
            }
            try {
                expectedFileLength = dstfile.length();
                // Resume immediately after the bytes already on disk.
                long startingByte = this.origStartingByte + expectedFileLength;
                LOG.info("Adjusting request range from " + Arrays.toString(range)
                        + " to " + Arrays.toString(new long[] { startingByte, lastByte })
                        + " for file " + dstfile);
                req.setRange(startingByte, lastByte);
                totalBytesToDownload = lastByte - startingByte + 1;
            } finally {
                FileLocks.unlock(dstfile);
            }
        }
        // A negative count means the local file already covers the requested
        // range -- there is nothing sensible left to download.
        if (totalBytesToDownload < 0) {
            throw new IllegalArgumentException(
                    "Unable to determine the range for download operation. lastByte="
                            + lastByte + ", origStartingByte=" + origStartingByte
                            + ", expectedFileLength=" + expectedFileLength
                            + ", totalBytesToDownload=" + totalBytesToDownload);
        }
    }
}
public class Monitors { /** * Creates a new monitor for a thread pool with standard metrics for the pool size , queue size , * task counts , etc . * @ param id id to differentiate metrics for this pool from others . * @ param pool thread pool instance to monitor . * @ return composite monitor based on stats provided for the pool */ public static CompositeMonitor < ? > newThreadPoolMonitor ( String id , ThreadPoolExecutor pool ) { } }
return newObjectMonitor ( id , new MonitoredThreadPool ( pool ) ) ;
public class Kekulization {
    /**
     * Determine the set of atoms that are available to have a double-bond.
     *
     * @param graph adjacency list representation
     * @param atoms array of atoms
     * @param bonds map of atom indices to bonds
     * @return atoms that can require a double-bond
     */
    private static BitSet available(int[][] graph, IAtom[] atoms, EdgeToBondMap bonds) {
        final BitSet available = new BitSet();
        // for all atoms, select those that require a double-bond
        ATOMS: for (int i = 0; i < atoms.length; i++) {
            final IAtom atom = atoms[i];
            // preconditions
            if (atom.getAtomicNumber() == null)
                throw new IllegalArgumentException("atom " + (i + 1) + " had unset atomic number");
            if (atom.getFormalCharge() == null)
                throw new IllegalArgumentException("atom " + (i + 1) + " had unset formal charge");
            if (atom.getImplicitHydrogenCount() == null)
                throw new IllegalArgumentException("atom " + (i + 1) + " had unset implicit hydrogen count");
            // only aromatic atoms are candidates for pi-bond assignment
            if (!atom.getFlag(ISAROMATIC)) continue;
            // count preexisting pi-bonds, a higher bond order causes a skip
            int nPiBonds = 0;
            for (final int w : graph[i]) {
                IBond.Order order = bonds.get(i, w).getOrder();
                if (order == DOUBLE) {
                    nPiBonds++;
                } else if (order.numeric() > 2) {
                    // triple or higher: this atom cannot take another pi bond
                    continue ATOMS;
                }
            }
            // check if a pi bond can be assigned
            final int element = atom.getAtomicNumber();
            final int charge = atom.getFormalCharge();
            final int valence = graph[i].length + atom.getImplicitHydrogenCount() + nPiBonds;
            if (available(element, charge, valence)) {
                available.set(i);
            }
        }
        return available;
    }
}
public class LongHashSet { /** * An long specialised version of { this # remove ( Object ) } . * @ param value the value to remove * @ return true if the value was present , false otherwise */ public boolean remove ( final long value ) { } }
int index = longHash ( value , mask ) ; while ( values [ index ] != missingValue ) { if ( values [ index ] == value ) { values [ index ] = missingValue ; compactChain ( index ) ; size -- ; return true ; } index = next ( index ) ; } return false ;
public class AmazonGameLiftClient { /** * Registers a player ' s acceptance or rejection of a proposed FlexMatch match . A matchmaking configuration may * require player acceptance ; if so , then matches built with that configuration cannot be completed unless all * players accept the proposed match within a specified time limit . * When FlexMatch builds a match , all the matchmaking tickets involved in the proposed match are placed into status * < code > REQUIRES _ ACCEPTANCE < / code > . This is a trigger for your game to get acceptance from all players in the * ticket . Acceptances are only valid for tickets when they are in this status ; all other acceptances result in an * error . * To register acceptance , specify the ticket ID , a response , and one or more players . Once all players have * registered acceptance , the matchmaking tickets advance to status < code > PLACING < / code > , where a new game session * is created for the match . * If any player rejects the match , or if acceptances are not received before a specified timeout , the proposed * match is dropped . The matchmaking tickets are then handled in one of two ways : For tickets where all players * accepted the match , the ticket status is returned to < code > SEARCHING < / code > to find a new match . For tickets * where one or more players failed to accept the match , the ticket status is set to < code > FAILED < / code > , and * processing is terminated . A new matchmaking request for these players can be submitted as needed . * < ul > * < li > * < a > StartMatchmaking < / a > * < / li > * < li > * < a > DescribeMatchmaking < / a > * < / li > * < li > * < a > StopMatchmaking < / a > * < / li > * < li > * < a > AcceptMatch < / a > * < / li > * < li > * < a > StartMatchBackfill < / a > * < / li > * < / ul > * @ param acceptMatchRequest * Represents the input for a request action . * @ return Result of the AcceptMatch operation returned by the service . 
* @ throws InvalidRequestException * One or more parameter values in the request are invalid . Correct the invalid parameter values before * retrying . * @ throws NotFoundException * A service resource associated with the request could not be found . Clients should not retry such * requests . * @ throws InternalServiceException * The service encountered an unrecoverable internal failure while processing the request . Clients can retry * such requests immediately or after a waiting period . * @ throws UnsupportedRegionException * The requested operation is not supported in the region specified . * @ sample AmazonGameLift . AcceptMatch * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / gamelift - 2015-10-01 / AcceptMatch " target = " _ top " > AWS API * Documentation < / a > */ @ Override public AcceptMatchResult acceptMatch ( AcceptMatchRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeAcceptMatch ( request ) ;
public class ManagementGate { /** * Returns the edge arriving at the given index . * @ param index * the index of the edge to be returned * @ return the edge at the given index or < code > null < / code > if no such edge exists */ public ManagementEdge getBackwardEdge ( final int index ) { } }
if ( index < this . backwardEdges . size ( ) ) { return this . backwardEdges . get ( index ) ; } return null ;
public class FleetsApi { /** * Create fleet squad Create a new squad in a fleet - - - SSO Scope : * esi - fleets . write _ fleet . v1 * @ param fleetId * ID for a fleet ( required ) * @ param wingId * The wing _ id to create squad in ( required ) * @ param datasource * The server name you would like data from ( optional , default to * tranquility ) * @ param token * Access token to use if unable to set a header ( optional ) * @ return FleetSquadCreatedResponse * @ throws ApiException * If fail to call the API , e . g . server error or cannot * deserialize the response body */ public FleetSquadCreatedResponse postFleetsFleetIdWingsWingIdSquads ( Long fleetId , Long wingId , String datasource , String token ) throws ApiException { } }
ApiResponse < FleetSquadCreatedResponse > resp = postFleetsFleetIdWingsWingIdSquadsWithHttpInfo ( fleetId , wingId , datasource , token ) ; return resp . getData ( ) ;
public class BrowserOpener { /** * open a URL in the browser that was used to launch SPICE * @ param urlstring string represntation of URL to be opened * @ return true if this was successfull */ public static boolean showDocument ( String urlstring ) { } }
try { URL url = new URL ( urlstring ) ; return showDocument ( url ) ; } catch ( MalformedURLException e ) { logger . warn ( "malformed URL {}" , urlstring , e ) ; return false ; }
public class RaftSessionRegistry { /** * Returns a set of sessions associated with the given service . * @ param primitiveId the service identifier * @ return a collection of sessions associated with the given service */ public Collection < RaftSession > getSessions ( PrimitiveId primitiveId ) { } }
return sessions . values ( ) . stream ( ) . filter ( session -> session . getService ( ) . serviceId ( ) . equals ( primitiveId ) ) . filter ( session -> session . getState ( ) . active ( ) ) . collect ( Collectors . toSet ( ) ) ;
public class PathMatcher { /** * Convenience method for excluding all paths starting with a prefix e . g . " / foo " would exclude " / foo " , " / foo / bar " , etc . * @ param path the prefix for the paths to be excluded * @ return this path matcher */ public PathMatcher excludeBranch ( final String path ) { } }
validatePath ( path ) ; excludedPatterns . add ( Pattern . compile ( "^" + path + "(/.*)?$" ) ) ; return this ;
public class XmlUtilImpl {
    /**
     * This method gets the singleton instance of this {@link XmlUtilImpl}.<br>
     * <b>ATTENTION:</b><br>
     * Please prefer dependency-injection instead of using this method.
     *
     * @return the singleton instance.
     */
    public static XmlUtil getInstance() {
        // Double-checked locking: the unsynchronized fast path avoids lock
        // contention once the singleton exists.
        // NOTE(review): for this idiom to be safe under the Java memory model
        // the 'instance' field should be declared volatile -- confirm at the
        // field declaration (not visible here).
        if (instance == null) {
            synchronized (XmlUtilImpl.class) {
                if (instance == null) {
                    XmlUtilImpl util = new XmlUtilImpl();
                    util.initialize();
                    // Publish only after initialize() has completed.
                    instance = util;
                }
            }
        }
        return instance;
    }
}
public class WampSessionContextHolder { /** * Return the WampSession currently bound to the thread or raise an * { @ link java . lang . IllegalStateException } if none are bound . . * @ return the attributes , never { @ code null } * @ throws java . lang . IllegalStateException if attributes are not bound */ public static WampSession currentAttributes ( ) throws IllegalStateException { } }
WampSession attributes = getAttributes ( ) ; if ( attributes == null ) { throw new IllegalStateException ( "No thread-bound WampSession found. " + "Your code is probably not processing a client message and executing in " + "message-handling methods invoked by the WampAnnotationMethodMessageHandler?" ) ; } return attributes ;
public class CertFactory { /** * Factory method for creating a new { @ link X509Certificate } object from the given parameters . * @ param publicKey * the public key * @ param privateKey * the private key * @ param serialNumber * the serial number * @ param subject * the subject * @ param issuer * the issuer * @ param signatureAlgorithm * the signature algorithm * @ param start * the start * @ param end * the end * @ return the new { @ link X509Certificate } object * @ throws SignatureException * is thrown if a generic signature error occur * @ throws NoSuchAlgorithmException * is thrown if a SecureRandomSpi implementation for the specified algorithm is not * available from the specified provider * @ throws IllegalStateException * is thrown if an illegal state occurs on the generation process * @ throws InvalidKeyException * is thrown if initialization of the cypher object fails on the generation process * @ throws CertificateEncodingException * is thrown whenever an error occurs while attempting to encode a certificate */ public static X509Certificate newX509Certificate ( final PublicKey publicKey , final PrivateKey privateKey , final BigInteger serialNumber , final String subject , final String issuer , final String signatureAlgorithm , final Date start , final Date end ) throws CertificateEncodingException , InvalidKeyException , IllegalStateException , NoSuchAlgorithmException , SignatureException { } }
final X500Principal subjectPrincipal = new X500Principal ( subject ) ; final X500Principal issuerPrincipal = new X500Principal ( issuer ) ; final X509V3CertificateGenerator certificateGenerator = new X509V3CertificateGenerator ( ) ; certificateGenerator . setPublicKey ( publicKey ) ; certificateGenerator . setSerialNumber ( serialNumber ) ; certificateGenerator . setSubjectDN ( subjectPrincipal ) ; certificateGenerator . setIssuerDN ( issuerPrincipal ) ; certificateGenerator . setNotBefore ( start ) ; certificateGenerator . setNotAfter ( end ) ; certificateGenerator . setSignatureAlgorithm ( signatureAlgorithm ) ; final X509Certificate certificate = certificateGenerator . generate ( privateKey ) ; return certificate ;
public class QuickStartSecurityRegistry { /** * { @ inheritDoc } */ @ Override public String getUniqueUserId ( String userSecurityName ) throws EntryNotFoundException , RegistryException { } }
if ( userSecurityName == null ) { throw new IllegalArgumentException ( "userSecurityName is null" ) ; } if ( userSecurityName . isEmpty ( ) ) { throw new IllegalArgumentException ( "userSecurityName is an empty String" ) ; } if ( user . equals ( userSecurityName ) ) { return user ; } else { throw new EntryNotFoundException ( userSecurityName + " does not exist" ) ; }
public class TypeExtractionUtils { /** * Extracts a Single Abstract Method ( SAM ) as defined in Java Specification ( 4.3.2 . The Class Object , * 9.8 Functional Interfaces , 9.4.3 Interface Method Body ) from given class . * @ param baseClass a class that is a FunctionalInterface to retrieve a SAM from * @ throws InvalidTypesException if the given class does not implement FunctionalInterface * @ return single abstract method of the given class */ public static Method getSingleAbstractMethod ( Class < ? > baseClass ) { } }
if ( ! baseClass . isInterface ( ) ) { throw new InvalidTypesException ( "Given class: " + baseClass + "is not a FunctionalInterface." ) ; } Method sam = null ; for ( Method method : baseClass . getMethods ( ) ) { if ( Modifier . isAbstract ( method . getModifiers ( ) ) ) { if ( sam == null ) { sam = method ; } else { throw new InvalidTypesException ( "Given class: " + baseClass + " is not a FunctionalInterface. It has more than one abstract method." ) ; } } } if ( sam == null ) { throw new InvalidTypesException ( "Given class: " + baseClass + " is not a FunctionalInterface. It does not have any abstract methods." ) ; } return sam ;
public class StreamEx { /** * Returns a sequential { @ code StreamEx } which elements are elements of * given array in descending order . * @ param < T > the type of stream elements * @ param array array to get the elements from * @ return the new stream */ public static < T > StreamEx < T > ofReversed ( T [ ] array ) { } }
int size = array . length ; return IntStreamEx . ofIndices ( array ) . mapToObj ( idx -> array [ size - idx - 1 ] ) ;
public class DataStream { /** * Writes a DataStream to the file specified by the path parameter . The * writing is performed periodically every millis milliseconds . * < p > For every field of an element of the DataStream the result of { @ link Object # toString ( ) } * is written . This method can only be used on data streams of tuples . * @ param path * the path pointing to the location the text file is written to * @ param writeMode * Controls the behavior for existing files . Options are * NO _ OVERWRITE and OVERWRITE . * @ param rowDelimiter * the delimiter for two rows * @ param fieldDelimiter * the delimiter for two fields * @ return the closed DataStream */ @ SuppressWarnings ( "unchecked" ) @ PublicEvolving public < X extends Tuple > DataStreamSink < T > writeAsCsv ( String path , WriteMode writeMode , String rowDelimiter , String fieldDelimiter ) { } }
Preconditions . checkArgument ( getType ( ) . isTupleType ( ) , "The writeAsCsv() method can only be used on data streams of tuples." ) ; CsvOutputFormat < X > of = new CsvOutputFormat < > ( new Path ( path ) , rowDelimiter , fieldDelimiter ) ; if ( writeMode != null ) { of . setWriteMode ( writeMode ) ; } return writeUsingOutputFormat ( ( OutputFormat < T > ) of ) ;
public class UTF8String {
    /**
     * Based on the given trim string, trim this string starting from the right
     * end. Examines each character from the right end, removes it if it occurs
     * in the trim string, and stops at the first character not in the trim
     * string.
     *
     * @param trimString the set of characters to trim (as a string); when null,
     *        null is returned
     * @return the trimmed string, or EMPTY_UTF8 when everything was trimmed
     */
    public UTF8String trimRight(UTF8String trimString) {
        if (trimString == null) return null;
        int charIdx = 0;
        // number of characters from the source string
        int numChars = 0;
        // array of character length for the source string
        int[] stringCharLen = new int[numBytes];
        // array of the first byte position for each character in the source string
        int[] stringCharPos = new int[numBytes];
        // build the position and length array (UTF-8 characters are 1-4 bytes,
        // so numBytes is a safe upper bound on the character count)
        while (charIdx < numBytes) {
            stringCharPos[numChars] = charIdx;
            stringCharLen[numChars] = numBytesForFirstByte(getByte(charIdx));
            charIdx += stringCharLen[numChars];
            numChars++;
        }
        // index trimEnd points to the first no matching byte position from the
        // right side of the source string.
        int trimEnd = numBytes - 1;
        while (numChars > 0) {
            // Extract the last remaining character and test membership in trimString.
            UTF8String searchChar = copyUTF8String(
                    stringCharPos[numChars - 1],
                    stringCharPos[numChars - 1] + stringCharLen[numChars - 1] - 1);
            if (trimString.find(searchChar, 0) >= 0) {
                trimEnd -= stringCharLen[numChars - 1];
            } else {
                break;
            }
            numChars--;
        }
        if (trimEnd < 0) {
            // empty string
            return EMPTY_UTF8;
        } else {
            return copyUTF8String(0, trimEnd);
        }
    }
}
public class Parser { public final Parser < RECORD > addDissector ( final Dissector dissector ) { } }
assembled = false ; if ( dissector != null ) { allDissectors . add ( dissector ) ; } return this ;
public class LogRecordContext { /** * Removes an extension key / value from the context * @ param extensionName * String extensionName associated with the registered extension . * @ throws IllegalArgumentException * if parameter < code > extensionName < / code > is < code > null < / code > . */ public static boolean removeExtension ( String extensionName ) { } }
if ( extensionName == null ) { throw new IllegalArgumentException ( "Parameter 'extensionName' can not be null" ) ; } HashMap < String , String > ext = extensions . get ( ) ; return ext == null ? false : ext . remove ( extensionName ) != null ;
public class SimpleReadWriteLock { /** * Execute the provided callable in a read lock . * @ param aSupplier * Callable to be executed . May not be < code > null < / code > . * @ return The return value of the callable . May be < code > null < / code > . */ public boolean readLocked ( @ Nonnull final BooleanSupplier aSupplier ) { } }
readLock ( ) . lock ( ) ; try { return aSupplier . getAsBoolean ( ) ; } finally { readLock ( ) . unlock ( ) ; }
public class MongoDBClient { /** * Method to execute mongo jscripts . * @ param script * jscript in string format * @ return result object . */ @ Override public Object executeScript ( String script ) { } }
Object result = mongoDb . eval ( script ) ; KunderaCoreUtils . printQuery ( "Execute mongo jscripts:" + script , showQuery ) ; return result ;
public class SpiderParam { /** * Sets the time between the requests sent to a server . * @ param requestWait the new request wait time */ public void setRequestWaitTime ( int requestWait ) { } }
this . requestWait = requestWait ; this . getConfig ( ) . setProperty ( SPIDER_REQUEST_WAIT , Integer . toString ( requestWait ) ) ;
public class Content { /** * Retrieve content object . Delegates { @ link # getValue ( Object , String ) } to obtain the requested value . If value is * null warn the event ; in any case return value . * @ param scope scope object , * @ param propertyPath object property path . * @ return content object or null . * @ throws TemplateException if requested value is undefined . */ Object getObject ( Object scope , String propertyPath ) throws TemplateException { } }
Object object = getValue ( scope , propertyPath ) ; if ( object == null ) { warn ( scope . getClass ( ) , propertyPath ) ; } return object ;
public class ValidateProxy { /** * private ArrayList < Class < ? > > parameters ( Method method ) * ArrayList < Class < ? > > params = new ArrayList < > ( ) ; * for ( Class < ? > paramType : method . getParameterTypes ( ) ) { * if ( ! _ resultClasses . contains ( paramType ) ) { * params . add ( paramType ) ; * return params ; */ private String proxyClassName ( ) { } }
String simpleName = _proxyClass . getSimpleName ( ) ; int p = simpleName . lastIndexOf ( "__" ) ; if ( p > 0 ) { return simpleName . substring ( 0 , p ) ; } else { return simpleName ; }
public class CommonUtils { /** * Perform the copying of all fields from { @ code src } to { @ code dest } . The code was copied from * { @ code org . springframework . util . ReflectionUtils # shallowCopyFieldState ( Object , Object ) } . */ public static < S , D extends S > void copyFields ( final S src , D dest ) throws IllegalArgumentException { } }
Class < ? > targetClass = src . getClass ( ) ; do { Field [ ] fields = targetClass . getDeclaredFields ( ) ; for ( Field field : fields ) { // Skip static fields : if ( Modifier . isStatic ( field . getModifiers ( ) ) ) { continue ; } try { if ( ( ! Modifier . isPublic ( field . getModifiers ( ) ) || ! Modifier . isPublic ( field . getDeclaringClass ( ) . getModifiers ( ) ) || Modifier . isFinal ( field . getModifiers ( ) ) ) && ! field . isAccessible ( ) ) { field . setAccessible ( true ) ; } Object srcValue = field . get ( src ) ; field . set ( dest , srcValue ) ; } catch ( IllegalAccessException ex ) { throw new IllegalStateException ( "Shouldn't be illegal to access field '" + field . getName ( ) + "': " + ex ) ; } } targetClass = targetClass . getSuperclass ( ) ; } while ( targetClass != null && targetClass != Object . class ) ;
public class AvroUtils {
    /**
     * Converts an Avro schema into the equivalent Pangool schema for
     * de-serializing it.
     *
     * <p>Primitive Avro types map 1:1 to Pangool types. BYTES fields are
     * either plain byte arrays or, when tagged with
     * {@code Field.METADATA_BYTES_AS_OBJECT}, custom objects whose class (and
     * optional serialization class) are read from the field's metadata
     * properties. ENUM fields are resolved from their recorded class name.
     * All non-reserved field properties are carried over.
     *
     * @param avroSchema the Avro schema to convert
     * @return a new Pangool {@code Schema} with the same full name and fields
     * @throws PangoolRuntimeException if a referenced class cannot be loaded
     *         or an Avro type has no Pangool counterpart
     */
    @SuppressWarnings({ "rawtypes", "unchecked" })
    public static Schema toPangoolSchema(org.apache.avro.Schema avroSchema) {
        List<Field> fields = new ArrayList<Field>();
        for (org.apache.avro.Schema.Field avroField : avroSchema.getFields()) {
            org.apache.avro.Schema.Type type = avroField.schema().getType();
            Field pangoolField;
            switch (type) {
            case INT:
                pangoolField = Field.create(avroField.name(), Type.INT);
                break;
            case LONG:
                pangoolField = Field.create(avroField.name(), Type.LONG);
                break;
            case FLOAT:
                pangoolField = Field.create(avroField.name(), Type.FLOAT);
                break;
            case DOUBLE:
                pangoolField = Field.create(avroField.name(), Type.DOUBLE);
                break;
            case BOOLEAN:
                pangoolField = Field.create(avroField.name(), Type.BOOLEAN);
                break;
            case STRING:
                pangoolField = Field.create(avroField.name(), Type.STRING);
                break;
            case BYTES:
                if (avroField.getProp(Field.METADATA_BYTES_AS_OBJECT) == null) {
                    // Plain binary payload.
                    pangoolField = Field.create(avroField.name(), Type.BYTES);
                } else {
                    // Bytes encode a custom object; class name comes from metadata.
                    try {
                        String objectClazz = avroField.getProp(Field.METADATA_OBJECT_CLASS);
                        pangoolField = Field.createObject(avroField.name(), Class.forName(objectClazz));
                        String serializationString = avroField.getProp(Field.METADATA_OBJECT_SERIALIZATION);
                        if (serializationString != null) {
                            // NOTE(review): the inner null check is redundant here --
                            // serializationString is already known to be non-null.
                            Class<? extends Serialization> ser = (serializationString == null) ? null
                                    : (Class<? extends Serialization>) Class.forName(serializationString);
                            pangoolField.setObjectSerialization(ser);
                        }
                    } catch (ClassNotFoundException e) {
                        throw new PangoolRuntimeException(e);
                    }
                }
                break;
            case ENUM:
                String objectClazz = avroField.getProp(Field.METADATA_OBJECT_CLASS);
                try {
                    pangoolField = Field.createEnum(avroField.name(), Class.forName(objectClazz));
                } catch (ClassNotFoundException e) {
                    throw new PangoolRuntimeException(e);
                }
                break;
            default:
                throw new PangoolRuntimeException("Avro type:" + type + " can't be converted to Pangool Schema type");
            }
            // Carry over user-defined properties, skipping Pangool's reserved keys.
            for (Map.Entry<String, String> entry : avroField.props().entrySet()) {
                if (!Field.RESERVED_KEYWORDS.contains(entry.getKey())) {
                    pangoolField.addProp(entry.getKey(), entry.getValue());
                }
            }
            fields.add(pangoolField);
        }
        Schema schema = new Schema(avroSchema.getFullName(), fields);
        return schema;
    }
}
public class Restarter {
    /**
     * Initialize restart support. See
     * {@link #initialize(String[], boolean, RestartInitializer)} for details.
     *
     * @param args main application arguments
     * @param initializer the restart initializer
     * @see #initialize(String[], boolean, RestartInitializer)
     */
    public static void initialize(String[] args, RestartInitializer initializer) {
        // Delegates to the full overload; the boolean arguments appear to be
        // forceReferenceCleanup=false and restartOnInitialize=true -- TODO confirm
        // against the four-argument overload's signature.
        initialize(args, false, initializer, true);
    }
}
public class Filters { /** * Filter for extensions installed by specified installer . Use only for { @ link ConfigItem # Extension } items . * @ param type installer class * @ return extensions installed by specified installer filter */ public static Predicate < ExtensionItemInfo > installedBy ( final Class < ? extends FeatureInstaller > type ) { } }
return input -> type . equals ( input . getInstalledBy ( ) ) ;
public class ClassFieldAccessorFactory { /** * Builds the class header */ protected static ClassWriter buildClassHeader ( Class < ? > superClass , String className ) { } }
ClassWriter cw = createClassWriter ( superClass . getClassLoader ( ) , Opcodes . ACC_PUBLIC + Opcodes . ACC_SUPER , className , null , Type . getInternalName ( superClass ) , null ) ; cw . visitSource ( null , null ) ; return cw ;
public class ControlMessageFactoryImpl { /** * Create a new , empty ControlAccept message * @ return The new ControlAccept * @ exception MessageCreateFailedException Thrown if such a message can not be created */ public final ControlAccept createNewControlAccept ( ) throws MessageCreateFailedException { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "createNewControlAccept" ) ; ControlAccept msg = null ; try { msg = new ControlAcceptImpl ( MfpConstants . CONSTRUCTOR_NO_OP ) ; } catch ( MessageDecodeFailedException e ) { /* No need to FFDC this as JsMsgObject will already have done so */ // No FFDC code needed throw new MessageCreateFailedException ( e ) ; } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "createNewControlAccept" ) ; return msg ;
public class DataServiceTokenService { /** * Find a user by a security token * @ param token security token * @ return the user or null if not found or token is expired */ @ Override @ Transactional ( readOnly = true ) @ RunAsSystem public UserDetails findUserByToken ( String token ) { } }
Token molgenisToken = getMolgenisToken ( token ) ; return userDetailsService . loadUserByUsername ( molgenisToken . getUser ( ) . getUsername ( ) ) ;
public class KeyVaultClientBaseImpl {
    /**
     * Permanently deletes the specified key.
     *
     * <p>The Purge Deleted Key operation is applicable for soft-delete enabled
     * vaults. While the operation can be invoked on any vault, it will return an
     * error if invoked on a non soft-delete enabled vault. This operation requires
     * the keys/purge permission.
     *
     * @param vaultBaseUrl the vault name, for example https://myvault.vault.azure.net
     * @param keyName the name of the key
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws KeyVaultErrorException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     */
    public void purgeDeletedKey(String vaultBaseUrl, String keyName) {
        // Blocks on the async service call; the body is Void, so the result is discarded.
        purgeDeletedKeyWithServiceResponseAsync(vaultBaseUrl, keyName).toBlocking().single().body();
    }
}
public class UploadService { /** * Stops the upload task with the given uploadId . * @ param uploadId The unique upload id */ public static synchronized void stopUpload ( final String uploadId ) { } }
UploadTask removedTask = uploadTasksMap . get ( uploadId ) ; if ( removedTask != null ) { removedTask . cancel ( ) ; }
public class CdiInjector { /** * Injects dependencies into the given target object whose lifecycle is not managed by the * BeanManager itself . * @ param target * an object with injection points */ @ SuppressWarnings ( { } }
"rawtypes" , "unchecked" } ) @ Override public void injectFields ( Object target ) { BeanManager mgr = BeanManagerLookup . getBeanManager ( ) ; AnnotatedType annotatedType = mgr . createAnnotatedType ( target . getClass ( ) ) ; InjectionTarget injectionTarget = mgr . createInjectionTarget ( annotatedType ) ; CreationalContext context = mgr . createCreationalContext ( null ) ; injectionTarget . inject ( target , context ) ;
public class JaxbSerializer { /** * { @ inheritDoc } */ @ Override public Object fromByteBuffer ( ByteBuffer bytes ) { } }
if ( bytes == null || ! bytes . hasRemaining ( ) ) { return null ; } int l = bytes . remaining ( ) ; ByteArrayInputStream bais = new ByteArrayInputStream ( bytes . array ( ) , bytes . arrayOffset ( ) + bytes . position ( ) , l ) ; try { XMLStreamReader reader = createStreamReader ( bais ) ; Object ret = unmarshaller . get ( ) . unmarshal ( reader ) ; reader . close ( ) ; return ret ; } catch ( JAXBException e ) { throw new HectorSerializationException ( "Jaxb exception occurred during deserialization." , e ) ; } catch ( XMLStreamException e ) { throw new HectorSerializationException ( "Exception reading XML stream." , e ) ; }
public class PrintConfigCommand { /** * Generate the Json representation of the given map . * @ param map the map to print out . * @ return the Json representation . * @ throws JsonProcessingException when the Json cannot be processed . */ @ SuppressWarnings ( "static-method" ) protected String generateJson ( Map < String , Object > map ) throws JsonProcessingException { } }
final ObjectMapper mapper = new ObjectMapper ( ) ; return mapper . writerWithDefaultPrettyPrinter ( ) . writeValueAsString ( map ) ;
public class ColumnModel { /** * The value of this column will be generated by a sequence */ public ColumnModel sequenceGenerator ( String name , String sequenceName , Integer initialValue , Integer allocationSize ) { } }
makeSureTableModelExist ( ) ; this . tableModel . sequenceGenerator ( name , sequenceName , initialValue , allocationSize ) ; this . idGenerationType = GenerationType . SEQUENCE ; this . idGeneratorName = name ; return this ;
public class IntentUtils { /** * Grant permissions to read / write the given URI . * Take from : http : / / stackoverflow . com / a / 33754937/1234900 */ private static void grantUriPermissionsForIntent ( final Activity context , final Uri outputDestination , final Intent intent ) { } }
if ( Build . VERSION . SDK_INT < Build . VERSION_CODES . LOLLIPOP ) { final List < ResolveInfo > resInfoList = context . getPackageManager ( ) . queryIntentActivities ( intent , PackageManager . MATCH_DEFAULT_ONLY ) ; for ( ResolveInfo resolveInfo : resInfoList ) { final String packageName = resolveInfo . activityInfo . packageName ; context . grantUriPermission ( packageName , outputDestination , Intent . FLAG_GRANT_WRITE_URI_PERMISSION | Intent . FLAG_GRANT_READ_URI_PERMISSION ) ; } }
public class Matcher { /** * Sets the limits of this matcher ' s region . The region is the part of the * input sequence that will be searched to find a match . Invoking this * method resets the matcher , and then sets the region to start at the * index specified by the < code > start < / code > parameter and end at the * index specified by the < code > end < / code > parameter . * @ param start * The index to start searching at ( inclusive ) * @ param end * The index to end searching at ( exclusive ) * @ throws IndexOutOfBoundsException * If start or end is less than zero , if * start is greater than the length of the input sequence , if * end is greater than the length of the input sequence , or if * start is greater than end . * @ return this matcher * @ since 1.5 */ public Matcher region ( int start , int end ) { } }
if ( ( start < 0 ) || ( start < 0 ) ) throw new IndexOutOfBoundsException ( "start" ) ; if ( ( end < 0 ) || ( end > matcher . dataEnd ( ) ) ) throw new IndexOutOfBoundsException ( "end" ) ; if ( start > end ) throw new IndexOutOfBoundsException ( "start > end" ) ; matcher . setTarget ( matcher . targetChars ( ) , start , end - start ) ; return this ;