signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class Futures { /** * Returns a new { @ code ListenableFuture } whose result is the product of * applying the given { @ code Function } to the result of the given { @ code * Future } . Example : * < pre > { @ code * ListenableFuture < QueryResult > queryFuture = . . . ; * Function < QueryResult , List < Row > > rowsFunction = * new Function < QueryResult , List < Row > > ( ) { * public List < Row > apply ( QueryResult queryResult ) { * return queryResult . getRows ( ) ; * ListenableFuture < List < Row > > rowsFuture = * transform ( queryFuture , rowsFunction , executor ) ; } < / pre > * < p > The returned { @ code Future } attempts to keep its cancellation state in * sync with that of the input future . That is , if the returned { @ code Future } * is cancelled , it will attempt to cancel the input , and if the input is * cancelled , the returned { @ code Future } will receive a callback in which it * will attempt to cancel itself . * < p > An example use of this method is to convert a serializable object * returned from an RPC into a POJO . * < p > When the transformation is fast and lightweight , consider { @ linkplain * # transform ( ListenableFuture , Function ) omitting the executor } or * explicitly specifying { @ code directExecutor } . However , be aware of the * caveats documented in the link above . * @ param input The future to transform * @ param function A Function to transform the results of the provided future * to the results of the returned future . * @ param executor Executor to run the function in . * @ return A future that holds result of the transformation . * @ since 9.0 ( in 2.0 as { @ code compose } ) */ public static < I , O > ListenableFuture < O > transform ( ListenableFuture < I > input , final Function < ? super I , ? extends O > function , Executor executor ) { } }
checkNotNull ( function ) ; return transform ( input , asAsyncFunction ( function ) , executor ) ;
public class BusinessProcess { /** * @ see # startTask ( String ) * this method allows to start a conversation if no conversation is active */ public Task startTask ( String taskId , boolean beginConversation ) { } }
if ( beginConversation ) { Conversation conversation = conversationInstance . get ( ) ; if ( conversation . isTransient ( ) ) { conversation . begin ( ) ; } } return startTask ( taskId ) ;
public class QueryResult { /** * The primary key of the item where the operation stopped , inclusive of the previous result set . Use this value to * start a new operation , excluding this value in the new request . * If < code > LastEvaluatedKey < / code > is empty , then the " last page " of results has been processed and there is no * more data to be retrieved . * If < code > LastEvaluatedKey < / code > is not empty , it does not necessarily mean that there is more data in the result * set . The only way to know when you have reached the end of the result set is when < code > LastEvaluatedKey < / code > * is empty . * @ param lastEvaluatedKey * The primary key of the item where the operation stopped , inclusive of the previous result set . Use this * value to start a new operation , excluding this value in the new request . < / p > * If < code > LastEvaluatedKey < / code > is empty , then the " last page " of results has been processed and there is * no more data to be retrieved . * If < code > LastEvaluatedKey < / code > is not empty , it does not necessarily mean that there is more data in the * result set . The only way to know when you have reached the end of the result set is when * < code > LastEvaluatedKey < / code > is empty . * @ return Returns a reference to this object so that method calls can be chained together . */ public QueryResult withLastEvaluatedKey ( java . util . Map < String , AttributeValue > lastEvaluatedKey ) { } }
setLastEvaluatedKey ( lastEvaluatedKey ) ; return this ;
public class MigrateArgs { /** * Set { @ literal AUTH } { @ code password } option . * @ param password must not be { @ literal null } . * @ return { @ code this } { @ link MigrateArgs } . * @ since 4.4.5 */ public MigrateArgs < K > auth ( char [ ] password ) { } }
LettuceAssert . notNull ( password , "Password must not be null" ) ; this . password = Arrays . copyOf ( password , password . length ) ; return this ;
public class RegisteredResources {

    /**
     * Possibly update the heuristic state of the transaction.
     * This is only required if this is a subordinate: a subordinate must update the
     * state and log it for recovery.
     *
     * @param commit true if requested to commit, else false
     * @throws SystemException if the state update fails
     */
    private void updateHeuristicState(boolean commit) throws SystemException {
        if (tc.isEntryEnabled())
            Tr.entry(tc, "updateHeuristicState", commit);
        if (_transaction.isSubordinate()) {
            // Get the current transaction state. Need to do this in case we are
            // in recovery and have already logged a heuristic.
            final TransactionState ts = _transaction.getTransactionState();
            final int state = ts.getState();
            if (commit) {
                // Avoid redundant state writes (each setState may be logged for recovery).
                if (state != TransactionState.STATE_HEURISTIC_ON_COMMIT)
                    ts.setState(TransactionState.STATE_HEURISTIC_ON_COMMIT);
            } else {
                // If state is ACTIVE, then this is called via rollbackResources,
                // so do not change state in that case.
                if (state != TransactionState.STATE_HEURISTIC_ON_ROLLBACK
                        && state != TransactionState.STATE_ACTIVE)
                    ts.setState(TransactionState.STATE_HEURISTIC_ON_ROLLBACK);
            }
        }
        if (tc.isEntryEnabled())
            Tr.exit(tc, "updateHeuristicState");
    }
}
public class JDBCSQLXML { /** * Retrieves the XML value designated by this SQLXML instance as a stream . * The bytes of the input stream are interpreted according to appendix F of the XML 1.0 specification . * The behavior of this method is the same as ResultSet . getBinaryStream ( ) * when the designated column of the ResultSet has a type java . sql . Types of SQLXML . * The SQL XML object becomes not readable when this method is called and * may also become not writable depending on implementation . * @ return a stream containing the XML data . * @ throws SQLException if there is an error processing the XML value . * An exception is thrown if the state is not readable . * @ exception SQLFeatureNotSupportedException if the JDBC driver does not support * this method * @ since JDK 1.6 */ public synchronized InputStream getBinaryStream ( ) throws SQLException { } }
checkClosed ( ) ; checkReadable ( ) ; InputStream inputStream = getBinaryStreamImpl ( ) ; setReadable ( false ) ; setWritable ( false ) ; return inputStream ;
public class NodeWrapper { /** * { @ inheritDoc } */ @ Override public boolean isSameNodeInfo ( final NodeInfo other ) { } }
boolean retVal ; if ( ! ( other instanceof NodeInfo ) ) { retVal = false ; } else { retVal = ( ( NodeWrapper ) other ) . mKey == mKey ; } return retVal ;
public class Clock {

    /**
     * Defines if alarms are enabled. If false then no alarms will be triggered.
     *
     * @param CHECK true to enable alarms, false to disable them
     */
    public void setAlarmsEnabled(final boolean CHECK) {
        // Lazy-property pattern: before the property object exists, write the plain
        // backing field and fire an update event; afterwards delegate to the property.
        if (null == alarmsEnabled) {
            _alarmsEnabled = CHECK;
            // NOTE(review): fires VISIBILITY_EVENT rather than an alarm-specific event —
            // looks like a possible copy/paste from a visibility setter; confirm the
            // intended event type against the other setters in this class.
            fireUpdateEvent(VISIBILITY_EVENT);
        } else {
            alarmsEnabled.set(CHECK);
        }
    }
}
public class PropertiesBase { /** * Gets all properties with a key starting with prefix . * @ param prefix * @ param removePrefix * remove prefix in the resulting properties or not * @ return properties starting with prefix */ Properties getProperties ( String prefix , boolean removePrefix ) { } }
Properties result = new Properties ( ) ; for ( String key : getPropertyNames ( ) ) { if ( key . startsWith ( prefix ) ) { result . put ( ( removePrefix ) ? key . substring ( prefix . length ( ) ) : key , getProperty ( key ) ) ; } } return result ;
public class Snappy { /** * Compress the content in the given input buffer . After the compression , * you can retrieve the compressed data from the output buffer [ pos ( ) . . . * limit ( ) ) ( compressed data size = limit ( ) - pos ( ) = remaining ( ) ) * @ param uncompressed buffer [ pos ( ) . . . limit ( ) ) containing the input data * @ param compressed output of the compressed data . Uses range [ pos ( ) . . ] . * @ return byte size of the compressed data . * @ throws SnappyError when the input is not a direct buffer */ public static int compress ( ByteBuffer uncompressed , ByteBuffer compressed ) throws IOException { } }
if ( ! uncompressed . isDirect ( ) ) { throw new SnappyError ( SnappyErrorCode . NOT_A_DIRECT_BUFFER , "input is not a direct buffer" ) ; } if ( ! compressed . isDirect ( ) ) { throw new SnappyError ( SnappyErrorCode . NOT_A_DIRECT_BUFFER , "destination is not a direct buffer" ) ; } // input : uncompressed [ pos ( ) , limit ( ) ) // output : compressed int uPos = uncompressed . position ( ) ; int uLen = uncompressed . remaining ( ) ; int compressedSize = impl . rawCompress ( uncompressed , uPos , uLen , compressed , compressed . position ( ) ) ; // pos limit // [ . . . . . BBBBB . . . . . ] compressed . limit ( compressed . position ( ) + compressedSize ) ; return compressedSize ;
public class SharedFieldCache {

    /**
     * Creates a {@code ValueIndex} for a {@code field} and a term {@code prefix}. The term
     * prefix acts as the property name for the shared {@code field}.
     * <br>
     * This method is an adapted version of: {@code FieldCacheImpl.getStringIndex()}.
     *
     * @param reader the {@code IndexReader}
     * @param field name of the shared field
     * @param prefix the property name, will be used as term prefix
     * @return a ValueIndex that contains the field values and order information
     * @throws IOException if an error occurs while reading from the index
     */
    public ValueIndex getValueIndex(IndexReader reader, String field, String prefix) throws IOException {
        // Unwrap read-only readers so caching keys on the underlying base reader.
        if (reader instanceof ReadOnlyIndexReader) {
            reader = ((ReadOnlyIndexReader) reader).getBase();
        }
        // Interned so the identity comparison (term.field() != field) below is valid.
        field = field.intern();
        ValueIndex ret = lookup(reader, field, prefix);
        if (ret == null) {
            Comparable<?>[] retArray = new Comparable[reader.maxDoc()];
            int setValues = 0;
            if (retArray.length > 0) {
                IndexFormatVersion version = IndexFormatVersion.getVersion(reader);
                // V3+ indexes store the property type in a term payload.
                boolean hasPayloads = version.isAtLeast(IndexFormatVersion.V3);
                TermDocs termDocs;
                byte[] payload = null;
                int type;
                if (hasPayloads) {
                    termDocs = reader.termPositions();
                    payload = new byte[1];
                } else {
                    termDocs = reader.termDocs();
                }
                TermEnum termEnum = reader.terms(new Term(field, prefix));
                int prefixLength = prefix.length();
                try {
                    if (termEnum.term() == null) {
                        throw new RuntimeException("no terms in field " + field);
                    }
                    do {
                        Term term = termEnum.term();
                        String text;
                        // Stop once terms leave this field or no longer carry the prefix
                        // (terms are enumerated in sorted order).
                        if (term.field() != field || !(text = term.text()).startsWith(prefix)) {
                            break;
                        }
                        String value = text.substring(prefixLength);
                        termDocs.seek(termEnum);
                        while (termDocs.next()) {
                            type = PropertyType.UNDEFINED;
                            if (hasPayloads) {
                                TermPositions termPos = (TermPositions) termDocs;
                                termPos.nextPosition();
                                if (termPos.isPayloadAvailable()) {
                                    // First payload byte encodes the property type.
                                    payload = termPos.getPayload(payload, 0);
                                    type = payload[0];
                                }
                            }
                            setValues++;
                            retArray[termDocs.doc()] = getValue(value, type);
                        }
                    } while (termEnum.next());
                } finally {
                    termDocs.close();
                    termEnum.close();
                }
            }
            ValueIndex value = new ValueIndex(retArray, setValues);
            // Cache for subsequent lookups against the same reader/field/prefix.
            store(reader, field, prefix, value);
            return value;
        }
        return ret;
    }
}
public class DTMDocumentImpl {

    /**
     * Given a node handle, advance to the next node on the preceding axis.
     *
     * @param axisContextHandle the start of the axis that is being traversed
     * @param nodeHandle the id of the node
     * @return int node-number of preceding sibling, or DTM.NULL to indicate none exists
     */
    public int getNextPreceding(int axisContextHandle, int nodeHandle) {
        // ### shs copied from Xalan 1, what is this suppose to do?
        nodeHandle &= NODEHANDLE_MASK;
        while (nodeHandle > 1) {
            nodeHandle--;
            // Skip attribute nodes; low 16 bits of entry word 0 hold the node type.
            if (ATTRIBUTE_NODE == (nodes.readEntry(nodeHandle, 0) & 0xFFFF))
                continue;
            // If nodeHandle is _not_ an ancestor of axisContextHandle, specialFind will
            // return it. If it _is_ an ancestor, specialFind will return -1.
            // %REVIEW% unconditional return defeats the purpose of the while loop --
            // does this logic make any sense? (Pre-existing question, left intact.)
            return (m_docHandle | nodes.specialFind(axisContextHandle, nodeHandle));
        }
        return NULL;
    }
}
public class Shape { /** * Get the offset of the specified [ dim0 , dim1 , dim2 ] for the 3d array * @ param shapeInformation Shape information * @ param dim0 Row index to get the offset for * @ param dim1 Column index to get the offset for * @ param dim2 dimension 2 index to get the offset for * @ return Buffer offset */ public static long getOffset ( DataBuffer shapeInformation , int dim0 , int dim1 , int dim2 ) { } }
int rank = rank ( shapeInformation ) ; if ( rank != 3 ) throw new IllegalArgumentException ( "Cannot use this getOffset method on arrays of rank != 3 (rank is: " + rank + ")" ) ; return getOffsetUnsafe ( shapeInformation , dim0 , dim1 , dim2 ) ;
public class Dom { public static boolean matches ( Node node , String requiredLocalName , Namespace requiredNamespace ) { } }
return hasNamespace ( node , requiredNamespace ) && matchingLocalName ( node , requiredLocalName ) ;
public class InsertIntoTable { /** * Adds all rows from the file specified , using the provided parser . * @ param file File to read the data from . * @ param fileParser Parser to be used to parse the file . * @ return { @ code this } */ public InsertIntoTable addRowsFrom ( File file , FileParser fileParser ) { } }
builder . addRowsFrom ( file , fileParser ) ; return this ;
public class ReferenceValueFactory { /** * Create a new instance . * @ param decoder the text decoder ; may be null if the default decoder should be used * @ param factories the set of value factories , used to obtain the { @ link ValueFactories # getStringFactory ( ) string value * factory } ; may not be null * @ param weak true if this factory should create weak references , or false if it should create strong references * @ param simple true if this factory should create simple references , false otherwise * @ return the new reference factory ; never null */ public static ReferenceValueFactory newInstance ( TextDecoder decoder , ValueFactories factories , boolean weak , boolean simple ) { } }
if ( simple ) { return new ReferenceValueFactory ( PropertyType . SIMPLEREFERENCE , decoder , factories , weak , simple ) ; } return new ReferenceValueFactory ( weak ? PropertyType . WEAKREFERENCE : PropertyType . REFERENCE , decoder , factories , weak , simple ) ;
public class ClusteringService { /** * Sends a message of a given type across a cluster . * @ param payload the main body of the message ; must not be { @ code null } * @ return { @ code true } if the send operation was successful , { @ code false } otherwise */ public boolean sendMessage ( Serializable payload ) { } }
if ( ! isOpen ( ) || ! multipleMembersInCluster ( ) ) { return false ; } if ( LOGGER . isDebugEnabled ( ) ) { LOGGER . debug ( "{0} SENDING {1} " , toString ( ) , payload ) ; } try { byte [ ] messageData = toByteArray ( payload ) ; Message jgMessage = new Message ( null , channel . getAddress ( ) , messageData ) ; channel . send ( jgMessage ) ; return true ; } catch ( Exception e ) { // Something went wrong here throw new SystemFailureException ( ClusteringI18n . errorSendingMessage . text ( clusterName ( ) ) , e ) ; }
public class Lz4CompressUtils { /** * When the exact decompressed size is unknown . * Decompress data size cannot be larger then maxDecompressedSize */ public static byte [ ] decompressSafe ( final byte [ ] src , int maxDecompressedSize ) { } }
if ( src == null ) { throw new IllegalArgumentException ( "src must not be null." ) ; } if ( maxDecompressedSize <= 0 ) { throw new IllegalArgumentException ( "maxDecompressedSize must be larger than 0 but " + maxDecompressedSize ) ; } LZ4SafeDecompressor decompressor = factory . safeDecompressor ( ) ; return decompressor . decompress ( src , maxDecompressedSize ) ;
public class SwimMembershipProtocol { /** * Unregisters handlers for the SWIM protocol . */ private void unregisterHandlers ( ) { } }
// Unregister TCP message handlers . bootstrapService . getMessagingService ( ) . unregisterHandler ( MEMBERSHIP_SYNC ) ; bootstrapService . getMessagingService ( ) . unregisterHandler ( MEMBERSHIP_PROBE ) ; bootstrapService . getMessagingService ( ) . unregisterHandler ( MEMBERSHIP_PROBE_REQUEST ) ; // Unregister UDP message listeners . bootstrapService . getUnicastService ( ) . removeListener ( MEMBERSHIP_GOSSIP , gossipListener ) ;
public class CmsVfsSelection {

    /**
     * Opens the popup of this widget.<p>
     *
     * Lazily creates the gallery frame popup on first use; on later calls only the
     * frame URL is refreshed. Also installs a native preview handler for the popup's
     * lifetime and restores focus/cursor to the text box when the popup closes.
     */
    protected void open() {
        // Remember the current value so the close handler can detect user changes.
        m_oldValue = m_selectionInput.m_textbox.getValue();
        if (m_popup == null) {
            String title = org.opencms.gwt.client.Messages.get().key(
                    org.opencms.gwt.client.Messages.GUI_GALLERY_SELECT_DIALOG_TITLE_0);
            m_popup = new CmsFramePopup(title, buildGalleryUrl());
            m_popup.setCloseHandler(new Runnable() {

                public void run() {
                    String textboxValue = m_selectionInput.m_textbox.getText();
                    if (!m_oldValue.equals(textboxValue)) {
                        // Clear then re-set to force a value-change event to fire.
                        m_selectionInput.m_textbox.setValue("", true);
                        m_selectionInput.m_textbox.setValue(textboxValue, true);
                    }
                    // Drop the native preview handler installed in open().
                    if (m_previewHandlerRegistration != null) {
                        m_previewHandlerRegistration.removeHandler();
                        m_previewHandlerRegistration = null;
                    }
                    // Return focus with the cursor at the end of the text.
                    m_selectionInput.m_textbox.setFocus(true);
                    m_selectionInput.m_textbox.setCursorPos(m_selectionInput.m_textbox.getText().length());
                }
            });
            m_popup.setModal(false);
            m_popup.setId(m_id);
            // NOTE(review): fixed pixel sizes for the dialog and its inner frame.
            m_popup.setWidth(717);
            m_popup.getFrame().setSize("705px", "640px");
            m_popup.addDialogClose(new Command() {

                public void execute() {
                    close();
                }
            });
        } else {
            // Popup already built: just point the frame at a fresh gallery URL.
            m_popup.getFrame().setUrl(buildGalleryUrl());
        }
        m_popup.setAutoHideEnabled(true);
        m_popup.center();
        // Install the preview handler once per open popup.
        if (m_previewHandlerRegistration == null) {
            m_previewHandlerRegistration = Event.addNativePreviewHandler(new CloseEventPreviewHandler());
        }
    }
}
public class ResourceTable { /** * Update this record ( Always called from the record class ) . * @ exception DBException File exception . */ public void set ( Rec fieldList ) throws DBException { } }
this . restoreMainRecord ( ( Record ) fieldList , true ) ; super . set ( fieldList ) ;
public class DependencyListView { /** * Generate a table that contains the dependencies information with the column that match the configured filters * @ return Table */ public Table getTable ( ) { } }
final Table table = new Table ( getHeaders ( ) ) ; // Create row ( s ) per dependency for ( final Dependency dependency : dependencies ) { final List < String > licenseIds = dependency . getTarget ( ) . getLicenses ( ) ; // A dependency can have many rows if it has many licenses if ( licenseIds . isEmpty ( ) ) { table . addRow ( getDependencyCells ( dependency , DataModelFactory . createLicense ( "" , "" , "" , "" , "" ) ) ) ; } else { for ( final String licenseId : dependency . getTarget ( ) . getLicenses ( ) ) { final License license = getLicense ( licenseId ) ; table . addRow ( getDependencyCells ( dependency , license ) ) ; } } } return table ;
public class AbstractTableColumnSpec { /** * Traverses links from a proposition to a list of propositions . * @ param links the { @ link Link } s to traverse . * @ param proposition the { @ link Proposition } from which to start . Cannot be * < code > null < / code > . * @ param forwardDerivations map of propositions from raw data toward * derived propositions . * @ param backwardDerivations map of propositions from derived propositions * toward raw data . * @ param references a map of unique id to the corresponding proposition for * propositions that are referred to by other propositions . * @ param knowledgeSource the { @ link KnowledgeSource } . * @ return the list of { @ link Propositions } at the end of the traversals . * @ throws KnowledgeSourceReadException if an error occurred reading from * the knowledge source . */ List < Proposition > traverseLinks ( Link [ ] links , Proposition proposition , Map < Proposition , Set < Proposition > > forwardDerivations , Map < Proposition , Set < Proposition > > backwardDerivations , Map < UniqueId , Proposition > references , KnowledgeSourceCache ksCache ) { } }
return this . linkTraverser . traverseLinks ( links , proposition , forwardDerivations , backwardDerivations , references , ksCache ) ;
public class AbstractIndex { /** * Closes the shared reader . * @ throws IOException if an error occurs while closing the reader . */ protected synchronized void invalidateSharedReader ( ) throws IOException { } }
// also close the read - only reader if ( readOnlyReader != null ) { readOnlyReader . release ( ) ; readOnlyReader = null ; } // invalidate shared reader if ( sharedReader != null ) { sharedReader . release ( ) ; sharedReader = null ; }
public class JobsImpl { /** * Lists the jobs that have been created under the specified job schedule . * @ param jobScheduleId The ID of the job schedule from which you want to get a list of jobs . * @ param serviceCallback the async ServiceCallback to handle successful and failed responses . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the { @ link ServiceFuture } object */ public ServiceFuture < List < CloudJob > > listFromJobScheduleAsync ( final String jobScheduleId , final ListOperationCallback < CloudJob > serviceCallback ) { } }
return AzureServiceFuture . fromHeaderPageResponse ( listFromJobScheduleSinglePageAsync ( jobScheduleId ) , new Func1 < String , Observable < ServiceResponseWithHeaders < Page < CloudJob > , JobListFromJobScheduleHeaders > > > ( ) { @ Override public Observable < ServiceResponseWithHeaders < Page < CloudJob > , JobListFromJobScheduleHeaders > > call ( String nextPageLink ) { return listFromJobScheduleNextSinglePageAsync ( nextPageLink , null ) ; } } , serviceCallback ) ;
public class FilesImpl { /** * Lists the files in a task ' s directory on its compute node . * @ param nextPageLink The NextLink from the previous successful call to List operation . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ throws BatchErrorException thrown if the request is rejected by server * @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @ return the PagedList & lt ; NodeFile & gt ; object if successful . */ public PagedList < NodeFile > listFromTaskNext ( final String nextPageLink ) { } }
ServiceResponseWithHeaders < Page < NodeFile > , FileListFromTaskHeaders > response = listFromTaskNextSinglePageAsync ( nextPageLink ) . toBlocking ( ) . single ( ) ; return new PagedList < NodeFile > ( response . body ( ) ) { @ Override public Page < NodeFile > nextPage ( String nextPageLink ) { return listFromTaskNextSinglePageAsync ( nextPageLink , null ) . toBlocking ( ) . single ( ) . body ( ) ; } } ;
public class BatchJobUploader {

    /**
     * Incrementally uploads a batch job's operations and returns the response.
     *
     * @param request the request to upload
     * @param isLastRequest if the request is the last request in the sequence of uploads for the job
     * @param batchJobUploadStatus the current upload status of the job; must not be null and must
     *        carry a resumable session URI
     * @return the upload response, including the new total content length
     * @throws BatchJobException if content post-processing fails or the upload request fails
     */
    public BatchJobUploadResponse uploadIncrementalBatchJobOperations(
            final BatchJobMutateRequestInterface request, final boolean isLastRequest,
            BatchJobUploadStatus batchJobUploadStatus) throws BatchJobException {
        Preconditions.checkNotNull(batchJobUploadStatus, "Null batch job upload status");
        Preconditions.checkNotNull(batchJobUploadStatus.getResumableUploadUri(),
                "No resumable session URI");
        // This reference is final because it is referenced below within an anonymous class.
        final BatchJobUploadStatus effectiveStatus;
        if (batchJobUploadStatus.getTotalContentLength() == 0) {
            // If this is the first upload, then issue a request to get the resumable session URI
            // from Google Cloud Storage.
            URI uploadUri = initiateResumableUpload(batchJobUploadStatus.getResumableUploadUri());
            effectiveStatus = new BatchJobUploadStatus(0, uploadUri);
        } else {
            effectiveStatus = batchJobUploadStatus;
        }
        // The process below follows the Google Cloud Storage guidelines for resumable
        // uploads of unknown size:
        // https://cloud.google.com/storage/docs/concepts-techniques#unknownresumables
        ByteArrayContent content = request.createBatchJobUploadBodyProvider().getHttpContent(
                request, effectiveStatus.getTotalContentLength() == 0, isLastRequest);
        try {
            content = postProcessContent(content,
                    effectiveStatus.getTotalContentLength() == 0L, isLastRequest);
        } catch (IOException e) {
            throw new BatchJobException("Failed to post-process the request content", e);
        }
        // These three are captured for the logUpload call in the finally block.
        String requestXml = null;
        Throwable exception = null;
        BatchJobUploadResponse batchJobUploadResponse = null;
        final long contentLength = content.getLength();
        try {
            HttpRequestFactory requestFactory = httpTransport.createRequestFactory(req -> {
                HttpHeaders headers = createHttpHeaders();
                headers.setContentLength(contentLength);
                // Content-Range tells GCS where this chunk sits and whether it is the last one.
                headers.setContentRange(
                        constructContentRangeHeaderValue(contentLength, isLastRequest, effectiveStatus));
                req.setHeaders(headers);
                req.setLoggingEnabled(true);
            });
            // Incremental uploads require a PUT request.
            HttpRequest httpRequest = requestFactory.buildPutRequest(
                    new GenericUrl(effectiveStatus.getResumableUploadUri()), content);
            // Capture the request body for logging, then rewind the stream for the actual send.
            requestXml = Streams.readAll(content.getInputStream(), UTF_8);
            content.getInputStream().reset();
            HttpResponse response = httpRequest.execute();
            batchJobUploadResponse = new BatchJobUploadResponse(response,
                    effectiveStatus.getTotalContentLength() + httpRequest.getContent().getLength(),
                    effectiveStatus.getResumableUploadUri());
            return batchJobUploadResponse;
        } catch (HttpResponseException e) {
            if (e.getStatusCode() == 308) {
                // 308 indicates that the upload succeeded.
                batchJobUploadResponse = new BatchJobUploadResponse(
                        new ByteArrayInputStream(new byte[0]), e.getStatusCode(),
                        e.getStatusMessage(),
                        effectiveStatus.getTotalContentLength() + contentLength,
                        effectiveStatus.getResumableUploadUri());
                return batchJobUploadResponse;
            }
            exception = e;
            throw new BatchJobException("Failed response status from batch upload URL.", e);
        } catch (IOException e) {
            exception = e;
            throw new BatchJobException("Problem sending data to batch upload URL.", e);
        } finally {
            // Always log the attempt, successful or not.
            batchJobLogger.logUpload(requestXml, effectiveStatus.getResumableUploadUri(),
                    batchJobUploadResponse, exception);
        }
    }
}
public class PasswordCipherUtil { private static byte [ ] xor ( byte [ ] bytes ) { } }
byte [ ] xor_bytes = null ; if ( bytes != null ) { xor_bytes = new byte [ bytes . length ] ; for ( int i = 0 ; i < bytes . length ; i ++ ) { xor_bytes [ i ] = ( byte ) ( XOR_MASK ^ bytes [ i ] ) ; } } return xor_bytes ;
public class PublicIPPrefixesInner { /** * Creates or updates a static or dynamic public IP prefix . * @ param resourceGroupName The name of the resource group . * @ param publicIpPrefixName The name of the public IP prefix . * @ param parameters Parameters supplied to the create or update public IP prefix operation . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable for the request */ public Observable < PublicIPPrefixInner > createOrUpdateAsync ( String resourceGroupName , String publicIpPrefixName , PublicIPPrefixInner parameters ) { } }
return createOrUpdateWithServiceResponseAsync ( resourceGroupName , publicIpPrefixName , parameters ) . map ( new Func1 < ServiceResponse < PublicIPPrefixInner > , PublicIPPrefixInner > ( ) { @ Override public PublicIPPrefixInner call ( ServiceResponse < PublicIPPrefixInner > response ) { return response . body ( ) ; } } ) ;
public class EntryIterator {

    /**
     * Attempts to get the next element in the iteration. Please refer to
     * {@link AsyncIterator#getNext()} for details. If this method is invoked concurrently
     * (a second call is initiated prior to the previous call terminating) the state of the
     * {@link EntryIterator} will be corrupted. Consider using {@link AsyncIterator#asSequential}.
     *
     * @return A CompletableFuture that, when completed, will contain a List of {@link PageEntry}
     *         instances that are next in the iteration, or null if no more items can be served.
     */
    @Override
    public CompletableFuture<List<PageEntry>> getNext() {
        // Once exhausted, short-circuit with an already-completed null future.
        if (this.finished.get()) {
            return CompletableFuture.completedFuture(null);
        }
        // One timeout budget covers the whole page-location + extraction round.
        TimeoutTimer timer = new TimeoutTimer(this.fetchTimeout);
        return locateNextPage(timer).thenApply(pageWrapper -> {
            // Remember this page (for next time).
            this.lastPage.set(pageWrapper);
            List<PageEntry> result = null;
            if (pageWrapper != null) {
                // Extract the intermediate results from the page.
                result = extractFromPage(pageWrapper);
                this.processedPageCount.incrementAndGet();
            }
            // Check if we have reached the last page that could possibly contain some result;
            // a null result latches the finished flag so future calls return null immediately.
            if (result == null) {
                this.finished.set(true);
            }
            return result;
        });
    }
}
public class PrefsTransformer { /** * Get transformer for type . * @ param property the property * @ return transform */ public static PrefsTransform lookup ( PrefsProperty property ) { } }
TypeMirror typeMirror = property . getElement ( ) . asType ( ) ; TypeName typeName = typeName ( typeMirror ) ; return lookup ( typeName ) ;
public class BigOpt2015 { /** * Evaluate ( ) method */ @ Override public void evaluate ( DoubleSolution solution ) { } }
List < List < Double > > s1 ; List < Double > s1Temp ; s1 = new ArrayList < > ( ) ; for ( int i = 0 ; i < dTypeG ; i ++ ) { s1Temp = new ArrayList < > ( ) ; for ( int j = 0 ; j < icaComponent . get ( 0 ) . size ( ) ; j ++ ) { s1Temp . add ( solution . getVariableValue ( i * ( icaComponent . get ( 0 ) . size ( ) ) + j ) ) ; } s1 . add ( s1Temp ) ; } List < List < Double > > x1 = multiplyWithOutAMP ( matrixA , s1 ) ; List < List < Double > > cor1 = correlation ( x1 , mixed ) ; double sum = 0.0 ; for ( int i = 0 ; i < icaComponent . size ( ) ; i ++ ) { for ( int j = 0 ; j < icaComponent . get ( i ) . size ( ) ; j ++ ) { sum += Math . pow ( icaComponent . get ( i ) . get ( j ) - s1 . get ( i ) . get ( j ) , 2 ) ; } } double obj1 = diagonal1 ( cor1 ) + diagonal2 ( cor1 ) ; double obj2 = sum / ( icaComponent . size ( ) * icaComponent . get ( 0 ) . size ( ) ) ; if ( obj1 > f1max ) { f1max = obj1 ; } if ( obj1 < f1min ) { f1min = obj1 ; } if ( obj2 > f1max ) { f1max = obj2 ; } if ( obj2 < f1min ) { f1min = obj2 ; } if ( scaling ) { obj2 = ( obj2 - f2min ) * ( f1max - f1min ) / ( f2max - f2min ) + f1min ; } solution . setObjective ( 0 , obj1 ) ; solution . setObjective ( 1 , obj2 ) ;
public class SolarTime { /** * used in test classes , too */ double getHighestElevationOfSun ( PlainDate date ) { } }
Moment noon = date . get ( this . transitAtNoon ( ) ) ; double jde = JulianDay . getValue ( noon , TimeScale . TT ) ; double decInRad = Math . toRadians ( this . getCalculator ( ) . getFeature ( jde , DECLINATION ) ) ; double latInRad = Math . toRadians ( this . latitude ) ; double sinElevation = // Extra term left out = > Math . cos ( Math . toRadians ( trueNoon ) ) : = 1.0 ( per definition ) Math . sin ( latInRad ) * Math . sin ( decInRad ) + Math . cos ( latInRad ) * Math . cos ( decInRad ) ; // Meeus ( 13.6) double result = Math . toDegrees ( Math . asin ( sinElevation ) ) ; if ( Double . isNaN ( result ) ) { throw new UnsupportedOperationException ( "Solar declination not supported by: " + this . getCalculator ( ) . name ( ) ) ; } return result ;
public class OperationBuilder { /** * Associate an input stream with the operation . Closing the input stream * is the responsibility of the caller . * @ param in the input stream . Cannot be { @ code null } * @ return a builder than can be used to continue building the operation */ public OperationBuilder addInputStream ( final InputStream in ) { } }
Assert . checkNotNullParam ( "in" , in ) ; if ( inputStreams == null ) { inputStreams = new ArrayList < InputStream > ( ) ; } inputStreams . add ( in ) ; return this ;
public class Ifc4PackageImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ Override public EClass getIfcAxis2Placement ( ) { } }
if ( ifcAxis2PlacementEClass == null ) { ifcAxis2PlacementEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( Ifc4Package . eNS_URI ) . getEClassifiers ( ) . get ( 1117 ) ; } return ifcAxis2PlacementEClass ;
public class CertUtil { /** * 通过keyStore 获取私钥签名证书PrivateKey对象 * @ return */ public static PrivateKey getSignCertPrivateKey ( ) { } }
try { Enumeration < String > aliasenum = keyStore . aliases ( ) ; String keyAlias = null ; if ( aliasenum . hasMoreElements ( ) ) { keyAlias = aliasenum . nextElement ( ) ; } PrivateKey privateKey = ( PrivateKey ) keyStore . getKey ( keyAlias , SDKConfig . getConfig ( ) . getSignCertPwd ( ) . toCharArray ( ) ) ; return privateKey ; } catch ( KeyStoreException e ) { LogUtil . writeErrorLog ( "getSignCertPrivateKey Error" , e ) ; return null ; } catch ( UnrecoverableKeyException e ) { LogUtil . writeErrorLog ( "getSignCertPrivateKey Error" , e ) ; return null ; } catch ( NoSuchAlgorithmException e ) { LogUtil . writeErrorLog ( "getSignCertPrivateKey Error" , e ) ; return null ; }
public class SSLReadServiceContext {

    /**
     * Note, a separate thread is not spawned to handle the decryption. The asynchronous
     * behavior of this call will take place when the device side channel makes a
     * nonblocking IO call and the request is potentially moved to a separate thread.
     * The buffers potentially set from the calling application will be used to store the
     * output of the decrypted message. No read buffers are set in the device channel. It
     * will have the responsibility of allocating while we will have the responsibility of
     * releasing.
     *
     * @see com.ibm.wsspi.tcpchannel.TCPReadRequestContext#read(long, TCPReadCompletedCallback, boolean, int)
     */
    @Override
    public VirtualConnection read(long numBytes, TCPReadCompletedCallback userCallback, boolean forceQueue, int timeout) {
        // Delegate to the internal overload; the trailing false marks this as a
        // caller-initiated read rather than one replayed from the queue.
        VirtualConnection connection = read(numBytes, userCallback, forceQueue, timeout, false);
        return connection;
    }
}
public class BasicAnnotationProcessor { /** * Returns the valid annotated elements contained in all of the deferred elements . If none are * found for a deferred element , defers it again . */ private ImmutableSetMultimap < Class < ? extends Annotation > , Element > validElements ( ImmutableMap < String , Optional < ? extends Element > > deferredElements , RoundEnvironment roundEnv ) { } }
// Phase 1: partition the elements deferred from previous rounds.
// - Entries whose Element is now resolvable are re-collected (with their
//   annotations) for validation below.
// - Entries still unresolvable are re-deferred by type name for a later round.
ImmutableSetMultimap.Builder<Class<? extends Annotation>, Element>
    deferredElementsByAnnotationBuilder = ImmutableSetMultimap.builder();
for (Entry<String, Optional<? extends Element>> deferredTypeElementEntry :
    deferredElements.entrySet()) {
  Optional<? extends Element> deferredElement = deferredTypeElementEntry.getValue();
  if (deferredElement.isPresent()) {
    findAnnotatedElements(
        deferredElement.get(),
        getSupportedAnnotationClasses(),
        deferredElementsByAnnotationBuilder);
  } else {
    deferredElementNames.add(ElementName.forTypeName(deferredTypeElementEntry.getKey()));
  }
}
ImmutableSetMultimap<Class<? extends Annotation>, Element> deferredElementsByAnnotation =
    deferredElementsByAnnotationBuilder.build();
// Phase 2: validate this round's newly annotated elements together with the
// previously deferred ones, one supported annotation at a time.
ImmutableSetMultimap.Builder<Class<? extends Annotation>, Element> validElements =
    ImmutableSetMultimap.builder();
Set<ElementName> validElementNames = new LinkedHashSet<ElementName>();
// Look at the elements we've found and the new elements from this round and validate them.
for (Class<? extends Annotation> annotationClass : getSupportedAnnotationClasses()) {
  // This should just call roundEnv.getElementsAnnotatedWith(Class) directly, but there is a bug
  // in some versions of eclipse that cause that method to crash.
  TypeElement annotationType = elements.getTypeElement(annotationClass.getCanonicalName());
  Set<? extends Element> elementsAnnotatedWith =
      (annotationType == null)
          ? ImmutableSet.<Element>of()
          : roundEnv.getElementsAnnotatedWith(annotationType);
  for (Element annotatedElement :
      Sets.union(elementsAnnotatedWith, deferredElementsByAnnotation.get(annotationClass))) {
    if (annotatedElement.getKind().equals(PACKAGE)) {
      // Packages are validated directly: they have no enclosing type.
      PackageElement annotatedPackageElement = (PackageElement) annotatedElement;
      ElementName annotatedPackageName =
          ElementName.forPackageName(annotatedPackageElement.getQualifiedName().toString());
      // Valid if already seen valid this round, or not deferred and freshly validated.
      boolean validPackage =
          validElementNames.contains(annotatedPackageName)
              || (!deferredElementNames.contains(annotatedPackageName)
                  && validateElement(annotatedPackageElement));
      if (validPackage) {
        validElements.put(annotationClass, annotatedPackageElement);
        validElementNames.add(annotatedPackageName);
      } else {
        deferredElementNames.add(annotatedPackageName);
      }
    } else {
      // Non-package elements are judged via their enclosing top-level type,
      // so an invalid type defers every annotated member inside it.
      TypeElement enclosingType = getEnclosingType(annotatedElement);
      ElementName enclosingTypeName =
          ElementName.forTypeName(enclosingType.getQualifiedName().toString());
      boolean validEnclosingType =
          validElementNames.contains(enclosingTypeName)
              || (!deferredElementNames.contains(enclosingTypeName)
                  && validateElement(enclosingType));
      if (validEnclosingType) {
        validElements.put(annotationClass, annotatedElement);
        validElementNames.add(enclosingTypeName);
      } else {
        deferredElementNames.add(enclosingTypeName);
      }
    }
  }
}
return validElements.build();
public class ByteBuffer { /** * method to append a part of a char array * @ param c char array to get part from * @ param off start index on the char array * @ param len length of the sequenz to get from array * @ throws IOException */ public void append ( char c [ ] , int off , int len ) throws IOException { } }
append ( new String ( c , off , len ) ) ;
public class SFStatement { /** * A helper method to build URL and cancel the SQL for exec * @ param sql sql statement * @ param mediaType media type * @ throws SnowflakeSQLException if failed to cancel the statement * @ throws SFException if statement is already closed */ private void cancelHelper ( String sql , String mediaType ) throws SnowflakeSQLException , SFException { } }
synchronized ( this ) { if ( isClosed ) { throw new SFException ( ErrorCode . INTERNAL_ERROR , "statement already closed" ) ; } } StmtUtil . StmtInput stmtInput = new StmtUtil . StmtInput ( ) ; stmtInput . setServerUrl ( session . getServerUrl ( ) ) . setSql ( sql ) . setMediaType ( mediaType ) . setRequestId ( requestId ) . setSessionToken ( session . getSessionToken ( ) ) . setServiceName ( session . getServiceName ( ) ) ; StmtUtil . cancel ( stmtInput ) ; synchronized ( this ) { /* * done with the remote execution of the query . set sequenceId to - 1 * and request id to null so that we don ' t try to abort it again upon * canceling . */ this . sequenceId = - 1 ; this . requestId = null ; }
public class PriorityStateManager { /** * return an array of short which contains the priority of state of monitored devices . * @ return short [ ] */ public short [ ] getDeviceStateNumberArray ( ) { } }
final short [ ] array = new short [ deviceStateMap . size ( ) ] ; int i = 0 ; for ( final Map . Entry < String , DevState > deviceStateEntry : deviceStateMap . entrySet ( ) ) { final DevState deviceState = deviceStateEntry . getValue ( ) ; array [ i ++ ] = ( short ) getPriorityForState ( deviceState ) ; } return array ;
public class PatchModuleInvalidationUtils { /** * Fills the Boyer Moore " bad character array " for the given pattern */ private static void computeBadByteSkipArray ( byte [ ] pattern , int [ ] badByteArray ) { } }
for ( int a = 0 ; a < ALPHABET_SIZE ; a ++ ) { badByteArray [ a ] = pattern . length ; } for ( int j = 0 ; j < pattern . length - 1 ; j ++ ) { badByteArray [ pattern [ j ] - Byte . MIN_VALUE ] = pattern . length - j - 1 ; }
public class ObjectFactory { /** * Create an instance of { @ link JAXBElement } { @ code < } { @ link String } { @ code > } */ @ XmlElementDecl ( namespace = "http://docs.oasis-open.org/ns/cmis/messaging/200908/" , name = "renditionFilter" , scope = GetRenditions . class ) public JAXBElement < String > createGetRenditionsRenditionFilter ( String value ) { } }
// NOTE(review): this factory is scoped to GetRenditions yet reuses
// _GetObjectOfLatestVersionRenditionFilter_QNAME. The QName value is likely
// identical ("renditionFilter" in the 200908 messaging namespace), but
// confirm a dedicated _GetRenditionsRenditionFilter_QNAME constant was not
// intended -- this is generated JAXB code, so regenerate rather than hand-edit.
return new JAXBElement<String>(
    _GetObjectOfLatestVersionRenditionFilter_QNAME, String.class, GetRenditions.class, value);
public class OnesCounting { /** * This method calculates the fitness for a given genotype . */ private static Integer count ( final Genotype < BitGene > gt ) { } }
return gt . getChromosome ( ) . as ( BitChromosome . class ) . bitCount ( ) ;
public class AWSServiceCatalogClient { /** * Deletes the specified portfolio . * You cannot delete a portfolio if it was shared with you or if it has associated products , users , constraints , or * shared accounts . * @ param deletePortfolioRequest * @ return Result of the DeletePortfolio operation returned by the service . * @ throws ResourceNotFoundException * The specified resource was not found . * @ throws InvalidParametersException * One or more parameters provided to the operation are not valid . * @ throws ResourceInUseException * A resource that is currently in use . Ensure that the resource is not in use and retry the operation . * @ throws TagOptionNotMigratedException * An operation requiring TagOptions failed because the TagOptions migration process has not been performed * for this account . Please use the AWS console to perform the migration process before retrying the * operation . * @ sample AWSServiceCatalog . DeletePortfolio * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / servicecatalog - 2015-12-10 / DeletePortfolio " target = " _ top " > AWS * API Documentation < / a > */ @ Override public DeletePortfolioResult deletePortfolio ( DeletePortfolioRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeDeletePortfolio ( request ) ;
public class CmsSubscriptionManager { /** * Returns the date when the resource was last visited by the user . < p > * @ param cms the current users context * @ param user the user to check the date * @ param resourcePath the name of the resource to check the date * @ return the date when the resource was last visited by the user * @ throws CmsException if something goes wrong */ public long getDateLastVisitedBy ( CmsObject cms , CmsUser user , String resourcePath ) throws CmsException { } }
CmsResource resource = cms . readResource ( resourcePath , CmsResourceFilter . ALL ) ; return m_securityManager . getDateLastVisitedBy ( cms . getRequestContext ( ) , getPoolName ( ) , user , resource ) ;
public class HTTaxinvoiceServiceImp { /** * ( non - Javadoc ) * @ see com . popbill . api . HTTaxinvoiceService # getJobState ( java . lang . String , java . lang . String ) */ @ Override public HTTaxinvoiceJobState getJobState ( String CorpNum , String JobID ) throws PopbillException { } }
return getJobState ( CorpNum , JobID , null ) ;
public class CmsLinkRewriter { /** * Separate method for copying locale relations . . < p > * This is necessary because the default copy mechanism does not copy locale relations . * @ throws CmsException if something goes wrong */ protected void copyLocaleRelations ( ) throws CmsException { } }
long start = System . currentTimeMillis ( ) ; List < CmsRelation > localeRelations = m_cms . readRelations ( CmsRelationFilter . ALL . filterType ( CmsRelationType . LOCALE_VARIANT ) ) ; for ( CmsRelation rel : localeRelations ) { if ( isInSources ( rel . getSourcePath ( ) ) && isInSources ( rel . getTargetPath ( ) ) ) { CmsResource newRelationSource = m_translationsById . get ( rel . getSourceId ( ) ) ; CmsResource newRelationTarget = m_translationsById . get ( rel . getTargetId ( ) ) ; if ( ( newRelationSource != null ) && ( newRelationTarget != null ) ) { try { m_cms . addRelationToResource ( newRelationSource , newRelationTarget , CmsRelationType . LOCALE_VARIANT . getName ( ) ) ; } catch ( CmsException e ) { LOG . error ( "Could not transfer locale relation: " + e . getLocalizedMessage ( ) , e ) ; } } else { LOG . warn ( "Could not transfer locale relation because source/target not found in copy: " + rel ) ; } } } long end = System . currentTimeMillis ( ) ; LOG . info ( "Copied locale relations, took " + ( end - start ) + "ms" ) ;
public class StringParam { /** * String parameter modifier < code > : contains < / code > */ public StringParam setContains ( boolean theContains ) { } }
myContains = theContains ; if ( myContains ) { setExact ( false ) ; setMissing ( null ) ; } return this ;
public class X509DefaultEntryConverter { /** * Apply default coversion for the given value depending on the oid * and the character range of the value . * @ param oid the object identifier for the DN entry * @ param value the value associated with it * @ return the ASN . 1 equivalent for the string value . */ public DERObject getConvertedValue ( DERObjectIdentifier oid , String value ) { } }
if ( value . length ( ) != 0 && value . charAt ( 0 ) == '#' ) { try { return convertHexEncoded ( value , 1 ) ; } catch ( IOException e ) { throw new RuntimeException ( "can't recode value for oid " + oid . getId ( ) , e ) ; } } else if ( oid . equals ( X509Name . EmailAddress ) ) { return new DERIA5String ( value ) ; } else if ( canBePrintable ( value ) ) { return new DERPrintableString ( value ) ; } else if ( canBeUTF8 ( value ) ) { return new DERUTF8String ( value ) ; } return new DERBMPString ( value ) ;
public class TldFernClassifier { /** * Computes the value of the specified fern at the specified location in the image . */ protected int computeFernValue ( float c_x , float c_y , float rectWidth , float rectHeight , TldFernDescription fern ) { } }
rectWidth -= 1 ; rectHeight -= 1 ; int desc = 0 ; for ( int i = 0 ; i < fern . pairs . length ; i ++ ) { Point2D_F32 p_a = fern . pairs [ i ] . a ; Point2D_F32 p_b = fern . pairs [ i ] . b ; float valA = interpolate . get_fast ( c_x + p_a . x * rectWidth , c_y + p_a . y * rectHeight ) ; float valB = interpolate . get_fast ( c_x + p_b . x * rectWidth , c_y + p_b . y * rectHeight ) ; desc *= 2 ; if ( valA < valB ) { desc += 1 ; } } return desc ;
public class UtilMath {

    /**
     * Check if a value lies within the closed interval [min, max].
     *
     * <p>Uses {@link Double#compare(double, double)}, so NaN and signed-zero
     * handling follow that method's total ordering rather than the primitive
     * relational operators.
     *
     * @param value The value to check.
     * @param min The minimum value.
     * @param max The maximum value.
     * @return <code>true</code> if between, <code>false</code> else.
     */
    public static boolean isBetween(double value, double min, double max) {
        final boolean notBelowLower = Double.compare(value, min) >= 0;
        final boolean notAboveUpper = Double.compare(value, max) <= 0;
        return notBelowLower && notAboveUpper;
    }
}
public class NumberUtils { /** * Parse the given text into a number instance of the given target class , * using the given NumberFormat . Trims the input { @ code String } * before attempting to parse the number . * @ param text the text to convert * @ param targetClass the target class to parse into * @ param numberFormat the NumberFormat to use for parsing ( if { @ code null } , * this method falls back to { @ code parseNumber ( String , Class ) } ) * @ return the parsed number * @ throws IllegalArgumentException if the target class is not supported * ( i . e . not a standard Number subclass as included in the JDK ) * @ see java . text . NumberFormat # parse * @ see # convertNumberToTargetClass * @ see # parseNumber ( String , Class ) */ public static < T extends Number > T parseNumber ( String text , Class < T > targetClass , NumberFormat numberFormat ) { } }
if ( numberFormat != null ) { Assert . notNull ( text , "Text must not be null" ) ; Assert . notNull ( targetClass , "Target class must not be null" ) ; DecimalFormat decimalFormat = null ; boolean resetBigDecimal = false ; if ( numberFormat instanceof DecimalFormat ) { decimalFormat = ( DecimalFormat ) numberFormat ; if ( BigDecimal . class . equals ( targetClass ) && ! decimalFormat . isParseBigDecimal ( ) ) { decimalFormat . setParseBigDecimal ( true ) ; resetBigDecimal = true ; } } try { Number number = numberFormat . parse ( StringUtils . trimAllWhitespace ( text ) ) ; return convertNumberToTargetClass ( number , targetClass ) ; } catch ( ParseException ex ) { throw new IllegalArgumentException ( "Could not parse number: " + ex . getMessage ( ) ) ; } finally { if ( resetBigDecimal ) { decimalFormat . setParseBigDecimal ( false ) ; } } } else { return parseNumber ( text , targetClass ) ; }
public class PropertyDispatcher { /** * Adds a PropertySetter observer for properties . * @ param observer * @ param properties */ public void addObserver ( PropertySetter observer , String ... properties ) { } }
try { semaphore . acquire ( ) ; } catch ( InterruptedException ex ) { throw new IllegalArgumentException ( ex ) ; } try { addObserver ( observer ) ; for ( String p : properties ) { observers . addObserver ( p , observer ) ; } } finally { semaphore . release ( ) ; }
public class SessionManager { /** * Method createSession * Called by the HTTPSessionManager in order to create a session . The id provided is in advisory capability only i . e . If there is another webapp that is using this id , then we * can * reuse it . If not we need to create a new one . If the session id is in use by another webapp , then we need to reuse the requested sessionVersion . * @ param id * @ param sessionVersion * @ return Object */ public Object createSession ( String id , int sessionVersion , Object correlator ) { } }
// Trace entry (disabled in the original source; kept for reference).
if (com.ibm.ejs.ras.TraceComponent.isAnyTracingEnabled() && LoggingUtil.SESSION_LOGGER_CORE.isLoggable(Level.FINE)) {
    // LoggingUtil.SESSION_LOGGER_CORE.logp(Level.FINE, methodClassName,
    // "createSession", new Object[] { id, new Integer(sessionVersion),
    // correlator });
}
ISession iSession = null;
// The supplied id is advisory only: reuse it solely when another webapp in
// this store already owns it; otherwise mint a fresh id.
if ((null == id) || !_store.idExists(id, correlator)) {
    id = _idGenerator.getID();
    iSession = _store.createSession(id, correlator);
} else {
    iSession = _store.createSession(id, correlator);
    // -1 means "no version requested"; any other value is carried over from
    // the webapp that already uses this id.
    if (sessionVersion != -1) {
        iSession.setVersion(sessionVersion);
    }
}
// Pin the session for the current request and apply the configured timeout
// before announcing creation to listeners.
iSession.incrementRefCount();
iSession.setMaxInactiveInterval(_sessionTimeout);
_sessionEventDispatcher.sessionCreated(iSession);
if (com.ibm.ejs.ras.TraceComponent.isAnyTracingEnabled() && LoggingUtil.SESSION_LOGGER_CORE.isLoggable(Level.FINE)) {
    LoggingUtil.SESSION_LOGGER_CORE.exiting(methodClassName, "createSession", "iSession = " + iSession);
}
return iSession;
public class H2GISFunctions { /** * Register a H2GIS java code function * @ param st SQL Statement * @ param function Function instance * @ param packagePrepend For OSGi environment only , use * Bundle - SymbolicName : Bundle - Version : * @ throws java . sql . SQLException */ public static void registerFunction ( Statement st , Function function , String packagePrepend ) throws SQLException { } }
registerFunction ( st , function , packagePrepend , true ) ;
public class Token { /** * Find the ManagedObject handled by this token . * @ return ManagedObject the underlying ManagedObject represented by the token . * @ throws ObjectManagerException */ public final ManagedObject getManagedObject ( ) throws ObjectManagerException { } }
// final String methodName = " getManagedObject " ; // if ( Tracing . isAnyTracingEnabled ( ) & & trace . isEntryEnabled ( ) ) // trace . entry ( this , // cclass , // methodName ) ; // Get the object if is already in memory . ManagedObject managedObject = null ; if ( managedObjectReference != null ) managedObject = ( ManagedObject ) managedObjectReference . get ( ) ; if ( managedObject == null ) { // See if we can avoid synchronizing . synchronized ( this ) { if ( managedObjectReference != null ) managedObject = ( ManagedObject ) managedObjectReference . get ( ) ; if ( managedObject == null ) { managedObject = objectStore . get ( this ) ; if ( managedObject != null ) { managedObject . owningToken = this ; // PM22584 Set the owning token first managedObjectReference = new java . lang . ref . WeakReference ( managedObject ) ; } // if ( managedObject ! = null ) . } // if ( managedObject = = null ) . } // synchronize ( this ) . } // if ( managedObject = = null ) . // if ( Tracing . isAnyTracingEnabled ( ) & & trace . isEntryEnabled ( ) ) // trace . exit ( this , // cclass , // methodName , // new Object [ ] { managedObject } ) ; return managedObject ;
public class IPAddressCompare { /** * Checks whether ipAddress is in network with specified subnet by comparing byte logical end values */ private static boolean compareByteValues ( byte [ ] network , byte [ ] subnet , byte [ ] ipAddress ) { } }
for ( int i = 0 ; i < network . length ; i ++ ) if ( ( network [ i ] & subnet [ i ] ) != ( ipAddress [ i ] & subnet [ i ] ) ) return false ; return true ;
public class MinMaxBinaryArrayDoubleEndedHeap { /** * Create a heap from an array of elements . The elements of the array are * not destroyed . The method has linear time complexity . * @ param < K > * the type of keys maintained by the heap * @ param array * an array of elements * @ return a heap * @ throws IllegalArgumentException * in case the array is null */ @ LinearTime public static < K > MinMaxBinaryArrayDoubleEndedHeap < K > heapify ( K [ ] array ) { } }
if ( array == null ) { throw new IllegalArgumentException ( "Array cannot be null" ) ; } if ( array . length == 0 ) { return new MinMaxBinaryArrayDoubleEndedHeap < K > ( ) ; } MinMaxBinaryArrayDoubleEndedHeap < K > h = new MinMaxBinaryArrayDoubleEndedHeap < K > ( array . length ) ; System . arraycopy ( array , 0 , h . array , 1 , array . length ) ; h . size = array . length ; for ( int i = array . length / 2 ; i > 0 ; i -- ) { h . fixdown ( i ) ; } return h ;
public class MeteredInputStream { /** * Find the lowest { @ link MeteredInputStream } in a chain of { @ link FilterInputStream } s . */ public static Optional < MeteredInputStream > findWrappedMeteredInputStream ( InputStream is ) { } }
if ( is instanceof FilterInputStream ) { try { Optional < MeteredInputStream > meteredInputStream = findWrappedMeteredInputStream ( FilterStreamUnpacker . unpackFilterInputStream ( ( FilterInputStream ) is ) ) ; if ( meteredInputStream . isPresent ( ) ) { return meteredInputStream ; } } catch ( IllegalAccessException iae ) { log . warn ( "Cannot unpack input stream due to SecurityManager." , iae ) ; // Do nothing , we can ' t unpack the FilterInputStream due to security restrictions } } if ( is instanceof MeteredInputStream ) { return Optional . of ( ( MeteredInputStream ) is ) ; } return Optional . absent ( ) ;
public class AbstractMonteCarloProduct { /** * / * ( non - Javadoc ) * @ see net . finmath . montecarlo . MonteCarloProduct # getValuesForModifiedData ( double , net . finmath . montecarlo . MonteCarloSimulationModel , java . util . Map ) */ @ Override public Map < String , Object > getValuesForModifiedData ( double evaluationTime , MonteCarloSimulationModel model , Map < String , Object > dataModified ) throws CalculationException { } }
MonteCarloSimulationModel modelModified = model . getCloneWithModifiedData ( dataModified ) ; return getValues ( evaluationTime , modelModified ) ;
public class CspResponseWriter { /** * Write needed attributes before the starting element will be closed by adding a trailing ' > ' character when calling e . g . * { @ link ResponseWriter # writeText } . * See { @ link ResponseWriter # startElement ( String , UIComponent ) } */ private void listenOnEndAttribute ( ) throws IOException { } }
if ( lastElement == null ) { return ; } // no nonce written - > do it if ( "script" . equalsIgnoreCase ( lastElement ) && LangUtils . isValueBlank ( lastNonce ) ) { getWrapped ( ) . writeAttribute ( "nonce" , cspState . getNonce ( ) , null ) ; } if ( lastEvents != null && ! lastEvents . isEmpty ( ) ) { String id = lastId ; // no id written - > generate a new one and write it // otherwise we can ' t identify the element for our scripts if ( LangUtils . isValueBlank ( id ) ) { id = lastElement . toLowerCase ( ) + "-" + UUID . randomUUID ( ) . toString ( ) ; getWrapped ( ) . writeAttribute ( "id" , id , null ) ; } // add current collected events to our state cspState . getEventHandlers ( ) . put ( id , lastEvents ) ; } reset ( ) ;
public class Ifc4PackageImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ Override public EClass getIfcSolidAngleMeasure ( ) { } }
if ( ifcSolidAngleMeasureEClass == null ) { ifcSolidAngleMeasureEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( Ifc4Package . eNS_URI ) . getEClassifiers ( ) . get ( 864 ) ; } return ifcSolidAngleMeasureEClass ;
public class CharOperation {

    /**
     * Answers the number of occurrences of the given character in the given
     * array, 0 if none.<br>
     * <br>
     * For example:
     * <pre>
     * toBeFound = 'b', array = {'a','b','b','a','b','a'} =&gt; 3
     * toBeFound = 'c', array = {'a','b','b','a','b','a'} =&gt; 0
     * </pre>
     *
     * @param toBeFound the given character
     * @param array the given array
     * @return the number of occurrences of the given character in the given array, 0 if none
     * @throws NullPointerException if array is null
     */
    public static final int occurencesOf(char toBeFound, char[] array) {
        int occurrences = 0;
        for (char candidate : array) {
            if (candidate == toBeFound) {
                occurrences++;
            }
        }
        return occurrences;
    }
}
public class AggregationIterator { public boolean hasNext ( ) { } }
final int size = iterators . length ; for ( int i = 0 ; i < size ; i ++ ) { // As long as any of the iterators has a data point with a timestamp // that falls within our interval , we know we have at least one next . if ( ( timestamps [ size + i ] & TIME_MASK ) <= end_time ) { // LOG . debug ( " hasNext # " + ( size + i ) ) ; return true ; } } // LOG . debug ( " No hasNext ( return false ) " ) ; return false ;
public class CommerceCurrencyPersistenceImpl { /** * Returns all the commerce currencies . * @ return the commerce currencies */ @ Override public List < CommerceCurrency > findAll ( ) { } }
return findAll ( QueryUtil . ALL_POS , QueryUtil . ALL_POS , null ) ;
public class DataUtils { public static < T > List < T > getList ( final ContentValues [ ] values , final Class < T > klass ) { } }
final List < T > objects = new ArrayList < T > ( ) ; for ( int i = 0 ; i < values . length ; i ++ ) { final T object = getObject ( values [ i ] , klass ) ; if ( object != null ) { objects . add ( object ) ; } } return objects ;
public class ACLModel { /** * - - - - - Inherited methods - - - - - */ @ Override public boolean isAuthorizedForTransaction ( String subject , String activity ) throws CompatibilityException { } }
context . validateSubject ( subject ) ; context . validateActivity ( activity ) ; if ( ! activityPermissionsUT . containsKey ( subject ) ) { return false ; } return activityPermissionsUT . get ( subject ) . contains ( activity ) ;
public class DestinationChangeListener { /** * / * ( non - Javadoc ) * @ see com . ibm . ws . sib . trm . dlm . DestinationLocationChangeListener # destinationUnavailable ( com . ibm . ws . sib . utils . SIBUuid12 , com . ibm . ws . sib . trm . dlm . Capability ) */ public void destinationUnavailable ( SIBUuid12 destId , Capability capability ) { } }
// Notifies registered MP listeners that a (non-link) destination has become
// unavailable, first collecting every ME uuid we were transmitting to or
// receiving from into the "unavailable" set and cleaning up stale
// localisations. The "available" set stays empty by definition here.
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
    SibTr.entry(tc, "destinationUnavailable", new Object[] { destId, capability });
Set<SIBUuid8> available = new HashSet<SIBUuid8>(); // Empty set of available MEs. The destination is unavailable.
Set<SIBUuid8> unavailable = new HashSet<SIBUuid8>(); // We'll build up this set through the logic below
BaseDestinationHandler destinationHandler =
    (BaseDestinationHandler) _destinationManager.getDestinationInternal(destId, false);
// TRM might pass in the UUID of a link here as it can't always tell the difference between links and destinations.
// Only process the UUID if is is not a link.
if (destinationHandler != null && !destinationHandler.isLink()) {
    // Check if the localisation should be deleted
    Set localitySet = getDestinationLocalitySet(destinationHandler, capability);
    synchronized (destinationHandler) {
        // d526250 - If a remote destination is deleted then its ME stopped we might
        // get a trm notification in. Meanwhile, admin has told us to delete the dest
        // already. If we dont do the following we end up performing cleanup on a
        // dest that no longer exists and we get nullpointers.
        if (!destinationHandler.isToBeDeleted()) {
            // Part 1
            // We'll iterate over the set of transmission points for this destination
            // to build the list of localisations that we were sending to which are now unavailable
            Iterator<PtoPMessageItemStream> i = null;
            i = destinationHandler.getLocalisationManager().getXmitQueueIterator();
            // Drive the iterator
            while (i.hasNext()) {
                PtoPMessageItemStream ptoPMessageItemStream = (PtoPMessageItemStream) i.next();
                // Get the localising ME's uuid
                SIBUuid8 meUuid = ptoPMessageItemStream.getLocalizingMEUuid();
                // Add this ME to the set of those that are unavailable
                unavailable.add(meUuid);
                if ((localitySet == null) || !localitySet.contains(meUuid.toString())) {
                    // The localisation is not known in WCCM or WLM, so mark it for deletion
                    cleanupDestination(ptoPMessageItemStream, destinationHandler, meUuid, capability);
                } // eof null localitySet or meUuid unknown by admin
            } // eof while we have another XMIT itemstream to process
            // Part 2
            // We now look through our list of MEs that we were receiving from (i.e. remoteget)
            // and add them to the list of unavailable destinations.
            Iterator<AnycastInputControl> it = destinationHandler.getAIControlAdapterIterator();
            while (it.hasNext())
                unavailable.add(it.next().getDMEUuid()); // note this could add MEs that we have previously already handled deletion for
            if (capability != Capability.GET) {
                // Drive reallocation
                destinationHandler.requestReallocation();
            }
        } // eof !isToBeDeleted
    } // eof synchronized on DestinationHandler
} // eof destinationHandler not null
// To preserve existing behaviour, if the destinationHandler is null we assume the destination is not a link.
final boolean isNotLink = destinationHandler == null
    || (destinationHandler != null && !destinationHandler.isLink());
if (isNotLink) {
    // Iterate over the registered MP listeners and alert them to the location change
    for (int i = 0; i < _destinationChangeListeners.size(); i++) {
        MPDestinationChangeListener listener = _destinationChangeListeners.get(i);
        // In this delegating call the available set should be empty, while the unavailable set will
        // comprise those ME uuids that we collected above.
        if (listener != null)
            listener.destinationLocationChange(destId, available, unavailable, capability);
    }
}
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
    SibTr.exit(tc, "destinationUnavailable");
return;
public class DirectoryWalker { /** * Start the directory analysis . */ public ISynchronizationPoint < IOException > start ( byte priority , WorkProgress progress , long work ) { } }
JoinPoint < IOException > jp = new JoinPoint < > ( ) ; jp . addToJoin ( 1 ) ; processDirectory ( "" , root , rootObject , jp , priority , progress , work ) ; jp . start ( ) ; return jp ;
public class SonarBuilder { /** * Parses the task status as returned from Sonar ' s CE API * @ param ceTaskResponse String * @ return value of status element in the CE API Response * @ throws ParseException org . json . simple . parser . ParseException */ private String getSonarTaskStatus ( String ceTaskResponse ) throws org . json . simple . parser . ParseException { } }
JSONObject ceTaskResponseObject = ( JSONObject ) new org . json . simple . parser . JSONParser ( ) . parse ( ceTaskResponse ) ; JSONObject task = ( JSONObject ) ceTaskResponseObject . get ( "task" ) ; return str ( task , "status" ) ;
public class Ifc2x3tc1PackageImpl {

    /**
     * Returns the {@code IfcProjectOrderTypeEnum} EEnum, resolving it lazily
     * from the globally registered package on first access.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EEnum getIfcProjectOrderTypeEnum() {
        if (ifcProjectOrderTypeEnumEEnum == null) {
            // Classifier index 875 is fixed by the generated package layout;
            // do not change it by hand (code is @generated).
            ifcProjectOrderTypeEnumEEnum = (EEnum) EPackage.Registry.INSTANCE.getEPackage(Ifc2x3tc1Package.eNS_URI).getEClassifiers().get(875);
        }
        return ifcProjectOrderTypeEnumEEnum;
    }
}
public class GetStringComparator { /** * Get a chunk of the string that is all digits or all not digits . Length * of string is passed in for improved efficiency ( calculate once ) . * @ param string the string being chunked * @ param slength the length of the string * @ param marker the starting point for processing * @ return the chunk */ private String getChunk ( final String string , final int slength , final int marker ) { } }
if ( isDigit ( string . charAt ( marker ) ) ) { return getNumericChunk ( string , slength , marker ) ; } else { return getTextChunk ( string , slength , marker ) ; }
public class VelocityParser { /** * Get the Velocity method parameters ( including < code > ( < / code > and < code > ) < / code > ) . * @ param array the source to parse * @ param currentIndex the current index in the < code > array < / code > * @ param velocityBlock the buffer where to append matched velocity block * @ param context the parser context to put some informations * @ return the index in the < code > array < / code > after the matched block */ public int getMethodParameters ( char [ ] array , int currentIndex , StringBuffer velocityBlock , VelocityParserContext context ) { } }
return getParameters ( array , currentIndex , velocityBlock , ')' , context ) ;
public class CmsObject {

    /**
     * Reads all resources with the given resource id.<p>
     *
     * @param resourceId the resource id for which we want the siblings
     * @param filter the resource filter used to read the resources
     * @return the siblings which share the given resource id
     * @throws CmsException if something goes wrong
     */
    public List<CmsResource> readSiblingsForResourceId(CmsUUID resourceId, CmsResourceFilter filter) throws CmsException {
        // Build a minimal placeholder resource that carries only the resource id;
        // all other constructor arguments are intentionally null/zero since
        // readSiblings presumably keys off the resource id alone -- TODO confirm.
        CmsResource pseudoResource = new CmsResource(null, resourceId, null, 0, false, 0, null, null, 0, null, 0, null, 0, 0, 0, 0, 0, 0);
        return readSiblings(pseudoResource, filter);
    }
}
public class Box { /** * @ description * Authenticates and decrypts the given box with peer ' s public key , * our secret key , and the given nonce . * Returns the original message , or null if authentication fails . */ public byte [ ] open ( byte [ ] box ) { } }
if ( box == null ) return null ; // prepare shared key if ( this . sharedKey == null ) before ( ) ; return open_after ( box , 0 , box . length ) ;
public class RestQueryHandlerImpl { /** * Executes a the given query string ( based on the language information ) against a JCR repository , returning a rest model * based result . * @ param request a non - null { @ link Request } * @ param repositoryName a non - null , URL encoded { @ link String } representing the name of a repository * @ param workspaceName a non - null , URL encoded { @ link String } representing the name of a workspace * @ param language a non - null String which should be a valid query language , as recognized by the * { @ link javax . jcr . query . QueryManager } * @ param statement a non - null String which should be a valid query string in the above language . * @ param offset a numeric value which indicates the index in the result set from where results should be returned . * @ param limit a numeric value indicating the maximum number of rows to return . * @ return a response containing the string representation of the query plan * @ throws javax . jcr . RepositoryException if any operation fails at the JCR level */ @ Override public RestQueryPlanResult planQuery ( Request request , String repositoryName , String workspaceName , String language , String statement , long offset , long limit ) throws RepositoryException { } }
assert repositoryName != null ; assert workspaceName != null ; assert language != null ; assert statement != null ; Session session = getSession ( request , repositoryName , workspaceName ) ; org . modeshape . jcr . api . query . Query query = createQuery ( language , statement , session ) ; bindExtraVariables ( request , session . getValueFactory ( ) , query ) ; org . modeshape . jcr . api . query . QueryResult result = query . explain ( ) ; String plan = result . getPlan ( ) ; return new RestQueryPlanResult ( plan , statement , language , query . getAbstractQueryModelRepresentation ( ) ) ;
public class ModelSerializer { /** * Write a model to a file path * @ param model the model to write * @ param path the path to write to * @ param saveUpdater whether to save the updater * or not * @ throws IOException */ public static void writeModel ( @ NonNull Model model , @ NonNull String path , boolean saveUpdater ) throws IOException { } }
try ( BufferedOutputStream stream = new BufferedOutputStream ( new FileOutputStream ( path ) ) ) { writeModel ( model , stream , saveUpdater ) ; }
public class CustomViewPager { /** * Determine whether the distance between the user ' s ACTION _ DOWN * event ( x1 , y1 ) and the current ACTION _ MOVE event ( x2 , y2 ) should * be interpreted as a horizontal swipe . * @ param x1 * @ param y1 * @ param x2 * @ param y2 * @ return */ private boolean isScrollingHorizontal ( float x1 , float y1 , float x2 , float y2 ) { } }
float deltaX = x2 - x1 ; float deltaY = y2 - y1 ; if ( Math . abs ( deltaX ) > mTouchSlop && Math . abs ( deltaX ) > Math . abs ( deltaY ) ) { return true ; } return false ;
public class StringUtils {

    /**
     * Returns the number of times the specified character was found
     * in the target string, or 0 if there is no specified character.
     *
     * @param chars the target string
     * @param c the character to find
     * @return the number of times the specified character was found
     */
    public static int search(CharSequence chars, char c) {
        // count matching code units via the char stream
        return (int) chars.chars().filter(ch -> ch == c).count();
    }
}
public class CmsLoginUserAgreement { /** * Initializes the ' accepted ' data from the current user . < p > * Returns the absolute path in the OpenCms VFS to the user agreement configuration file . < p > */ protected void initAcceptData ( ) { } }
// read the current users agreement values CmsUser user = getCms ( ) . getRequestContext ( ) . getCurrentUser ( ) ; String result = ( String ) user . getAdditionalInfo ( CmsUserSettings . LOGIN_USERAGREEMENT_ACCEPTED ) ; if ( CmsStringUtil . isNotEmptyOrWhitespaceOnly ( result ) ) { // read JSON data structure that is stored in the user additional info try { JSONObject jsonData = new JSONObject ( result ) ; m_acceptedVersion = jsonData . getDouble ( KEY_ACCEPTED_VERSION ) ; m_acceptedCount = jsonData . getInt ( KEY_ACCEPTED_COUNT ) ; } catch ( JSONException e ) { LOG . error ( e ) ; } }
public class MatrixVectorReader { /** * Reads a pattern matrix */ public void readPattern ( int [ ] row , int [ ] column ) throws IOException { } }
int size = row . length ; if ( size != column . length ) throw new IllegalArgumentException ( "All arrays must be of the same size" ) ; for ( int i = 0 ; i < size ; ++ i ) { row [ i ] = getInt ( ) ; column [ i ] = getInt ( ) ; }
public class RenameHandler { /** * Lookup an enum from a name , handling renames . * @ param < T > the type of the desired enum * @ param type the enum type , not null * @ param name the name of the enum to lookup , not null * @ return the enum value , not null * @ throws IllegalArgumentException if the name is not a valid enum constant */ public < T extends Enum < T > > T lookupEnum ( Class < T > type , String name ) { } }
if ( type == null ) { throw new IllegalArgumentException ( "type must not be null" ) ; } if ( name == null ) { throw new IllegalArgumentException ( "name must not be null" ) ; } Map < String , Enum < ? > > map = getEnumRenames ( type ) ; Enum < ? > value = map . get ( name ) ; if ( value != null ) { return type . cast ( value ) ; } return Enum . valueOf ( type , name ) ;
public class JobHistoryService { /** * Returns a list of { @ link Flow } instances generated from the given results . * For the moment , this assumes that the given scanner provides results * ordered first by flow ID . * @ param scan the Scan instance setup for retrieval * @ return */ private List < Flow > createFromResults ( Scan scan , boolean populateTasks , int maxCount ) throws IOException { } }
List < Flow > flows = new ArrayList < Flow > ( ) ; ResultScanner scanner = null ; try { Stopwatch timer = new Stopwatch ( ) . start ( ) ; Stopwatch timerJob = new Stopwatch ( ) ; int rowCount = 0 ; long colCount = 0 ; long resultSize = 0 ; int jobCount = 0 ; Table historyTable = hbaseConnection . getTable ( TableName . valueOf ( Constants . HISTORY_TABLE ) ) ; scanner = historyTable . getScanner ( scan ) ; Flow currentFlow = null ; for ( Result result : scanner ) { if ( result != null && ! result . isEmpty ( ) ) { rowCount ++ ; colCount += result . size ( ) ; // TODO dogpiledays resultSize + = result . getWritableSize ( ) ; JobKey currentKey = jobKeyConv . fromBytes ( result . getRow ( ) ) ; // empty runId is special cased - - we need to treat each job as it ' s // own flow if ( currentFlow == null || ! currentFlow . contains ( currentKey ) || currentKey . getRunId ( ) == 0 ) { // return if we ' ve already hit the limit if ( flows . size ( ) >= maxCount ) { break ; } currentFlow = new Flow ( new FlowKey ( currentKey ) ) ; flows . add ( currentFlow ) ; } timerJob . start ( ) ; JobDetails job = new JobDetails ( currentKey ) ; job . populate ( result ) ; currentFlow . addJob ( job ) ; jobCount ++ ; timerJob . stop ( ) ; } } historyTable . close ( ) ; timer . stop ( ) ; LOG . info ( "Fetched from hbase " + rowCount + " rows, " + colCount + " columns, " + flows . size ( ) + " flows and " + jobCount + " jobs taking up " + resultSize + " bytes ( " + resultSize / ( 1024.0 * 1024.0 ) + " atomic double: " + new AtomicDouble ( resultSize / ( 1024.0 * 1024.0 ) ) + ") MB, in total time of " + timer + " with " + timerJob + " spent inJobDetails & Flow population" ) ; // export the size of data fetched from hbase as a metric HravenResponseMetrics . FLOW_HBASE_RESULT_SIZE_VALUE . set ( resultSize / ( 1024.0 * 1024.0 ) ) ; } finally { if ( scanner != null ) { scanner . close ( ) ; } } if ( populateTasks ) { populateTasks ( flows ) ; } return flows ;
public class AbstractDocumentationMojo {

    /**
     * Execute the mojo on the given set of files.
     *
     * @param sourceFolder the source folder.
     * @param inputFile the input file.
     * @param relativeInputFile the name of the input file relatively to the source folder.
     * @param outputFolder the output folder.
     * @param parser the parser to be used for reading the input file.
     * @throws IOException if there is some issue with IO.
     * @throws UnsupportedOperationException always; this base implementation is a stub.
     */
    @SuppressWarnings("static-method")
    protected void internalExecute(File sourceFolder, File inputFile, File relativeInputFile, File outputFolder, AbstractMarkerLanguageParser parser) throws IOException {
        // Intentionally unimplemented in this abstract base; presumably concrete
        // mojos override this hook -- NOTE(review): confirm against subclasses.
        throw new UnsupportedOperationException();
    }
}
public class AlternativesImpl {

    /**
     * Reports whether the given structural feature is set on this object.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public boolean eIsSet(int featureID) {
        switch (featureID) {
            // the 'groups' containment list counts as set only when non-null and non-empty
            case SimpleAntlrPackage.ALTERNATIVES__GROUPS:
                return groups != null && !groups.isEmpty();
        }
        // all other features are handled by the superclass
        return super.eIsSet(featureID);
    }
}
public class AuthConfig { /** * Returns a properties object which is converted to { @ code T } . */ @ Nullable public < T > T properties ( Class < T > clazz ) throws JsonProcessingException { } }
return properties != null ? Jackson . treeToValue ( properties , clazz ) : null ;
public class WebJsJmsMessageEncoderImpl {

    /**
     * Encode a text message body into the output buffer.
     * A non-null body is appended as '~' followed by the URL-encoded text;
     * a null body (or an encoding failure) appends nothing.
     *
     * @param result the buffer the encoded body is appended to
     * @param msg the JMS text message whose body is to be encoded
     */
    private void encodeTextBody(StringBuffer result, JsJmsTextMessage msg) {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(tc, "encodeTextBody");
        try {
            String body = msg.getText();
            if (body != null) {
                // '~' marks the presence of a (non-null) text body in the encoding
                result.append('~');
                URLEncode(result, body);
            }
        } catch (UnsupportedEncodingException e) {
            // Record the failure via FFDC and fall through without appending,
            // i.e. the body is encoded as if it were null.
            FFDCFilter.processException(e, "com.ibm.ws.sib.mfp.impl.WebJsJmsMessageEncoderImpl.encodeTextBody", "193");
            // Just treat it as null.
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "encodeTextBody");
    }
}
public class WebSphereCDIDeploymentImpl { /** * Do any of the specified BDAs , or any of BDAs accessible by them , have any beans * BDAs for Runtime Extensions are ignored * @ param bdas * @ return * @ throws CDIException */ private boolean isCDIEnabled ( Collection < WebSphereBeanDeploymentArchive > bdas ) { } }
boolean anyHasBeans = false ; for ( WebSphereBeanDeploymentArchive bda : bdas ) { boolean hasBeans = false ; if ( bda . getType ( ) != ArchiveType . RUNTIME_EXTENSION ) { hasBeans = isCDIEnabled ( bda ) ; } anyHasBeans = anyHasBeans || hasBeans ; if ( anyHasBeans ) { break ; } } return anyHasBeans ;
public class CircularByteBuffer {

    /**
     * Gets as many of the requested bytes as available from this buffer.
     * Copies up to {@code len} bytes into {@code dst} starting at {@code off},
     * handling wrap-around at the end of the backing array in at most two copies.
     *
     * @param dst the destination array
     * @param off the offset in {@code dst} at which to start writing
     * @param len the maximum number of bytes to get
     * @return number of bytes actually got from this buffer (0 if no bytes are available)
     */
    public synchronized int get(byte[] dst, int off, int len) {
        if (available == 0) {
            return 0;
        }
        // limit is last index to read + 1: up to the write index when the data
        // is contiguous (idxGet < idxPut), otherwise up to the end of the array
        int limit = idxGet < idxPut ? idxPut : capacity;
        int count = Math.min(limit - idxGet, len);
        System.arraycopy(buffer, idxGet, dst, off, count);
        idxGet += count;
        if (idxGet == capacity) {
            // Array end reached; wrapped data (if any) starts at index 0 and
            // runs up to idxPut
            int count2 = Math.min(len - count, idxPut);
            if (count2 > 0) {
                System.arraycopy(buffer, 0, dst, off + count, count2);
                idxGet = count2;
                count += count2;
            } else {
                // nothing more requested/available from the wrapped segment;
                // reset the read index to the array start
                idxGet = 0;
            }
        }
        available -= count;
        return count;
    }
}
public class CPOptionValueLocalServiceUtil {

    /**
     * Returns the cp option value matching the UUID and group.
     * This static utility simply delegates to the underlying local service.
     *
     * @param uuid the cp option value's UUID
     * @param groupId the primary key of the group
     * @return the matching cp option value, or <code>null</code> if a matching cp option value could not be found
     */
    public static com.liferay.commerce.product.model.CPOptionValue fetchCPOptionValueByUuidAndGroupId(String uuid, long groupId) {
        return getService().fetchCPOptionValueByUuidAndGroupId(uuid, groupId);
    }
}
public class CryptoServiceSingleton { /** * Encrypt a String with the AES encryption advanced using ' AES / CBC / PKCS5Padding ' . Unlike the regular * encode / decode AES method using ECB ( Electronic Codebook ) , it uses Cipher - block chaining ( CBC ) . The private key * must have a length of 16 bytes , the salt and initialization vector must be valid hex Strings . * @ param value The message to encrypt * @ param privateKey The private key * @ param salt The salt ( hexadecimal String ) * @ param iv The initialization vector ( hexadecimal String ) * @ return encrypted String encoded using Base64 */ @ Override public String encryptAESWithCBC ( String value , String privateKey , String salt , String iv ) { } }
SecretKey genKey = generateAESKey ( privateKey , salt ) ; byte [ ] encrypted = doFinal ( Cipher . ENCRYPT_MODE , genKey , iv , value . getBytes ( UTF_8 ) ) ; return encodeBase64 ( encrypted ) ;
public class Geometry { /** * Center the columns of a matrix ( in - place ) . */ public static DoubleMatrix centerColumns ( DoubleMatrix x ) { } }
DoubleMatrix temp = new DoubleMatrix ( x . rows ) ; for ( int c = 0 ; c < x . columns ; c ++ ) x . putColumn ( c , center ( x . getColumn ( c , temp ) ) ) ; return x ;
public class AbstractJavaMetadata { /** * Extract the type name * @ param type - the type to be processed . This can be primitive , simple , parameterized . . . * @ return the name of a type . * @ throws IllegalArgumentException if type is null . */ private String getTypeName ( Type type ) { } }
CheckArg . isNotNull ( type , "type" ) ; if ( type . isPrimitiveType ( ) ) { PrimitiveType primitiveType = ( PrimitiveType ) type ; return primitiveType . getPrimitiveTypeCode ( ) . toString ( ) ; } if ( type . isSimpleType ( ) ) { SimpleType simpleType = ( SimpleType ) type ; return JavaMetadataUtil . getName ( simpleType . getName ( ) ) ; } if ( type . isParameterizedType ( ) ) { ParameterizedType parameterizedType = ( ParameterizedType ) type ; return getTypeName ( parameterizedType . getType ( ) ) ; } if ( type . isArrayType ( ) ) { ArrayType arrayType = ( ArrayType ) type ; // the element type is never an array type Type elementType = arrayType . getElementType ( ) ; if ( elementType . isPrimitiveType ( ) ) { return ( ( PrimitiveType ) elementType ) . getPrimitiveTypeCode ( ) . toString ( ) ; } // can ' t be an array type if ( elementType . isSimpleType ( ) ) { return JavaMetadataUtil . getName ( ( ( SimpleType ) elementType ) . getName ( ) ) ; } } return null ;
public class ConcreteEditingContext {

    /**
     * Mark for creation all objects that were included into dependent collections.
     * Mark for deletion all objects that were excluded from dependent collections.
     * The three array parameters are parallel, indexed by the position of the
     * collection descriptor in the class descriptor's collection list.
     *
     * @param oid identity of the owning object
     * @param obj the owning object whose dependent collections are compared
     * @param origCollections original collection contents (identities), may be null
     * @param newCollections new collection contents (identities)
     * @param newCollectionsOfObjects new collection contents (actual objects),
     *        parallel to {@code newCollections}
     * @return the identities of the newly inserted (created) objects
     * @throws LockingException if a write lock cannot be obtained during insert
     */
    private ArrayList handleDependentCollections(Identity oid, Object obj, Object[] origCollections, Object[] newCollections, Object[] newCollectionsOfObjects) throws LockingException {
        ClassDescriptor mif = _pb.getClassDescriptor(obj.getClass());
        Collection colDescs = mif.getCollectionDescriptors();
        ArrayList newObjects = new ArrayList();
        // 'count' indexes the parallel arrays, advancing per collection descriptor
        int count = 0;
        for (Iterator it = colDescs.iterator(); it.hasNext(); count++) {
            CollectionDescriptor cds = (CollectionDescriptor) it.next();
            // only dependent collections are managed here
            if (cds.getOtmDependent()) {
                ArrayList origList = (origCollections == null ? null : (ArrayList) origCollections[count]);
                ArrayList newList = (ArrayList) newCollections[count];
                if (origList != null) {
                    // anything in the original list but missing from the new list
                    // was removed from the dependent collection -> mark for deletion
                    for (Iterator it2 = origList.iterator(); it2.hasNext();) {
                        Identity origOid = (Identity) it2.next();
                        if ((newList == null) || !newList.contains(origOid)) {
                            markDelete(origOid, oid, true);
                        }
                    }
                }
                if (newList != null) {
                    // anything in the new list but missing from the original list
                    // was added -> insert it (create) if not already in the context
                    int countElem = 0;
                    for (Iterator it2 = newList.iterator(); it2.hasNext(); countElem++) {
                        Identity newOid = (Identity) it2.next();
                        if ((origList == null) || !origList.contains(newOid)) {
                            ContextEntry entry = (ContextEntry) _objects.get(newOid);
                            if (entry == null) {
                                // look up the actual object at the same element index
                                ArrayList relCol = (ArrayList) newCollectionsOfObjects[count];
                                Object relObj = relCol.get(countElem);
                                insertInternal(newOid, relObj, LockType.WRITE_LOCK, true, null, new Stack());
                                newObjects.add(newOid);
                            }
                        }
                    }
                }
            }
        }
        return newObjects;
    }
}
public class HttpProxyServiceHandler { /** * Helper method performing loop detection * @ param acceptSession - session parameter * @ return - whether a loop was detected or not */ private boolean validateNoLoopDetected ( DefaultHttpSession acceptSession ) { } }
List < String > viaHeaders = acceptSession . getReadHeaders ( HEADER_VIA ) ; if ( viaHeaders != null && viaHeaders . stream ( ) . anyMatch ( h -> h . equals ( viaHeader ) ) ) { LOGGER . warn ( "Connection to " + getConnectURIs ( ) . iterator ( ) . next ( ) + " failed due to loop detection [" + acceptSession + "->]" ) ; acceptSession . setStatus ( HttpStatus . SERVER_LOOP_DETECTED ) ; acceptSession . close ( true ) ; return false ; } return true ;
public class TaskRunner { /** * Corona will schedule tasks immediately after a failure , which can cause * the new task to potentially coincide with the failed task JVM still * running . Concurrent running JVMs can cause failure if they expect an * initial amount of memory available . As a temporary workaround , there * is an option to set a customizable delay to starting every task in * milliseconds based on the cluster / job configuration . * @ return Milliseconds of delay */ private static long delayStartingTask ( JobConf conf ) { } }
long delayMs = conf . getLong ( MAPREDUCE_TASK_DELAY_MS , MAPREDUCE_TASK_DELAY_MS_DEFAULT ) ; try { Thread . sleep ( delayMs ) ; } catch ( InterruptedException e ) { LOG . info ( "delayStartingTask: Unexpected interruption of " + delayMs + " ms" ) ; } return delayMs ;
public class HiveMetaStoreUtils { /** * First tries getting the { @ code FieldSchema } s from the { @ code HiveRegistrationUnit } ' s columns , if set . * Else , gets the { @ code FieldSchema } s from the deserializer . */ private static List < FieldSchema > getFieldSchemas ( HiveRegistrationUnit unit ) { } }
List < Column > columns = unit . getColumns ( ) ; List < FieldSchema > fieldSchemas = new ArrayList < > ( ) ; if ( columns != null && columns . size ( ) > 0 ) { fieldSchemas = getFieldSchemas ( columns ) ; } else { Deserializer deserializer = getDeserializer ( unit ) ; if ( deserializer != null ) { try { fieldSchemas = MetaStoreUtils . getFieldsFromDeserializer ( unit . getTableName ( ) , deserializer ) ; } catch ( SerDeException | MetaException e ) { LOG . warn ( "Encountered exception while getting fields from deserializer." , e ) ; } } } return fieldSchemas ;
public class Deployment {

    /**
     * <pre>
     * Manifest of the files stored in Google Cloud Storage that are included
     * as part of this version. All files must be readable using the
     * credentials supplied with this call.
     * </pre>
     *
     * <code>map&lt;string, .google.appengine.v1.FileInfo&gt; files = 1;</code>
     *
     * Returns the mapped {@code FileInfo} for {@code key}, or
     * {@code defaultValue} when the key is absent from the map.
     */
    public com.google.appengine.v1.FileInfo getFilesOrDefault(java.lang.String key, com.google.appengine.v1.FileInfo defaultValue) {
        // protobuf map accessors reject null keys explicitly
        if (key == null) {
            throw new java.lang.NullPointerException();
        }
        java.util.Map<java.lang.String, com.google.appengine.v1.FileInfo> map = internalGetFiles().getMap();
        return map.containsKey(key) ? map.get(key) : defaultValue;
    }
}
public class VaultsInner {

    /**
     * Create or update a key vault in the specified subscription.
     *
     * @param resourceGroupName The name of the Resource Group to which the server belongs.
     * @param vaultName Name of the vault
     * @param parameters Parameters to create or update the vault
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the VaultInner object if successful.
     */
    public VaultInner createOrUpdate(String resourceGroupName, String vaultName, VaultCreateOrUpdateParameters parameters) {
        // Synchronous wrapper: blocks on the async call, expects exactly one
        // emitted response, and unwraps its body.
        return createOrUpdateWithServiceResponseAsync(resourceGroupName, vaultName, parameters).toBlocking().single().body();
    }
}