signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class FullSupportScriptEngine { /** * - - - - - Invocable */
@ Override public Object invokeMethod ( Object thiz , String name , Object ... args ) throws ScriptException , NoSuchMethodException { } } | return ( ( Invocable ) engine ) . invokeMethod ( thiz , name , args ) ; |
public class Import { /** * Attempt to filter out the keyvalue
* @ param kv { @ link org . apache . hadoop . hbase . KeyValue } on which to apply the filter
* @ return < tt > null < / tt > if the key should not be written , otherwise returns the original
* { @ link org . apache . hadoop . hbase . KeyValue }
* @ param filter a { @ link org . apache . hadoop . hbase . filter . Filter } object .
* @ throws java . io . IOException if any . */
public static Cell filterKv ( Filter filter , Cell kv ) throws IOException { } } | // apply the filter and skip this kv if the filter doesn ' t apply
if ( filter != null ) { Filter . ReturnCode code = filter . filterKeyValue ( kv ) ; if ( LOG . isTraceEnabled ( ) ) { LOG . trace ( "Filter returned:" + code + " for the key value:" + kv ) ; } // if its not an accept type , then skip this kv
if ( ! ( code . equals ( Filter . ReturnCode . INCLUDE ) || code . equals ( Filter . ReturnCode . INCLUDE_AND_NEXT_COL ) ) ) { return null ; } } return kv ; |
public class ProducerRequest { /** * read a producer request from buffer
* @ param buffer data buffer
* @ return parsed producer request */
public static ProducerRequest readFrom ( ByteBuffer buffer ) { } } | String topic = Utils . readShortString ( buffer ) ; int partition = buffer . getInt ( ) ; int messageSetSize = buffer . getInt ( ) ; ByteBuffer messageSetBuffer = buffer . slice ( ) ; messageSetBuffer . limit ( messageSetSize ) ; buffer . position ( buffer . position ( ) + messageSetSize ) ; return new ProducerRequest ( topic , partition , new ByteBufferMessageSet ( messageSetBuffer ) ) ; |
public class WrappingUtils { /** * Resets the rounding params on the specified rounded drawable , so that no rounding occurs . */
static void resetRoundingParams ( Rounded rounded ) { } } | rounded . setCircle ( false ) ; rounded . setRadius ( 0 ) ; rounded . setBorder ( Color . TRANSPARENT , 0 ) ; rounded . setPadding ( 0 ) ; rounded . setScaleDownInsideBorders ( false ) ; rounded . setPaintFilterBitmap ( false ) ; |
public class FhirServerConfigDstu2 { /** * Configure FHIR properties around the the JPA server via this bean */
@ Bean public DaoConfig daoConfig ( ) { } } | DaoConfig retVal = new DaoConfig ( ) ; retVal . setSubscriptionEnabled ( true ) ; retVal . setSubscriptionPollDelay ( 5000 ) ; retVal . setSubscriptionPurgeInactiveAfterMillis ( DateUtils . MILLIS_PER_HOUR ) ; retVal . setAllowMultipleDelete ( true ) ; return retVal ; |
public class ResponseBuilder { /** * Sets a { @ link AskForPermissionsConsentCard } card on the response .
* @ param permissions permission array
* @ return response builder */
public ResponseBuilder withAskForPermissionsConsentCard ( List < String > permissions ) { } } | this . card = AskForPermissionsConsentCard . builder ( ) . withPermissions ( permissions ) . build ( ) ; return this ; |
public class Gauge { /** * Defines if the sections will always be visible .
* This is currently only used in the IndicatorSkin
* @ param VISIBLE */
public void setSectionsAlwaysVisible ( final boolean VISIBLE ) { } } | if ( null == sectionsAlwaysVisible ) { _sectionsAlwaysVisible = VISIBLE ; fireUpdateEvent ( REDRAW_EVENT ) ; } else { sectionsAlwaysVisible . set ( VISIBLE ) ; } |
public class WordVectorSerializer { /** * This method saves Word2Vec model into compressed zip file and sends it to output stream
* PLEASE NOTE : This method saves FULL model , including syn0 AND syn1 */
public static void writeWord2VecModel ( Word2Vec vectors , File file ) { } } | try ( FileOutputStream fos = new FileOutputStream ( file ) ; BufferedOutputStream stream = new BufferedOutputStream ( fos ) ) { writeWord2VecModel ( vectors , stream ) ; } catch ( Exception e ) { throw new RuntimeException ( e ) ; } |
public class CmsLocationSuggestOracle { /** * Adds a location suggestion to the list . < p >
* @ param suggestions the suggestions list
* @ param address the address */
private static void addSuggestion ( List < LocationSuggestion > suggestions , String address ) { } } | suggestions . add ( new LocationSuggestion ( address ) ) ; |
public class AwsUtils { /** * This method returns a complete signed request using HmaSHA256 algorithm
* using the " SignatureVersion 2 " scheme . Currently , this method creates
* " SignatureVersion 2 " based signed requests ONLY for the HTTP " GET "
* method . It ' s the callers the responsibility such as " Action " and such
* based on the REST API that they are trying to exercise . This method adds
* " SignatureVersion " , " SignatureMethod " , " AWSAccessKeyId " , " Timestamp " , and
* " Signature " parameters to the request .
* @ param requestMethod Only " GET at this point .
* @ param endpoint endpoint or the host
* @ param requestURI following the endpoint up until the query params
* @ param params Map of name - value pairs containing params such
* as " Action " , etc .
* @ param awsAccessKeyId AccessKeyId of the caller to create a signature
* @ param awsSecretKey SecretKey of the caller to create the signature
* @ return String the complete URL with proper encoding and the Signature
* query param appended
* @ throws java . security . SignatureException */
public static String getVersion2SignedRequest ( String requestMethod , String protocol , String endpoint , String requestURI , Map < String , String > params , String awsAccessKeyId , String awsSecretKey ) throws SignatureException { } } | if ( ( requestMethod == null ) || ( protocol == null ) || ( endpoint == null ) || ( requestURI == null ) || ( params == null ) || ( awsAccessKeyId == null ) || ( awsSecretKey == null ) ) { throw new IllegalArgumentException ( "Null parameter passed in" ) ; } params . put ( "AWSAccessKeyId" , awsAccessKeyId ) ; params . put ( "SignatureMethod" , HMAC_SHA256_ALGORITHM ) ; params . put ( "SignatureVersion" , "2" ) ; params . put ( "Timestamp" , getTimestamp ( ) ) ; String canonicalQS = getV2CanonicalizedQueryString ( params ) ; String stringToSign = requestMethod + "\n" + endpoint + "\n" + requestURI + "\n" + canonicalQS ; String signature = createSignature ( stringToSign , awsSecretKey , HMAC_SHA256_ALGORITHM ) ; String request = protocol + "://" + endpoint + requestURI + "?" + canonicalQS + "&Signature=" + signature ; return request ; |
public class FileEventStore { /** * Gets the directory for events in the given collection . Creates the directory ( and any
* necessary parents ) if it does not exist already .
* @ param projectId The project ID .
* @ param eventCollection The name of the event collection .
* @ return The directory for events in the collection . */
private File getCollectionDir ( String projectId , String eventCollection ) throws IOException { } } | File collectionDir = new File ( getProjectDir ( projectId , true ) , eventCollection ) ; if ( ! collectionDir . exists ( ) ) { KeenLogging . log ( "Cache directory for event collection '" + eventCollection + "' doesn't exist. Creating it." ) ; if ( ! collectionDir . mkdirs ( ) ) { throw new IOException ( "Could not create collection cache directory '" + collectionDir . getAbsolutePath ( ) + "'" ) ; } } return collectionDir ; |
public class CharacterRangeImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
@ Override public Object eGet ( int featureID , boolean resolve , boolean coreType ) { } } | switch ( featureID ) { case XtextPackage . CHARACTER_RANGE__LEFT : return getLeft ( ) ; case XtextPackage . CHARACTER_RANGE__RIGHT : return getRight ( ) ; } return super . eGet ( featureID , resolve , coreType ) ; |
public class SQLFileWriter { /** * This method will process the given DiffTask and send it to the specified
* output .
* @ param task
* DiffTask
* @ throws ConfigurationException
* if problems occurred while initializing the components
* @ throws IOException
* if problems occurred while writing the output ( to file or
* archive )
* @ throws SQLConsumerException
* if problems occurred while writing the output ( to the sql
* producer database ) */
@ Override public void process ( final Task < Diff > task ) throws ConfigurationException , IOException , SQLConsumerException { } } | try { SQLEncoding [ ] encoding = sqlEncoder . encodeTask ( task ) ; for ( SQLEncoding sql : encoding ) { this . writer . write ( sql . getQuery ( ) + "\r\n" ) ; this . writer . flush ( ) ; } if ( task . getTaskType ( ) == TaskTypes . TASK_FULL || task . getTaskType ( ) == TaskTypes . TASK_PARTIAL_LAST ) { if ( this . sqlFile . length ( ) > LIMIT_SQL_FILE_SIZE ) { writeHeader ( ) ; } if ( ! MODE_STATISTICAL_OUTPUT ) { System . out . println ( task . toString ( ) ) ; } } else { System . out . println ( task . toString ( ) ) ; } } catch ( DecodingException e ) { throw ErrorFactory . createSQLConsumerException ( ErrorKeys . DIFFTOOL_SQLCONSUMER_FILEWRITER_EXCEPTION , e ) ; } catch ( EncodingException e ) { throw ErrorFactory . createSQLConsumerException ( ErrorKeys . DIFFTOOL_SQLCONSUMER_FILEWRITER_EXCEPTION , e ) ; } |
public class RTMPConnection { /** * Handle the incoming message .
* @ param packet
* incoming message packet */
public void handleMessageReceived ( Packet packet ) { } } | if ( log . isTraceEnabled ( ) ) { log . trace ( "handleMessageReceived - {}" , sessionId ) ; } // set the packet expiration time if maxHandlingTimeout is not disabled ( set to 0)
if ( maxHandlingTimeout > 0 ) { packet . setExpirationTime ( System . currentTimeMillis ( ) + maxHandlingTimeout ) ; } if ( executor != null ) { final byte dataType = packet . getHeader ( ) . getDataType ( ) ; // route these types outside the executor
switch ( dataType ) { case Constants . TYPE_PING : case Constants . TYPE_ABORT : case Constants . TYPE_BYTES_READ : case Constants . TYPE_CHUNK_SIZE : case Constants . TYPE_CLIENT_BANDWIDTH : case Constants . TYPE_SERVER_BANDWIDTH : // pass message to the handler
try { handler . messageReceived ( this , packet ) ; } catch ( Exception e ) { log . error ( "Error processing received message {}" , sessionId , e ) ; } break ; default : final String messageType = getMessageType ( packet ) ; try { // increment the packet number
final long packetNumber = packetSequence . incrementAndGet ( ) ; if ( executorQueueSizeToDropAudioPackets > 0 && currentQueueSize . get ( ) >= executorQueueSizeToDropAudioPackets ) { if ( packet . getHeader ( ) . getDataType ( ) == Constants . TYPE_AUDIO_DATA ) { // if there ' s a backlog of messages in the queue . Flash might have sent a burst of messages after a network congestion . Throw away packets that we are able to discard .
log . info ( "Queue threshold reached. Discarding packet: session=[{}], msgType=[{}], packetNum=[{}]" , sessionId , messageType , packetNumber ) ; return ; } } int streamId = packet . getHeader ( ) . getStreamId ( ) . intValue ( ) ; if ( log . isTraceEnabled ( ) ) { log . trace ( "Handling message for streamId: {}, channelId: {} Channels: {}" , streamId , packet . getHeader ( ) . getChannelId ( ) , channels ) ; } // create a task to setProcessing the message
ReceivedMessageTask task = new ReceivedMessageTask ( sessionId , packet , handler , this ) ; task . setPacketNumber ( packetNumber ) ; // create a task queue
ReceivedMessageTaskQueue newStreamTasks = new ReceivedMessageTaskQueue ( streamId , this ) ; // put the queue in the task by stream map
ReceivedMessageTaskQueue currentStreamTasks = tasksByStreams . putIfAbsent ( streamId , newStreamTasks ) ; if ( currentStreamTasks != null ) { // add the task to the existing queue
currentStreamTasks . addTask ( task ) ; } else { // add the task to the newly created and just added queue
newStreamTasks . addTask ( task ) ; } } catch ( Exception e ) { log . error ( "Incoming message handling failed on session=[" + sessionId + "], messageType=[" + messageType + "]" , e ) ; if ( log . isDebugEnabled ( ) ) { log . debug ( "Execution rejected on {} - {}" , sessionId , RTMP . states [ getStateCode ( ) ] ) ; log . debug ( "Lock permits - decode: {} encode: {}" , decoderLock . availablePermits ( ) , encoderLock . availablePermits ( ) ) ; } } } } else { log . debug ( "Executor is null on {} state: {}" , sessionId , RTMP . states [ getStateCode ( ) ] ) ; // pass message to the handler
try { handler . messageReceived ( this , packet ) ; } catch ( Exception e ) { log . error ( "Error processing received message {} state: {}" , sessionId , RTMP . states [ getStateCode ( ) ] , e ) ; } } |
public class CompositeFileSystem { /** * { @ inheritDoc } */
public Entry [ ] listEntries ( DirectoryEntry directory ) { } } | Map < String , Entry > result = new TreeMap < String , Entry > ( ) ; for ( FileSystem delegate : delegates ) { Entry [ ] entries = delegate . listEntries ( DefaultDirectoryEntry . equivalent ( delegate , directory ) ) ; if ( entries == null ) { continue ; } for ( Entry entry : entries ) { if ( result . containsKey ( entry . getName ( ) ) ) { continue ; } if ( entry instanceof DirectoryEntry ) { result . put ( entry . getName ( ) , new DefaultDirectoryEntry ( this , directory , entry . getName ( ) ) ) ; } else if ( entry instanceof FileEntry ) { result . put ( entry . getName ( ) , new LinkFileEntry ( this , directory , ( FileEntry ) entry ) ) ; } } } return ( Entry [ ] ) result . values ( ) . toArray ( new Entry [ result . size ( ) ] ) ; |
public class Response { /** * Trigger a browser redirect
* @ param location Where to redirect */
public void redirect ( String location ) { } } | if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( "Redirecting ({} {} to {}" , "Found" , HttpServletResponse . SC_FOUND , location ) ; } try { response . sendRedirect ( location ) ; } catch ( IOException ioException ) { LOG . warn ( "Redirect failure" , ioException ) ; } |
public class ResourceUtil { /** * First attempts to open open { @ code name } as a file . On fail attempts to
* open resource with name { @ code name } . On fail attempts to open { @ code name }
* as a URL .
* @ param name name of the file or resource to open
* @ return an input stream for reading the opened file or resource
* @ throws FileNotFoundException if all attempts fail */
public static InputStream getStream ( final String name ) throws FileNotFoundException { } } | if ( name == null ) { throw new IllegalArgumentException ( "'name' must not be null" ) ; } final File file = new File ( name ) ; if ( file . exists ( ) ) { return getStream ( file ) ; } InputStream stream = Thread . currentThread ( ) . getContextClassLoader ( ) . getResourceAsStream ( name ) ; if ( stream != null ) { return stream ; } try { stream = new URL ( name ) . openStream ( ) ; } catch ( final IOException e ) { throwFileNotFoundException ( name , e ) ; } if ( stream == null ) { throwFileNotFoundException ( name , null ) ; } return stream ; |
public class CompositeScore { /** * Returns a new CompositeScore with the filtering remainder replaced and
* covering matches recalculated . Other matches are not recalculated .
* @ since 1.2 */
public CompositeScore < S > withRemainderFilter ( Filter < S > filter ) { } } | return new CompositeScore < S > ( mFilteringScore . withRemainderFilter ( filter ) , mOrderingScore ) ; |
public class DatabaseThreatDetectionPoliciesInner { /** * Creates or updates a database ' s threat detection policy .
* @ param resourceGroupName The name of the resource group that contains the resource . You can obtain this value from the Azure Resource Manager API or the portal .
* @ param serverName The name of the server .
* @ param databaseName The name of the database for which database Threat Detection policy is defined .
* @ param parameters The database Threat Detection policy .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the DatabaseSecurityAlertPolicyInner object */
public Observable < DatabaseSecurityAlertPolicyInner > createOrUpdateAsync ( String resourceGroupName , String serverName , String databaseName , DatabaseSecurityAlertPolicyInner parameters ) { } } | return createOrUpdateWithServiceResponseAsync ( resourceGroupName , serverName , databaseName , parameters ) . map ( new Func1 < ServiceResponse < DatabaseSecurityAlertPolicyInner > , DatabaseSecurityAlertPolicyInner > ( ) { @ Override public DatabaseSecurityAlertPolicyInner call ( ServiceResponse < DatabaseSecurityAlertPolicyInner > response ) { return response . body ( ) ; } } ) ; |
public class MutableChar { /** * Use the supplied function to perform a lazy transform operation when getValue is called
* < pre >
* { @ code
* MutableChar mutable = MutableChar . fromExternal ( ( ) - > ! this . value , val - > ! this . value ) ;
* Mutable < Character > withOverride = mutable . mapOutput ( b - > {
* if ( override )
* return ' a ' ;
* return b ;
* < / pre >
* @ param fn Map function to be applied to the result when getValue is called
* @ return Mutable that lazily applies the provided function when getValue is called to the return value */
public < R > Mutable < R > mapOutputToObj ( final Function < Character , R > fn ) { } } | final MutableChar host = this ; return new Mutable < R > ( ) { @ Override public R get ( ) { return fn . apply ( host . get ( ) ) ; } } ; |
public class Curve25519 { /** * / * Returns x if a contains the gcd , y if b .
* Also , the returned buffer contains the inverse of a mod b ,
* as 32 - byte signed .
* x and y must have 64 bytes space for temporary use .
* requires that a [ - 1 ] and b [ - 1 ] are valid memory locations */
private static final byte [ ] egcd32 ( byte [ ] x , byte [ ] y , byte [ ] a , byte [ ] b ) { } } | int an , bn = 32 , qn , i ; for ( i = 0 ; i < 32 ; i ++ ) x [ i ] = y [ i ] = 0 ; x [ 0 ] = 1 ; an = numsize ( a , 32 ) ; if ( an == 0 ) return y ; /* division by zero */
byte [ ] temp = new byte [ 32 ] ; while ( true ) { qn = bn - an + 1 ; divmod ( temp , b , bn , a , an ) ; bn = numsize ( b , bn ) ; if ( bn == 0 ) return x ; mula32 ( y , x , temp , qn , - 1 ) ; qn = an - bn + 1 ; divmod ( temp , a , an , b , bn ) ; an = numsize ( a , an ) ; if ( an == 0 ) return y ; mula32 ( x , y , temp , qn , - 1 ) ; } |
public class StreamConduit { /** * Stop pumping the streams . */
private void stop ( ) { } } | finish ( inputThread ) ; try { err . flush ( ) ; } catch ( IOException e ) { // ignore
} try { out . flush ( ) ; } catch ( IOException e ) { // ignore
} finish ( outputThread ) ; finish ( errorThread ) ; |
public class ThrowingBridge { /** * int */
public static PrimitiveIterator . OfInt of ( ThrowingIterator . OfInt < Nothing > itr ) { } } | return of ( itr , Nothing . class ) ; |
public class ProviderInfo { /** * Gets weight .
* @ return the weight */
public int getWeight ( ) { } } | ProviderStatus status = getStatus ( ) ; if ( status == ProviderStatus . WARMING_UP ) { try { // 还处于预热时间中
Integer warmUpWeight = ( Integer ) getDynamicAttr ( ProviderInfoAttrs . ATTR_WARMUP_WEIGHT ) ; if ( warmUpWeight != null ) { return warmUpWeight ; } } catch ( Exception e ) { return weight ; } } return weight ; |
public class JDBC4ResultSet { /** * ResultSet object as a String in the Java programming language . */
@ Override public String getString ( int columnIndex ) throws SQLException { } } | checkColumnBounds ( columnIndex ) ; try { VoltType type = table . getColumnType ( columnIndex - 1 ) ; if ( type == VoltType . STRING ) return table . getString ( columnIndex - 1 ) ; if ( type == VoltType . TIMESTAMP ) return getTimestamp ( columnIndex ) . toString ( ) ; if ( type == VoltType . VARBINARY ) return Encoder . hexEncode ( table . getVarbinary ( columnIndex - 1 ) ) ; return table . get ( columnIndex - 1 , type ) . toString ( ) ; } catch ( Exception x ) { throw SQLError . get ( x ) ; } |
public class DRL6Expressions { /** * src / main / resources / org / drools / compiler / lang / DRL6Expressions . g : 807:1 : not _ key : { . . . } ? = > id = ID ; */
public final void not_key ( ) throws RecognitionException { } } | Token id = null ; try { // src / main / resources / org / drools / compiler / lang / DRL6Expressions . g : 808:5 : ( { . . . } ? = > id = ID )
// src / main / resources / org / drools / compiler / lang / DRL6Expressions . g : 808:12 : { . . . } ? = > id = ID
{ if ( ! ( ( ( helper . validateIdentifierKey ( DroolsSoftKeywords . NOT ) ) ) ) ) { if ( state . backtracking > 0 ) { state . failed = true ; return ; } throw new FailedPredicateException ( input , "not_key" , "(helper.validateIdentifierKey(DroolsSoftKeywords.NOT))" ) ; } id = ( Token ) match ( input , ID , FOLLOW_ID_in_not_key5173 ) ; if ( state . failed ) return ; if ( state . backtracking == 0 ) { helper . emit ( id , DroolsEditorType . KEYWORD ) ; } } } catch ( RecognitionException re ) { throw re ; } finally { // do for sure before leaving
} |
public class PdfContentByte { /** * Concatenate a matrix to the current transformation matrix .
* @ param a an element of the transformation matrix
* @ param b an element of the transformation matrix
* @ param c an element of the transformation matrix
* @ param d an element of the transformation matrix
* @ param e an element of the transformation matrix
* @ param f an element of the transformation matrix */
public void concatCTM ( float a , float b , float c , float d , float e , float f ) { } } | content . append ( a ) . append ( ' ' ) . append ( b ) . append ( ' ' ) . append ( c ) . append ( ' ' ) ; content . append ( d ) . append ( ' ' ) . append ( e ) . append ( ' ' ) . append ( f ) . append ( " cm" ) . append_i ( separator ) ; |
public class ConditionalFunctions { /** * Returned expression results in first non - MISSING , non - Inf number .
* Returns MISSING or NULL if a non - number input is encountered first . */
public static Expression ifInf ( Expression expression1 , Expression expression2 , Expression ... others ) { } } | return build ( "IFINF" , expression1 , expression2 , others ) ; |
public class InstallRemappedFileMojo { /** * Gets the path of the specified artifact within the local repository . Note that the returned path need not exist
* ( yet ) .
* @ param artifact The artifact whose local repo path should be determined , must not be < code > null < / code > .
* @ return The absolute path to the artifact when installed , never < code > null < / code > . */
protected File getLocalRepoFile ( Artifact artifact ) { } } | String path = localRepository . pathOf ( artifact ) ; return new File ( localRepository . getBasedir ( ) , path ) ; |
public class VirtualMachineScaleSetsInner { /** * Deletes virtual machines in a VM scale set .
* @ param resourceGroupName The name of the resource group .
* @ param vmScaleSetName The name of the VM scale set .
* @ param instanceIds The virtual machine scale set instance ids .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ throws CloudException thrown if the request is rejected by server
* @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
* @ return the OperationStatusResponseInner object if successful . */
public OperationStatusResponseInner deleteInstances ( String resourceGroupName , String vmScaleSetName , List < String > instanceIds ) { } } | return deleteInstancesWithServiceResponseAsync ( resourceGroupName , vmScaleSetName , instanceIds ) . toBlocking ( ) . last ( ) . body ( ) ; |
public class InnerRankUpdate_DDRB { /** * Performs : < br >
* < br >
* A = A + & alpha ; B < sup > T < / sup > B
* @ param blockLength Size of the block in the block matrix .
* @ param alpha scaling factor for right hand side .
* @ param A Block aligned submatrix .
* @ param B Block aligned submatrix . */
public static void rankNUpdate ( int blockLength , double alpha , DSubmatrixD1 A , DSubmatrixD1 B ) { } } | int heightB = B . row1 - B . row0 ; if ( heightB > blockLength ) throw new IllegalArgumentException ( "Height of B cannot be greater than the block length" ) ; int N = B . col1 - B . col0 ; if ( A . col1 - A . col0 != N ) throw new IllegalArgumentException ( "A does not have the expected number of columns based on B's width" ) ; if ( A . row1 - A . row0 != N ) throw new IllegalArgumentException ( "A does not have the expected number of rows based on B's width" ) ; for ( int i = B . col0 ; i < B . col1 ; i += blockLength ) { int indexB_i = B . row0 * B . original . numCols + i * heightB ; int widthB_i = Math . min ( blockLength , B . col1 - i ) ; int rowA = i - B . col0 + A . row0 ; int heightA = Math . min ( blockLength , A . row1 - rowA ) ; for ( int j = B . col0 ; j < B . col1 ; j += blockLength ) { int widthB_j = Math . min ( blockLength , B . col1 - j ) ; int indexA = rowA * A . original . numCols + ( j - B . col0 + A . col0 ) * heightA ; int indexB_j = B . row0 * B . original . numCols + j * heightB ; InnerMultiplication_DDRB . blockMultPlusTransA ( alpha , B . original . data , B . original . data , A . original . data , indexB_i , indexB_j , indexA , heightB , widthB_i , widthB_j ) ; } } |
public class ConfigAdminServiceFactory { /** * ( non - Javadoc )
* @ see
* org . osgi . framework . ServiceFactory # getService ( org . osgi . framework . Bundle ,
* org . osgi . framework . ServiceRegistration ) */
@ Override public ConfigurationAdmin getService ( Bundle bundle , ServiceRegistration < ConfigurationAdmin > registration ) { } } | ServiceReference < ConfigurationAdmin > reference = registration . getReference ( ) ; ced . setServiceReference ( reference ) ; return new ConfigurationAdminImpl ( this , bundle ) ; |
public class DefaultWriteFutureEx { /** * Returns a new { @ link DefaultWriteFuture } which is already marked as ' not written ' . */
public static WriteFutureEx newNotWrittenFuture ( IoSession session , Throwable cause ) { } } | DefaultWriteFutureEx unwrittenFuture = new DefaultWriteFutureEx ( session ) ; unwrittenFuture . setException ( cause ) ; return unwrittenFuture ; |
public class Messages {

    // Errors generated from processes on this flow; null when unset.
    private java.util.List<String> errors;

    /**
     * Returns the list of errors that might have been generated from processes
     * on this flow, or {@code null} if none have been set.
     */
    public java.util.List<String> getErrors() {
        return errors;
    }

    /**
     * Sets the list of errors that might have been generated from processes on
     * this flow. A defensive copy of the supplied collection is stored.
     *
     * @param errors the errors to store, or {@code null} to clear
     */
    public void setErrors(java.util.Collection<String> errors) {
        this.errors = (errors == null) ? null : new java.util.ArrayList<String>(errors);
    }
}
public class PDPageContentStreamExt { /** * Set the non - stroking color in the DeviceRGB color space . Range is 0 . . 255.
* @ param r
* The red value .
* @ param g
* The green value .
* @ param b
* The blue value .
* @ throws IOException
* If an IO error occurs while writing to the stream .
* @ throws IllegalArgumentException
* If the parameters are invalid . */
public void setNonStrokingColor ( final int r , final int g , final int b ) throws IOException { } } | if ( _isOutside255Interval ( r ) || _isOutside255Interval ( g ) || _isOutside255Interval ( b ) ) { throw new IllegalArgumentException ( "Parameters must be within 0..255, but are (" + r + "," + g + "," + b + ")" ) ; } writeOperand ( r / 255f ) ; writeOperand ( g / 255f ) ; writeOperand ( b / 255f ) ; writeOperator ( ( byte ) 'r' , ( byte ) 'g' ) ; |
public class JsApiMessageImpl { /** * Add an item to the SystemContext under the given name .
* Javadoc description supplied by JsApiMessage interface . */
@ Override public void putSystemContextItem ( String name , Serializable item ) throws IllegalArgumentException , IOException { } } | if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( this , tc , "putSystemContextItem" , new Object [ ] { name , item } ) ; /* If we have a non - null name */
if ( name != null ) { /* If we really have an item */
if ( item != null ) { /* If the item is of a JMS supported type , we can store it as is . */
if ( isValidForJms ( item ) ) { getSystemContextMap ( ) . put ( name , item ) ; } /* Otherwise , we need to take a safe copy & ' flatten it ' suitably . */
else { getSystemContextMap ( ) . put ( name , flattenMapObject ( item ) ) ; } } /* If item is null , just call deleteProperty */
else { getSystemContextMap ( ) . remove ( name ) ; } } /* A null name is invalid . */
else { throw new IllegalArgumentException ( "null" ) ; } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( this , tc , "putSystemContextItem" ) ; |
public class AbstractCloseableIteratorCollection { /** * Copied from AbstractCollection since we need to close iterator */
@ Override public boolean retainAll ( Collection < ? > c ) { } } | boolean modified = false ; try ( CloseableIterator < O > it = iterator ( ) ) { while ( it . hasNext ( ) ) { if ( ! c . contains ( it . next ( ) ) ) { it . remove ( ) ; modified = true ; } } return modified ; } |
public class ProtobufIDLProxy { /** * Creates the .
* @ param reader the reader
* @ param debug the debug
* @ param path the path
* @ return the map
* @ throws IOException Signals that an I / O exception has occurred . */
public static Map < String , IDLProxyObject > create ( Reader reader , boolean debug , File path ) throws IOException { } } | return create ( reader , debug , path , true ) ; |
public class AddOnWrapper { /** * Sets the issues that the newer version of the wrapped add - on or its extensions might have that prevents them from being
* run .
* The contents should be in HTML .
* @ param updateIssues the running issues of the add - on or its extensions , empty if there ' s no issues .
* @ param addOnIssues { @ code true } if the issues are caused by the add - on , { @ code false } if are caused by the extensions
* @ since 2.4.0
* @ see # getUpdateIssues ( ) */
public void setUpdateIssues ( String updateIssues , boolean addOnIssues ) { } } | Validate . notNull ( updateIssues , "Parameter updateIssues must not be null." ) ; this . updateIssues = updateIssues ; this . addOnUpdateIssues = addOnIssues ; |
public class AWSCodePipelineClient { /** * Gets a listing of all the webhooks in this region for this account . The output lists all webhooks and includes
* the webhook URL and ARN , as well the configuration for each webhook .
* @ param listWebhooksRequest
* @ return Result of the ListWebhooks operation returned by the service .
* @ throws ValidationException
* The validation was specified in an invalid format .
* @ throws InvalidNextTokenException
* The next token was specified in an invalid format . Make sure that the next token you provided is the
* token returned by a previous call .
* @ sample AWSCodePipeline . ListWebhooks
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / codepipeline - 2015-07-09 / ListWebhooks " target = " _ top " > AWS API
* Documentation < / a > */
@ Override public ListWebhooksResult listWebhooks ( ListWebhooksRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeListWebhooks ( request ) ; |
public class ListNumbers { /** * Tests whether the list contains a equally spaced numbers .
* Always returns true if the list was created with { @ link # linearList ( double , double , int ) }
* or { @ link # linearListFromRange ( double , double , int ) } . For all other cases ,
* takes the first and last value , creates a linearListFromRange , and checks
* whether the difference is greater than the precision allowed by double .
* Note that this method is really strict , and it may rule out cases
* that may be considered to be linear .
* @ param listNumber
* @ return true if the elements of the list are equally spaced */
public static boolean isLinear ( ListNumber listNumber ) { } } | if ( listNumber instanceof LinearListDouble || listNumber instanceof LinearListDoubleFromRange ) { return true ; } ListDouble diff = ListMath . subtract ( listNumber , linearListFromRange ( listNumber . getDouble ( 0 ) , listNumber . getDouble ( listNumber . size ( ) - 1 ) , listNumber . size ( ) ) ) ; for ( int i = 0 ; i < diff . size ( ) ; i ++ ) { if ( Math . abs ( diff . getDouble ( i ) ) > Math . ulp ( listNumber . getDouble ( i ) ) ) { return false ; } } return true ; |
public class UtilityElf { /** * Create a ThreadPoolExecutor .
* @ param queueSize the queue size
* @ param threadName the thread name
* @ param threadFactory an optional ThreadFactory
* @ param policy the RejectedExecutionHandler policy
* @ return a ThreadPoolExecutor */
public static ThreadPoolExecutor createThreadPoolExecutor ( final int queueSize , final String threadName , ThreadFactory threadFactory , final RejectedExecutionHandler policy ) { } } | if ( threadFactory == null ) { threadFactory = new DefaultThreadFactory ( threadName , true ) ; } LinkedBlockingQueue < Runnable > queue = new LinkedBlockingQueue < > ( queueSize ) ; ThreadPoolExecutor executor = new ThreadPoolExecutor ( 1 /* core */
, 1 /* max */
, 5 /* keepalive */
, SECONDS , queue , threadFactory , policy ) ; executor . allowCoreThreadTimeOut ( true ) ; return executor ; |
public class Timestamp { /** * clean up timestamp argument assuming earliest possible values for missing
* or bogus digits .
* @ param timestamp String
* @ return String */
public static String padStartDateStr ( String timestamp ) { } } | return boundTimestamp ( padDigits ( timestamp , LOWER_TIMESTAMP_LIMIT , UPPER_TIMESTAMP_LIMIT , LOWER_TIMESTAMP_LIMIT ) ) ; |
public class OnlineVariance { /** * Adds a value to this variance . If the time between the previous addition
* and this addition exceeds the age gap for this object , then the variance
* will be reset to 0 before this value is added .
* @ param value
* the value to add .
* @ return the current computed online variance , or 0 if none is available
* ( e . g . , the initial value was added or a value was added after the
* age gap passed ) */
public float addValue ( final float value ) { } } | long now = System . currentTimeMillis ( ) ; // If more than 15 seconds passed then clear the data since it is
// too old at this point .
if ( now - this . last_time > this . ageGap ) { this . reset ( ) ; } this . last_time = now ; if ( this . sizeHistory < this . maxHistory ) { this . sum += value ; this . sum_squares += Math . pow ( value , 2 ) ; ++ this . sizeHistory ; } else { Float oldest_val = this . history . poll ( ) ; if ( oldest_val != null ) { this . sum = this . sum - oldest_val . floatValue ( ) + value ; this . sum_squares = this . sum_squares - ( float ) Math . pow ( oldest_val . floatValue ( ) , 2 ) + ( float ) Math . pow ( value , 2 ) ; } else { log . warn ( "Could not poll history to update variance." ) ; } } this . history . offer ( Float . valueOf ( value ) ) ; float degrees_of_freedom = this . sizeHistory - 1 ; if ( degrees_of_freedom < 1.0 ) { return 0f ; } this . currentVariance = ( this . sum_squares - ( ( float ) Math . pow ( this . sum , 2 ) / this . sizeHistory ) ) / degrees_of_freedom ; return this . currentVariance ; |
public class ScanningQueryEngine { /** * Compute the columns that are defined in the supplied { @ link PlanNode plan node } . If the supplied plan node is not a
* { @ link Type # PROJECT project node } , the method finds the first PROJECT node below the given node .
* @ param optimizedPlan the optimized plan node in a query plan ; may not be null
* @ param context the query context ; may not be null
* @ return the representation of the projected columns ; never null */
protected Columns determineProjectedColumns ( PlanNode optimizedPlan , final ScanQueryContext context ) { } } | final PlanHints hints = context . getHints ( ) ; // Look for which columns to include in the results ; this will be defined by the highest PROJECT node . . .
PlanNode project = optimizedPlan ; if ( project . getType ( ) != Type . PROJECT ) { project = optimizedPlan . findAtOrBelow ( Traversal . LEVEL_ORDER , Type . PROJECT ) ; } if ( project != null ) { List < Column > columns = project . getPropertyAsList ( Property . PROJECT_COLUMNS , Column . class ) ; List < String > columnTypes = project . getPropertyAsList ( Property . PROJECT_COLUMN_TYPES , String . class ) ; // Determine whether to include the full - text search scores in the results . . .
boolean includeFullTextSearchScores = hints . hasFullTextSearch ; if ( ! includeFullTextSearchScores ) { for ( PlanNode select : optimizedPlan . findAllAtOrBelow ( Type . SELECT ) ) { Constraint constraint = select . getProperty ( Property . SELECT_CRITERIA , Constraint . class ) ; if ( QueryUtil . includeFullTextScores ( constraint ) ) { includeFullTextSearchScores = true ; break ; } } } // The projected columns may not include all of the selectors from the child of the PROJECT node .
// So , we need to figure out the selector indexes based upon the ResultColumn for the child . . .
Columns childColumns = context . columnsFor ( project . getFirstChild ( ) ) ; return new ResultColumns ( columns , columnTypes , includeFullTextSearchScores , childColumns ) ; } // Look for a SOURCE . . .
if ( optimizedPlan . getType ( ) == Type . SOURCE ) { PlanNode source = optimizedPlan ; List < Schemata . Column > schemataColumns = source . getPropertyAsList ( Property . SOURCE_COLUMNS , Schemata . Column . class ) ; List < Column > columns = new ArrayList < > ( schemataColumns . size ( ) ) ; List < String > columnTypes = new ArrayList < > ( schemataColumns . size ( ) ) ; SelectorName selector = source . getSelectors ( ) . iterator ( ) . next ( ) ; for ( Schemata . Column schemataColumn : schemataColumns ) { Column column = new Column ( selector , schemataColumn . getName ( ) , schemataColumn . getName ( ) ) ; columns . add ( column ) ; columnTypes . add ( schemataColumn . getPropertyTypeName ( ) ) ; } return new ResultColumns ( columns , columnTypes , hints . hasFullTextSearch , null ) ; } return ResultColumns . EMPTY ; |
public class BalancePlan { /** * Prints data distribution based on report from NameNode */
public static void logDataDistribution ( DatanodeInfo [ ] report ) { } } | if ( LOG . isInfoEnabled ( ) ) { double avgRemaining = computeAvgRemaining ( Arrays . asList ( report ) ) ; StringBuilder msg = new StringBuilder ( "Data distribution report: avgRemaining " + avgRemaining ) ; for ( DatanodeInfo node : report ) { msg . append ( "\n" ) . append ( node . getName ( ) ) ; msg . append ( " remaining " ) . append ( getRemaining ( node ) ) ; msg . append ( " raw " ) . append ( node . getRemaining ( ) ) . append ( " / " ) . append ( node . getCapacity ( ) ) ; } LOG . info ( msg ) ; } |
public class SqlExecutor { /** * 执行查询语句 < br >
* 此方法不会关闭Connection
* @ param < T > 处理结果类型
* @ param conn 数据库连接对象
* @ param sql 查询语句
* @ param rsh 结果集处理对象
* @ param params 参数
* @ return 结果对象
* @ throws SQLException SQL执行异常 */
public static < T > T query ( Connection conn , String sql , RsHandler < T > rsh , Object ... params ) throws SQLException { } } | PreparedStatement ps = null ; try { ps = StatementUtil . prepareStatement ( conn , sql , params ) ; return executeQuery ( ps , rsh ) ; } finally { DbUtil . close ( ps ) ; } |
public class VirtualMachineScaleSetsInner { /** * Reimages all the disks ( including data disks ) in the virtual machines in a VM scale set . This operation is only supported for managed disks .
* @ param resourceGroupName The name of the resource group .
* @ param vmScaleSetName The name of the VM scale set .
* @ param serviceCallback the async ServiceCallback to handle successful and failed responses .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the { @ link ServiceFuture } object */
public ServiceFuture < OperationStatusResponseInner > reimageAllAsync ( String resourceGroupName , String vmScaleSetName , final ServiceCallback < OperationStatusResponseInner > serviceCallback ) { } } | return ServiceFuture . fromResponse ( reimageAllWithServiceResponseAsync ( resourceGroupName , vmScaleSetName ) , serviceCallback ) ; |
public class ECoordinate { /** * Assigns sin ( angle ) to this coordinate . */
void sin ( ECoordinate angle ) { } } | double sinv = Math . sin ( angle . m_value ) ; double cosv = Math . cos ( angle . m_value ) ; m_value = sinv ; double absv = Math . abs ( sinv ) ; m_eps = ( Math . abs ( cosv ) + absv * 0.5 * angle . m_eps ) * angle . m_eps + epsCoordinate ( ) * absv ; |
public class AbstractFilter { /** * Get the globals from the session
* @ param req
* @ return globals object */
public FilterGlobals getGlobals ( final HttpServletRequest req ) { } } | HttpSession sess = req . getSession ( ) ; if ( sess == null ) { // We ' re screwed
return null ; } Object o = sess . getAttribute ( globalsName ) ; FilterGlobals fg ; if ( o == null ) { fg = newFilterGlobals ( ) ; sess . setAttribute ( globalsName , fg ) ; if ( debug ( ) ) { debug ( "Created new FilterGlobals from session " + sess . getId ( ) ) ; } } else { fg = ( FilterGlobals ) o ; // if ( debug ( ) ) {
// getLogger ( ) . debug ( " Obtained FilterGlobals from session with id " +
// sess . getId ( ) ) ;
} return fg ; |
public class GPSPointDimensionMarshaller { /** * Marshall the given parameter object . */
public void marshall ( GPSPointDimension gPSPointDimension , ProtocolMarshaller protocolMarshaller ) { } } | if ( gPSPointDimension == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( gPSPointDimension . getCoordinates ( ) , COORDINATES_BINDING ) ; protocolMarshaller . marshall ( gPSPointDimension . getRangeInKilometers ( ) , RANGEINKILOMETERS_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class CmsEditProjectForm { /** * Creates a new project . < p > */
private void createProject ( ) { } } | CmsObject cms = A_CmsUI . getCmsObject ( ) ; try { String name = "/" ; if ( CmsStringUtil . isNotEmptyOrWhitespaceOnly ( m_fieldOU . getValue ( ) ) ) { name = CmsStringUtil . joinPaths ( name , m_fieldOU . getValue ( ) ) ; } if ( CmsStringUtil . isNotEmptyOrWhitespaceOnly ( m_fieldName . getValue ( ) ) ) { name = CmsStringUtil . joinPaths ( name , m_fieldName . getValue ( ) ) ; } else { name = CmsStringUtil . joinPaths ( name , "/" ) ; } m_project = cms . createProject ( name , m_fieldDescription . getValue ( ) , m_fieldUser . getValue ( ) , m_fieldManager . getValue ( ) , m_fieldDeleteAfterPublish . getValue ( ) . booleanValue ( ) ? CmsProject . PROJECT_TYPE_TEMPORARY : CmsProject . PROJECT_TYPE_NORMAL ) ; updateProjectResources ( ) ; } catch ( Throwable t ) { CmsErrorDialog . showErrorDialog ( t ) ; } |
public class NamedArgumentDefinition { /** * Return a string with the usage statement for this argument .
* @ param allActualArguments { code Map } of all namedArgumentDefinitions for the containing object
* @ param pluginDescriptors Collection of { @ code CommandLinePluginDescriptor } objects for the containing object
* @ param argumentColumnWidth width reserved for argument name column display
* @ param descriptionColumnWidth width reserved for argument description column display
* @ return the usage string for this argument */
public String getArgumentUsage ( final Map < String , NamedArgumentDefinition > allActualArguments , final Collection < CommandLinePluginDescriptor < ? > > pluginDescriptors , final int argumentColumnWidth , final int descriptionColumnWidth ) { } } | final StringBuilder sb = new StringBuilder ( ) ; sb . append ( "--" ) . append ( getLongName ( ) ) ; if ( ! getShortName ( ) . isEmpty ( ) ) { sb . append ( ",-" ) . append ( getShortName ( ) ) ; } sb . append ( ":" ) . append ( getUnderlyingFieldClass ( ) . getSimpleName ( ) ) ; int labelLength = sb . toString ( ) . length ( ) ; int numSpaces = argumentColumnWidth - labelLength ; if ( labelLength > argumentColumnWidth ) { sb . append ( "\n" ) ; numSpaces = argumentColumnWidth ; } printSpaces ( sb , numSpaces ) ; final String description = getArgumentDescription ( allActualArguments , pluginDescriptors ) ; final String wrappedDescription = Utils . wrapParagraph ( description , descriptionColumnWidth ) ; final String [ ] descriptionLines = wrappedDescription . split ( "\n" ) ; for ( int i = 0 ; i < descriptionLines . length ; ++ i ) { if ( i > 0 ) { printSpaces ( sb , argumentColumnWidth ) ; } sb . append ( descriptionLines [ i ] ) ; sb . append ( "\n" ) ; } sb . append ( "\n" ) ; return sb . toString ( ) ; |
public class Utilities { /** * Set value to a variable ( null is forbiden , so set default value ) .
* @ param value
* is value setted if value is not null .
* @ param defValue
* is value setted if value is null .
* @ return a { link java . lang . String } with the value not null . */
public static String setProperty ( String value , String defValue ) { } } | if ( value != null && ! "" . equals ( value ) ) { return value ; } return defValue ; |
public class Tooltip { /** * Call the native tooltip method with the given argument .
* @ param e the { @ link Element } .
* @ param arg the arg */
private void call ( final Element e , final String arg ) { } } | JQuery . jQuery ( e ) . tooltip ( arg ) ; |
public class FreeMarkerRequestHandler { /** * Build a freemarker model holding the information associated with the
* session .
* This model provides :
* * The ` locale ` ( of type { @ link Locale } ) .
* * The ` resourceBundle ` ( of type { @ link ResourceBundle } ) .
* * A function " ` _ ` " that looks up the given key in the
* resource bundle .
* @ param session
* the session
* @ return the model */
protected Map < String , Object > fmSessionModel ( Optional < Session > session ) { } } | @ SuppressWarnings ( "PMD.UseConcurrentHashMap" ) final Map < String , Object > model = new HashMap < > ( ) ; Locale locale = session . map ( sess -> sess . locale ( ) ) . orElse ( Locale . getDefault ( ) ) ; model . put ( "locale" , locale ) ; final ResourceBundle resourceBundle = resourceBundle ( locale ) ; model . put ( "resourceBundle" , resourceBundle ) ; model . put ( "_" , new TemplateMethodModelEx ( ) { @ Override @ SuppressWarnings ( "PMD.EmptyCatchBlock" ) public Object exec ( @ SuppressWarnings ( "rawtypes" ) List arguments ) throws TemplateModelException { @ SuppressWarnings ( "unchecked" ) List < TemplateModel > args = ( List < TemplateModel > ) arguments ; if ( ! ( args . get ( 0 ) instanceof SimpleScalar ) ) { throw new TemplateModelException ( "Not a string." ) ; } String key = ( ( SimpleScalar ) args . get ( 0 ) ) . getAsString ( ) ; try { return resourceBundle . getString ( key ) ; } catch ( MissingResourceException e ) { // no luck
} return key ; } } ) ; return model ; |
public class DeviceAttributes { /** * < pre >
* String representation of device _ type .
* < / pre >
* < code > optional string device _ type = 2 ; < / code > */
public com . google . protobuf . ByteString getDeviceTypeBytes ( ) { } } | java . lang . Object ref = deviceType_ ; if ( ref instanceof java . lang . String ) { com . google . protobuf . ByteString b = com . google . protobuf . ByteString . copyFromUtf8 ( ( java . lang . String ) ref ) ; deviceType_ = b ; return b ; } else { return ( com . google . protobuf . ByteString ) ref ; } |
public class IconicsDrawable { /** * Set the color of the drawable .
* @ param colors The color , usually from android . graphics . Color or 0xFF012345.
* @ return The current IconicsDrawable for chaining . */
@ NonNull public IconicsDrawable color ( @ NonNull ColorStateList colors ) { } } | if ( colors != null ) { mIconBrush . setColors ( colors ) ; if ( mIconBrush . applyState ( getState ( ) ) ) { invalidateSelf ( ) ; } } return this ; |
public class StringUtils { /** * Constructs a String with only spaces up to the specified length .
* @ param number an integer value indicating the number of spaces in the String .
* @ return a String containing the specified number of spaces . */
public static String getSpaces ( int number ) { } } | Assert . argument ( number >= 0 , "The number [{0}] of desired spaces must be greater than equal to 0" , number ) ; StringBuilder spaces = new StringBuilder ( Math . max ( number , 0 ) ) ; while ( number > 0 ) { int count = Math . min ( SPACES . length - 1 , number ) ; spaces . append ( SPACES [ count ] ) ; number -= count ; } return spaces . toString ( ) ; |
public class GZIPArchiveReader { /** * Reads the GZIP entry header ( popluating the entry metadata , except for the CRC ) .
* @ param entry the entry where to store the entry metadata ( except for the CRC that is read with the trailer ) .
* @ return the number of bytes consumed while reading the header , or - 1 in case of EOF . */
private GZIPArchive . ReadEntry readHeader ( ) throws IOException { } } | final GZIPArchive . ReadEntry entry = new GZIPArchive . ReadEntry ( ) ; byte [ ] buffer = headerBuffer ; // local copy for efficiency reasons
// ID1 ID2 CM FLG
if ( input . read ( buffer , 0 , 4 ) == - 1 ) return null ; if ( buffer [ 0 ] != GZIPArchive . GZIP_START [ 0 ] || buffer [ 1 ] != GZIPArchive . GZIP_START [ 1 ] ) throw new GZIPArchive . FormatException ( "Missing GZip magic numbers, found: " + buffer [ 0 ] + " " + buffer [ 1 ] ) ; if ( buffer [ 2 ] != GZIPArchive . GZIP_START [ 2 ] ) throw new GZIPArchive . FormatException ( "Unknown compression method: " + buffer [ 2 ] ) ; int flg = buffer [ 3 ] ; // MTIME
entry . mtime = readLEInt ( input ) ; // XFL OS ( ignored )
this . input . read ( buffer , 0 , 2 ) ; /* EXTRA begin */
entry . compressedSkipLength = - 1 ; if ( ( flg & GZIPArchive . FEXTRA ) != 0 ) { // XLEN
short xlen = readLEShort ( input ) ; while ( xlen > 0 ) { // SI1 SI2
input . read ( buffer , 0 , 2 ) ; // LEN
short len = readLEShort ( input ) ; if ( buffer [ 0 ] == GZIPArchive . SKIP_LEN [ 0 ] && buffer [ 1 ] == GZIPArchive . SKIP_LEN [ 1 ] ) { entry . compressedSkipLength = readLEInt ( input ) ; entry . uncompressedSkipLength = readLEInt ( input ) ; } else input . read ( buffer , 0 , len ) ; xlen -= len + GZIPArchive . SKIP_LEN . length + GZIPArchive . SHORT_LEN ; // SI1 , SI2 + 1 short encoding LEN
} } else throw new GZIPArchive . FormatException ( "Missing SL extra field" ) ; if ( entry . compressedSkipLength < 0 ) throw new GZIPArchive . FormatException ( "Negative compressed-skip-length (" + entry . compressedSkipLength + ")" ) ; /* EXTRA end */
// NAME
if ( ( flg & GZIPArchive . FNAME ) != 0 ) { int len = 0 , b ; while ( ( b = this . input . read ( ) ) != 0 ) { buffer [ len ++ ] = ( byte ) b ; } entry . name = Arrays . copyOf ( buffer , len ) ; } // COMMENT
if ( ( flg & GZIPArchive . FCOMMENT ) != 0 ) { int len = 0 , b ; while ( ( b = this . input . read ( ) ) != 0 ) { buffer [ len ++ ] = ( byte ) b ; } entry . comment = Arrays . copyOf ( buffer , len ) ; } // HCRC
if ( ( flg & GZIPArchive . FHCRC ) != 0 ) { this . input . read ( buffer , 0 , 2 ) ; } return entry ; |
public class CloudBigtableScanConfiguration { /** * Converts a { @ link CloudBigtableTableConfiguration } object to a
* { @ link CloudBigtableScanConfiguration } that will perform the specified { @ link Scan } on the
* table .
* @ param config The { @ link CloudBigtableTableConfiguration } object .
* @ param scan The { @ link Scan } to add to the configuration .
* @ return The new { @ link CloudBigtableScanConfiguration } . */
public static CloudBigtableScanConfiguration fromConfig ( CloudBigtableTableConfiguration config , Scan scan ) { } } | CloudBigtableScanConfiguration . Builder builder = new CloudBigtableScanConfiguration . Builder ( ) ; config . copyConfig ( builder ) ; return builder . withScan ( scan ) . build ( ) ; |
public class AFTagAttributes { /** * This method is used exclusively to get the pass through attributes !
* Namespaces http : / / xmlns . jcp . org / jsf / passthrough and http : / / xmlns . jcp . org / jsf / passthrough
* Get all TagAttributesImpl for the passed namespace
* @ param namespace
* namespace to search
* @ return a non - null array of TagAttributesImpl */
@ Override public TagAttribute [ ] getAll ( String namespace ) { } } | if ( namespace == null ) namespace = "" ; List < TagAttribute > list = new ArrayList < TagAttribute > ( ) ; for ( TagAttribute a : attrs ) { if ( namespace . equals ( a . getNamespace ( ) ) ) { list . add ( a ) ; } } TagAttribute [ ] result = new TagAttribute [ list . size ( ) ] ; list . toArray ( result ) ; return result ; |
public class Config { /** * Creates a group analyzer for an area using the selected implementation .
* @ param root the root area for separator detection
* @ return the created group analyzer */
public static GroupAnalyzer createGroupAnalyzer ( AreaImpl root ) { } } | // return new org . fit . segm . grouping . op . GroupAnalyzerByGrouping ( root ) ;
// return new org . fit . segm . grouping . op . GroupAnalyzerBySeparators ( root ) ;
// return new org . fit . segm . grouping . op . GroupAnalyzerByGroupingAndSeparators ( root ) ;
// return new org . fit . segm . grouping . op . GroupAnalyzerByFlooding ( root ) ;
return new org . fit . segm . grouping . op . GroupAnalyzerByStyles ( root , 1 , false ) ; |
public class Unchecked { /** * Wrap a { @ link org . jooq . lambda . fi . util . function . CheckedBiConsumer } in a { @ link BiConsumer } .
* Example :
* < code > < pre >
* map . forEach ( Unchecked . biConsumer ( ( k , v ) - > {
* if ( k = = null | | v = = null )
* throw new Exception ( " No nulls allowed in map " ) ;
* < / pre > < / code > */
public static < T , U > BiConsumer < T , U > biConsumer ( CheckedBiConsumer < T , U > consumer ) { } } | return biConsumer ( consumer , THROWABLE_TO_RUNTIME_EXCEPTION ) ; |
public class FacebookBatcher { /** * / * ( non - Javadoc )
* @ see com . googlecode . batchfb . Batcher # queryFirst ( java . lang . String , java . lang . Class ) */
@ Override public < T > Later < T > queryFirst ( String fql , Class < T > type ) { } } | return this . getBatchForQuery ( ) . queryFirst ( fql , type ) ; |
public class SegmentFelzenszwalbHuttenlocher04 { /** * Finds the root given child . If the child does not point directly to the parent find the parent and make
* the child point directly towards it . */
protected int find ( int child ) { } } | int root = graph . data [ child ] ; if ( root == graph . data [ root ] ) return root ; int inputChild = child ; while ( root != child ) { child = root ; root = graph . data [ child ] ; } graph . data [ inputChild ] = root ; return root ; |
public class AssertUtils { /** * Checks if the provided String is blank , meaning it is empty or contains whitespace only .
* @ param string
* the String to check , may be { @ code null }
* @ return { @ code true } if the String is blank , false otherwise */
public static final boolean stringIsBlank ( final String string ) { } } | if ( string == null || string . length ( ) == 0 ) { return true ; } int stringLength = string . length ( ) ; for ( int i = 0 ; i < stringLength ; i ++ ) { if ( Character . isWhitespace ( string . charAt ( i ) ) == false ) { return false ; } } return true ; |
public class WebContainerBase { /** * { @ inheritDoc }
* @ see org . jboss . shrinkwrap . api . container . WebContainer # addAsWebInfResources ( java . lang . Package , java . lang . String [ ] ) */
@ Override public T addAsWebInfResources ( final Package resourcePackage , final String ... resourceNames ) throws IllegalArgumentException { } } | Validate . notNull ( resourcePackage , "ResourcePackage must be specified" ) ; Validate . notNullAndNoNullValues ( resourceNames , "ResourceNames must be specified and can not container null values" ) ; for ( String resourceName : resourceNames ) { addAsWebInfResource ( resourcePackage , resourceName ) ; } return covarientReturn ( ) ; |
public class DefaultAndroidApp { /** * ( non - Javadoc )
* @ see io . selendroid . android . impl . AndroidAppA # getBasePackage ( ) */
@ Override public String getBasePackage ( ) throws AndroidSdkException { } } | if ( mainPackage == null ) { try { mainPackage = extractApkDetails ( "package: name='(.*?)'" ) ; } catch ( ShellCommandException e ) { throw new SelendroidException ( "The base package name of the apk " + apkFile . getName ( ) + " cannot be extracted." ) ; } } return mainPackage ; |
public class SibTr { /** * Forward an Warning message event to all registered
* < code > TraceEventListener < / code > s if permitted by the Suppressor .
* Each < code > TraceEventListener < / code > will then
* determine whether to log or ignore the forward event .
* @ param tc the non - null < code > TraceComponent < / code > the event is associated
* with .
* @ param s the Suppressor that will determine if this message should be
* suppressed or not .
* @ param msgKey the message key identifying an NLS message for this event .
* This message takes substitution parameters and must be in the
* resource bundle currently associated with the
* < code > TraceComponent < / code > .
* @ param objs an < code > Object < / code > or array of < code > Objects < / code > to
* include as substitution text in the message . The number of objects
* passed must equal the number of substitution parameters the message
* expects . */
public static final void warning ( TraceComponent tc , Suppressor s , String msgKey , Object objs ) { } } | SibMessage . SuppressableWarning ( s , getMEName ( null ) , tc , msgKey , objs ) ; |
public class EvenMoreObjects { /** * Configures { @ code builder } using { @ code consumer } and then builds it using { @ code creator } .
* @ param builder the builder
* @ param consumer the consumer
* @ param creator the creator
* @ param < B > the builder type
* @ param < C > the creator type
* @ return the value */
public static < B , C > @ NonNull C make ( final @ NonNull B builder , final @ NonNull Consumer < B > consumer , final @ NonNull Function < B , C > creator ) { } } | return creator . apply ( make ( builder , consumer ) ) ; |
public class GeneratorSetDocReader { /** * Returns a { @ link GeneratorSet } instance . */
public IGeneratorSet getGeneratorSet ( ) { } } | SAXParser parser ; try { parser = SAXParserFactory . newInstance ( ) . newSAXParser ( ) ; } catch ( Exception e ) { throw new RuntimeException ( "Can't create SAXParser" , e ) ; } try { parser . parse ( getInputStream ( ) , this ) ; } catch ( SAXParseException spe ) { throw new RuntimeException ( "Error in document at line=" + spe . getLineNumber ( ) , spe ) ; } catch ( Exception e ) { throw new RuntimeException ( "Can't read GeneratorSet" , e ) ; } return generatorSet_ ; |
public class PortletDescriptorImpl { /** * If not already created , a new < code > custom - portlet - mode < / code > element will be created and returned .
* Otherwise , the first existing < code > custom - portlet - mode < / code > element will be returned .
* @ return the instance defined for the element < code > custom - portlet - mode < / code > */
public CustomPortletModeType < PortletDescriptor > getOrCreateCustomPortletMode ( ) { } } | List < Node > nodeList = model . get ( "custom-portlet-mode" ) ; if ( nodeList != null && nodeList . size ( ) > 0 ) { return new CustomPortletModeTypeImpl < PortletDescriptor > ( this , "custom-portlet-mode" , model , nodeList . get ( 0 ) ) ; } return createCustomPortletMode ( ) ; |
public class Node { /** * Virtualized support for map . get ( ) ; overridden in subclasses . */
Node < K , V > find ( int h , Object k ) { } } | Node < K , V > e = this ; if ( k != null ) { do { K ek ; if ( e . hash == h && ( ( ek = e . key ) == k || ( ek != null && k . equals ( ek ) ) ) ) return e ; } while ( ( e = e . next ) != null ) ; } return null ; |
public class EmailSender { /** * Send email with a string content .
* @ param to destination email address
* @ param subject email subject
* @ param content email content
* @ throws MessagingException message exception */
public void sendMail ( String to , String subject , String content ) throws MessagingException { } } | Properties props = new Properties ( ) ; props . put ( "mail.smtp.user" , emailConfg . getUser ( ) ) ; props . put ( "mail.smtp.host" , emailConfg . getHost ( ) ) ; props . put ( "mail.smtp.port" , emailConfg . getPort ( ) ) ; props . put ( "mail.smtp.starttls.enable" , "true" ) ; props . put ( "mail.smtp.debug" , emailConfg . getDebug ( ) ) ; props . put ( "mail.smtp.auth" , emailConfg . getAuth ( ) ) ; props . put ( "mail.smtp.ssl.trust" , emailConfg . host ) ; SMTPAuthenticator auth = new SMTPAuthenticator ( emailConfg . getUser ( ) , ( String ) secret . get ( SecretConstants . EMAIL_PASSWORD ) ) ; Session session = Session . getInstance ( props , auth ) ; MimeMessage message = new MimeMessage ( session ) ; message . setFrom ( new InternetAddress ( emailConfg . getUser ( ) ) ) ; message . addRecipient ( Message . RecipientType . TO , new InternetAddress ( to ) ) ; message . setSubject ( subject ) ; message . setContent ( content , "text/html" ) ; // Send message
Transport . send ( message ) ; if ( logger . isInfoEnabled ( ) ) logger . info ( "An email has been sent to " + to + " with subject " + subject ) ; |
public class RestrictedGuacamoleTunnelService { /** * Attempts to add a single instance of the given value to the given
* multiset without exceeding the specified maximum number of values . If
* the value cannot be added without exceeding the maximum , false is
* returned .
* @ param < T >
* The type of values contained within the multiset .
* @ param multiset
* The multiset to attempt to add a value to .
* @ param value
* The value to attempt to add .
* @ param max
* The maximum number of each distinct value that the given multiset
* should hold , or zero if no limit applies .
* @ return
* true if the value was successfully added without exceeding the
* specified maximum , false if the value could not be added . */
private < T > boolean tryAdd ( ConcurrentHashMultiset < T > multiset , T value , int max ) { } } | // Repeatedly attempt to add a new value to the given multiset until we
// explicitly succeed or explicitly fail
while ( true ) { // Get current number of values
int count = multiset . count ( value ) ; // Bail out if the maximum has already been reached
if ( count >= max && max != 0 ) return false ; // Attempt to add one more value
if ( multiset . setCount ( value , count , count + 1 ) ) return true ; // Try again if unsuccessful
} |
public class ControlHandler { /** * Start a new thread to shutdown the server */
private void stopServer ( ) { } } | // Get current server
final Server targetServer = this . getServer ( ) ; // Start a new thread in order to escape the destruction of this Handler
// during the stop process .
new Thread ( ) { @ Override public void run ( ) { try { targetServer . stop ( ) ; } catch ( Exception e ) { // ignore
} } } . start ( ) ; |
public class TextBuffer { /** * Similar to { @ link # resetWithEmpty } , but actively marks current
* text content to be empty string ( whereas former method leaves
* content as undefined ) . */
public void resetWithEmptyString ( ) { } } | mInputBuffer = null ; mInputStart = - 1 ; // indicates shared buffer not used
mInputLen = 0 ; mResultString = "" ; mResultArray = null ; if ( mHasSegments ) { clearSegments ( ) ; } mCurrentSize = 0 ; |
public class TrackerMeanShiftComaniciu2003 {
    /**
     * Searches for the target in the most recent image.
     * @param image Most recent image in the sequence
     */
    public void track(T image) {
        // configure the different regions based on size:
        // region0 = shrunk, region1 = unchanged, region2 = enlarged
        region0.set(region);
        region1.set(region);
        region2.set(region);
        region0.width *= 1 - scaleChange;
        region0.height *= 1 - scaleChange;
        region2.width *= 1 + scaleChange;
        region2.height *= 1 + scaleChange;
        // distance from histogram; 1 is the worst-case default so an
        // unevaluated hypothesis can never win the selection below
        double distance0 = 1, distance1, distance2 = 1;
        // perform mean-shift at the different sizes and compute their distance
        if (!constantScale) {
            // skip the shrunk hypothesis if it would fall below the minimum width
            if (region0.width >= minimumWidth) {
                updateLocation(image, region0);
                distance0 = distanceHistogram(keyHistogram, calcHistogram.getHistogram());
                if (updateHistogram)
                    System.arraycopy(calcHistogram.getHistogram(), 0, histogram0, 0, histogram0.length);
            }
            updateLocation(image, region2);
            distance2 = distanceHistogram(keyHistogram, calcHistogram.getHistogram());
            if (updateHistogram)
                System.arraycopy(calcHistogram.getHistogram(), 0, histogram2, 0, histogram2.length);
        }
        // update the no scale change hypothesis
        updateLocation(image, region1);
        if (!constantScale) {
            distance1 = distanceHistogram(keyHistogram, calcHistogram.getHistogram());
        } else {
            // force it to select the unchanged-scale hypothesis
            distance1 = 0;
        }
        if (updateHistogram)
            System.arraycopy(calcHistogram.getHistogram(), 0, histogram1, 0, histogram1.length);
        // pick the hypothesis whose histogram is closest to the key histogram
        RectangleRotate_F32 selected = null;
        float selectedHist[] = null;
        switch (selectBest(distance0, distance1, distance2)) {
            case 0:
                selected = region0;
                selectedHist = histogram0;
                break;
            case 1:
                selected = region1;
                selectedHist = histogram1;
                break;
            case 2:
                selected = region2;
                selectedHist = histogram2;
                break;
            default:
                throw new RuntimeException("Bug in selectBest");
        }
        // Set region to the best scale, but reduce sensitivity by weighting it against the original size
        // equation 14
        float w = selected.width * (1 - gamma) + gamma * region.width;
        float h = selected.height * (1 - gamma) + gamma * region.height;
        region.set(selected);
        region.width = w;
        region.height = h;
        if (updateHistogram) {
            System.arraycopy(selectedHist, 0, keyHistogram, 0, keyHistogram.length);
        }
    }
}
public class BioAssemblyTools { /** * Returns the maximum extend of the biological molecule in the x , y , or z direction .
* @ param structure
* @ return maximum extend */
public static double getBiologicalMoleculeMaximumExtend ( final Structure structure , List < BiologicalAssemblyTransformation > transformations ) { } } | double [ ] [ ] bounds = getBiologicalMoleculeBounds ( structure , transformations ) ; double xMax = Math . abs ( bounds [ 0 ] [ 0 ] - bounds [ 1 ] [ 0 ] ) ; double yMax = Math . abs ( bounds [ 0 ] [ 1 ] - bounds [ 1 ] [ 1 ] ) ; double zMax = Math . abs ( bounds [ 0 ] [ 2 ] - bounds [ 1 ] [ 2 ] ) ; return Math . max ( xMax , Math . max ( yMax , zMax ) ) ; |
public class BatchTask {
    /**
     * Creates the record readers for the number of inputs as defined by {@link #getNumTaskInputs()}.
     * This method requires that the task configuration, the driver, and the user-code class loader are set.
     */
    protected void initInputReaders() throws Exception {
        final int numInputs = getNumTaskInputs();
        final MutableReader<?>[] inputReaders = new MutableReader<?>[numInputs];
        // Running offset into the physical input gates; each logical input
        // consumes groupSize consecutive gates.
        int currentReaderOffset = 0;
        for (int i = 0; i < numInputs; i++) {
            // ----- create the input readers -----
            // in case where a logical input unions multiple physical inputs, create a union reader
            final int groupSize = this.config.getGroupSize(i);
            if (groupSize == 1) {
                // non-union case: read straight from the single gate
                inputReaders[i] = new MutableRecordReader<IOReadableWritable>(getEnvironment().getInputGate(currentReaderOffset), getEnvironment().getTaskManagerInfo().getTmpDirectories());
            } else if (groupSize > 1) {
                // union case: wrap the group's gates in a UnionInputGate
                InputGate[] readers = new InputGate[groupSize];
                for (int j = 0; j < groupSize; ++j) {
                    readers[j] = getEnvironment().getInputGate(currentReaderOffset + j);
                }
                inputReaders[i] = new MutableRecordReader<IOReadableWritable>(new UnionInputGate(readers), getEnvironment().getTaskManagerInfo().getTmpDirectories());
            } else {
                throw new Exception("Illegal input group size in task configuration: " + groupSize);
            }
            currentReaderOffset += groupSize;
        }
        this.inputReaders = inputReaders;
        // final sanity check: all physical gates must be accounted for
        if (currentReaderOffset != this.config.getNumInputs()) {
            throw new Exception("Illegal configuration: Number of input gates and group sizes are not consistent.");
        }
    }
}
public class Point2i { /** * Convert the given tuple to a real Point2i .
* < p > If the given tuple is already a Point2i , it is replied .
* @ param tuple the tuple .
* @ return the Point2i .
* @ since 14.0 */
public static Point2i convert ( Tuple2D < ? > tuple ) { } } | if ( tuple instanceof Point2i ) { return ( Point2i ) tuple ; } return new Point2i ( tuple . getX ( ) , tuple . getY ( ) ) ; |
public class IntBuffer { /** * Returns a duplicated buffer that shares its content with this buffer .
* < p > The duplicated buffer ' s position , limit , capacity and mark are the same as this buffer .
* The duplicated buffer ' s read - only property and byte order are the same as this buffer ' s .
* < p > The new buffer shares its content with this buffer , which means either buffer ' s change
* of content will be visible to the other . The two buffer ' s position , limit and mark are
* independent . < / p >
* @ return a duplicated buffer that shares its content with this buffer . */
public IntBuffer duplicate ( ) { } } | IntBuffer buf = new IntBuffer ( ( ByteBuffer ) byteBuffer . duplicate ( ) ) ; buf . limit = limit ; buf . position = position ; buf . mark = mark ; return buf ; |
public class ResourceObjectIncludeImpl {
    /**
     * <!-- begin-user-doc -->
     * Resets the given structural feature back to its default value;
     * unknown feature IDs are delegated to the superclass.
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void eUnset(int featureID) {
        switch (featureID) {
            case AfplibPackage.RESOURCE_OBJECT_INCLUDE__OBJ_TYPE:
                setObjType(OBJ_TYPE_EDEFAULT);
                return;
            case AfplibPackage.RESOURCE_OBJECT_INCLUDE__OBJ_NAME:
                setObjName(OBJ_NAME_EDEFAULT);
                return;
            case AfplibPackage.RESOURCE_OBJECT_INCLUDE__XOBJ_OSET:
                setXobjOset(XOBJ_OSET_EDEFAULT);
                return;
            case AfplibPackage.RESOURCE_OBJECT_INCLUDE__YOBJ_OSET:
                setYobjOset(YOBJ_OSET_EDEFAULT);
                return;
            case AfplibPackage.RESOURCE_OBJECT_INCLUDE__OB_ORENT:
                setObOrent(OB_ORENT_EDEFAULT);
                return;
        }
        super.eUnset(featureID);
    }
}
public class Main {
    /** Sets whether execution should break when a script exception is thrown. */
    public void setBreakOnExceptions(boolean value) {
        // Update the debugger core first, then keep the menu checkbox in sync.
        dim.setBreakOnExceptions(value);
        debugGui.getMenubar().getBreakOnExceptions().setSelected(value);
    }
}
public class ComponentImpl {
    /**
     * will be called after invoking constructor, only invoked by constructor (component body execution)
     * @param pc page context whose variables scope is restored
     * @param parent the variables scope to restore on the page context
     * @throws ApplicationException
     */
    public void afterConstructor(PageContext pc, Variables parent) throws ApplicationException {
        // Restore the caller's variables scope and mark construction complete.
        pc.setVariablesScope(parent);
        this.afterConstructor = true;
        // Historical, intentionally disabled registration of constructor UDFs
        // (kept for reference; see the TODO inside):
        /*
         * if (constructorUDFs != null) { Iterator<Entry<Key, UDF>> it = constructorUDFs.entrySet().iterator();
         * Map.Entry<Key, UDF> entry; Key key; UDFPlus udf; PageSource ps; while (it.hasNext()) {
         * entry = it.next(); key = entry.getKey(); udf = (UDFPlus) entry.getValue(); ps = udf.getPageSource();
         * // if (ps != null && ps.equals(getPageSource())) continue; // TODO can we avoid that udfs from the
         * component itself are here? registerUDF(key, udf, false, true); } }
         */
    }
}
public class AbstractResult { /** * Computes the sum over all data items .
* @ param meter the meter of the mean
* @ return the sum of all runs . */
public final double sum ( final AbstractMeter meter ) { } } | checkIfMeterExists ( meter ) ; final AbstractUnivariateStatistic sum = new Sum ( ) ; final CollectionDoubleCollection doubleColl = new CollectionDoubleCollection ( this . meterResults . get ( meter ) ) ; return sum . evaluate ( doubleColl . toArray ( ) , 0 , doubleColl . toArray ( ) . length ) ; |
public class LzoTextInputFormat {
    /**
     * Index an lzo file to allow the input format to split them into separate map
     * jobs. The index is a sequence of longs, each the byte offset of a compressed
     * block start, written next to the data file with the LZO_INDEX_SUFFIX.
     * @param fs
     *          File system that contains the file.
     * @param lzoFile
     *          the lzo file to index.
     * @throws IOException
     */
    public static void createIndex(FileSystem fs, Path lzoFile) throws IOException {
        Configuration conf = fs.getConf();
        CompressionCodecFactory factory = new CompressionCodecFactory(fs.getConf());
        CompressionCodec codec = factory.getCodec(lzoFile);
        ((Configurable) codec).setConf(conf);
        InputStream lzoIs = null;
        FSDataOutputStream os = null;
        // Write to a .tmp file first and rename at the end so readers never
        // observe a partially written index.
        Path outputFile = new Path(lzoFile.toString() + LzoTextInputFormat.LZO_INDEX_SUFFIX);
        Path tmpOutputFile = outputFile.suffix(".tmp");
        try {
            FSDataInputStream is = fs.open(lzoFile);
            os = fs.create(tmpOutputFile);
            LzopDecompressor decompressor = (LzopDecompressor) codec.createDecompressor();
            // for reading the header; opening the stream advances `is` past it
            lzoIs = codec.createInputStream(is, decompressor);
            int numChecksums = decompressor.getChecksumsCount();
            while (true) {
                // read and ignore, we just want to get to the next int
                int uncompressedBlockSize = is.readInt();
                if (uncompressedBlockSize == 0) {
                    // size 0 marks end-of-stream
                    break;
                } else if (uncompressedBlockSize < 0) {
                    throw new EOFException();
                }
                int compressedBlockSize = is.readInt();
                if (compressedBlockSize <= 0) {
                    throw new IOException("Could not read compressed block size");
                }
                long pos = is.getPos();
                // write the pos of the block start (back up over the two ints just read)
                os.writeLong(pos - 8);
                // seek to the start of the next block, skip any checksums
                is.seek(pos + compressedBlockSize + (4 * numChecksums));
            }
        } finally {
            // closing lzoIs also closes the wrapped FSDataInputStream
            if (lzoIs != null) {
                lzoIs.close();
            }
            if (os != null) {
                os.close();
            }
        }
        fs.rename(tmpOutputFile, outputFile);
    }
}
public class Strands {
    /**
     * This utility method prints a stack-trace into a {@link java.io.PrintStream}.
     * Delegates directly to {@link Strand#printStackTrace}.
     * @param trace a stack trace (such as returned from {@link Strand#getStackTrace()}.
     * @param out the {@link java.io.PrintStream} into which the stack trace will be printed.
     */
    public static void printStackTrace(StackTraceElement[] trace, java.io.PrintStream out) {
        Strand.printStackTrace(trace, out);
    }
}
public class WebServiceRefBindingBuilder { /** * This method will be used to create an instance of a WebServiceRefBinding object
* that holds metadata obtained from an @ Resource annotation . The @ Resource annotation
* in this case would have been indicating a JAX - WS service type injection . */
static InjectionBinding < WebServiceRef > createWebServiceRefBindingFromResource ( Resource resource , ComponentNameSpaceConfiguration cnsConfig , Class < ? > serviceClass , String jndiName ) throws InjectionException { } } | InjectionBinding < WebServiceRef > binding = null ; WebServiceRef wsRef = createWebServiceRefFromResource ( resource , serviceClass , jndiName ) ; WebServiceRefInfo wsrInfo = WebServiceRefInfoBuilder . buildWebServiceRefInfo ( wsRef , cnsConfig . getClassLoader ( ) ) ; wsrInfo . setClientMetaData ( JaxWsMetaDataManager . getJaxWsClientMetaData ( cnsConfig . getModuleMetaData ( ) ) ) ; wsrInfo . setServiceInterfaceClassName ( serviceClass . getName ( ) ) ; binding = new WebServiceRefBinding ( wsRef , cnsConfig ) ; // register the metadata , and set a flag on the binding instance that let ' s us
// know this binding represents metadata from an @ Resource annotation
( ( WebServiceRefBinding ) binding ) . setWebServiceRefInfo ( wsrInfo ) ; ( ( WebServiceRefBinding ) binding ) . setResourceType ( true ) ; return binding ; |
public class DefaultLocaleProvider { /** * Retrieves the serialized XML for the given locale from the classpath .
* For example , if the locale is < code > en - US < / code > this method loads
* the file < code > / locales - en - US . xml < / code > from the classpath . */
@ Override public String retrieveLocale ( String lang ) { } } | String r = locales . get ( lang ) ; if ( r == null ) { try { URL u = getClass ( ) . getResource ( "/locales-" + lang + ".xml" ) ; if ( u == null ) { throw new IllegalArgumentException ( "Unable to load locale " + lang + ". Make sure you have a file called " + "'/locales-" + lang + ".xml' at the root of your " + "classpath. Did you add the CSL locale files to " + "your classpath?" ) ; } r = CSLUtils . readURLToString ( u , "UTF-8" ) ; } catch ( IOException e ) { throw new RuntimeException ( e ) ; } locales . put ( lang , r ) ; } return r ; |
public class OCP_Sample {
    /** tag OCP request -> HTTPMessage: attaches an Authorization header built from the given credential string. */
    public static void httpComponent(HttpPut request, String auth) {
        // NOTE(review): a literal "password" suffix is appended to the
        // caller-supplied credential — presumably sample/demo code; confirm
        // this is intended before reuse.
        auth = auth + "password";
        // NOTE(review): the header value is the bare base64 string with no
        // "Basic " scheme prefix, which standard HTTP Basic auth requires —
        // verify the target server accepts this form.
        request.addHeader("Authorization", Base64.encodeBase64String(auth.getBytes(StandardCharsets.UTF_8)));
    }
}
public class PythonStreamExecutionEnvironment {
    /**
     * A thin wrapper layer over {@link StreamExecutionEnvironment#readTextFile(java.lang.String)}.
     * The Java stream is mapped through {@code AdapterMap} so its elements are
     * usable from the Python API.
     * @param path The path of the file, as a URI (e.g., "file:///some/local/file" or "hdfs://host:port/file/path").
     * @return The data stream that represents the data read from the given file as text lines
     * @throws IOException
     */
    public PythonDataStream read_text_file(String path) throws IOException {
        return new PythonDataStream<>(env.readTextFile(path).map(new AdapterMap<String>()));
    }
}
public class CheckArg { /** * Asserts that the specified first object is { @ link Object # equals ( Object ) equal to } the specified second object . This method
* does take null references into consideration .
* @ param < T >
* @ param argument The argument to assert equal to < code > object < / code > .
* @ param argumentName The name that will be used within the exception message for the argument should an exception be thrown
* @ param object The object to assert as equal to < code > argument < / code > .
* @ param objectName The name that will be used within the exception message for < code > object < / code > should an exception be
* thrown ; if < code > null < / code > and < code > object < / code > is not < code > null < / code > , < code > object . toString ( ) < / code > will
* be used .
* @ throws IllegalArgumentException If the specified objects are not equal . */
public static < T > void isEquals ( final T argument , String argumentName , final T object , String objectName ) { } } | if ( argument == null ) { if ( object == null ) return ; // fall through . . . one is null
} else { if ( argument . equals ( object ) ) return ; // fall through . . . they are not equal
} if ( objectName == null ) objectName = getObjectName ( object ) ; throw new IllegalArgumentException ( CommonI18n . argumentMustBeEquals . text ( argumentName , objectName ) ) ; |
public class MethodReturnsConstant {
    /**
     * implements the visitor to look for methods that return a constant.
     * Tracks, per register, whether a single constant value flows into every
     * return; parsing is aborted via StopOpcodeParsingException as soon as the
     * method is proven non-constant.
     * @param seen
     *            the opcode of the currently parsed instruction
     */
    @Override
    public void sawOpcode(int seen) {
        boolean sawSBToString = false;
        try {
            stack.precomputation(this);
            if ((seen >= Const.IRETURN) && (seen <= Const.ARETURN)) {
                // A return instruction: the value on top of the stack must be a
                // constant, and the same constant as any previous return.
                if (stack.getStackDepth() > 0) {
                    OpcodeStack.Item item = stack.getStackItem(0);
                    Integer register = Integer.valueOf(item.getRegisterNumber());
                    Object constant = registerConstants.get(register);
                    if (CONSTANT_DOESNT_EXIST.equals(constant)) {
                        throw new StopOpcodeParsingException();
                    }
                    String returnSig = item.getSignature();
                    // Arrays are only treated as constant when read from a static field.
                    if ((returnSig != null) && returnSig.startsWith(Values.SIG_ARRAY_PREFIX)) {
                        XField f = item.getXField();
                        if ((f == null) || (!f.isStatic())) {
                            throw new StopOpcodeParsingException();
                        }
                    }
                    constant = item.getConstant();
                    if (constant == null) {
                        throw new StopOpcodeParsingException();
                    }
                    // Empty string produced by a StringBuilder.toString() is not reported.
                    if (Boolean.TRUE.equals(item.getUserValue()) && ("".equals(constant))) {
                        throw new StopOpcodeParsingException();
                    }
                    if ((returnConstant != null) && (!returnConstant.equals(constant))) {
                        throw new StopOpcodeParsingException();
                    }
                    returnRegister = Integer.valueOf(item.getRegisterNumber());
                    returnConstant = constant;
                    returnPC = getPC();
                }
            } else if ((seen == Const.GOTO) || (seen == Const.GOTO_W)) {
                if (stack.getStackDepth() > 0) {
                    // Ternaries confuse us too much, if the code has a ternary well - oh well
                    throw new StopOpcodeParsingException();
                }
            } else if (seen == Const.ATHROW) {
                throw new StopOpcodeParsingException();
            } else if (seen == Const.INVOKEVIRTUAL) {
                // Remember a StringBuilder/StringBuffer.toString() so the finally
                // block can tag its result on the stack.
                String clsName = getClassConstantOperand();
                if (SignatureUtils.isPlainStringConvertableClass(clsName)) {
                    sawSBToString = Values.TOSTRING.equals(getNameConstantOperand());
                }
            } else if (((seen >= Const.ISTORE) && (seen <= Const.ASTORE_3)) || (seen == Const.IINC)) {
                // A store (or increment) invalidates or records the register's constant.
                Integer register = Integer.valueOf(getRegisterOperand());
                if ((returnRegister.intValue() != -1) && (register.equals(returnRegister))) {
                    throw new StopOpcodeParsingException();
                }
                if (stack.getStackDepth() > 0) {
                    OpcodeStack.Item item = stack.getStackItem(0);
                    Object constant = item.getConstant();
                    Object regConstant = registerConstants.get(register);
                    if (regConstant != null) {
                        // Second store into this register: keep the constant only
                        // if it is the same value.
                        if ((constant == null) || !constant.equals(regConstant)) {
                            registerConstants.put(register, CONSTANT_DOESNT_EXIST);
                        }
                    } else {
                        if (item.getSignature().contains(Values.SIG_ARRAY_PREFIX)) {
                            registerConstants.put(register, CONSTANT_DOESNT_EXIST);
                        } else {
                            registerConstants.put(register, constant == null ? CONSTANT_DOESNT_EXIST : constant);
                        }
                    }
                } else {
                    registerConstants.put(register, CONSTANT_DOESNT_EXIST);
                }
                if (returnRegister.equals(register)) {
                    Object constant = registerConstants.get(returnRegister);
                    if (CONSTANT_DOESNT_EXIST.equals(constant)) {
                        throw new StopOpcodeParsingException();
                    }
                }
            }
        } finally {
            // Always keep the simulated operand stack in sync, even when parsing
            // was aborted above.
            TernaryPatcher.pre(stack, seen);
            stack.sawOpcode(this, seen);
            TernaryPatcher.post(stack, seen);
            if (sawSBToString && (stack.getStackDepth() > 0)) {
                OpcodeStack.Item item = stack.getStackItem(0);
                item.setUserValue(Boolean.TRUE);
            }
        }
    }
}
public class AbstractArakhneMojo {
    /**
     * Join the values with the given joint, skipping {@code null} and empty
     * values so they never produce stray joints.
     *
     * @param joint the joint.
     * @param values the values.
     * @return the jointed values
     */
    public static String join(String joint, String... values) {
        final StringBuilder b = new StringBuilder();
        for (final String value : values) {
            // Skip null/empty entries; use the idiomatic isEmpty() check
            // instead of comparing against an empty-string constant.
            if (value != null && !value.isEmpty()) {
                if (b.length() > 0) {
                    b.append(joint);
                }
                b.append(value);
            }
        }
        return b.toString();
    }
}
public class ActivityChooserModel {
    /**
     * Gets the index of the given activity in the activity list.
     * @param activity The activity to locate.
     * @return The index if found, INVALID_INDEX (-1) otherwise.
     */
    public int getActivityIndex(ResolveInfo activity) {
        List<ActivityResolveInfo> activities = mActivites;
        final int activityCount = activities.size();
        for (int i = 0; i < activityCount; i++) {
            ActivityResolveInfo currentActivity = activities.get(i);
            // NOTE(review): identity comparison (==), not equals() — this only
            // matches the exact same ResolveInfo instance; presumably intended
            // since entries come from the same resolver pass, but confirm.
            if (currentActivity.resolveInfo == activity) {
                return i;
            }
        }
        return INVALID_INDEX;
    }
}
public class nstcpparam {
    /**
     * Use this API to unset the properties of nstcpparam resource.
     * Properties that need to be unset are specified in args array.
     * NOTE(review): the {@code resource} parameter is not consulted — a fresh
     * nstcpparam is sent and only {@code args} selects the properties to
     * unset; this matches the generated-SDK pattern but confirm it is intended.
     */
    public static base_response unset(nitro_service client, nstcpparam resource, String[] args) throws Exception {
        nstcpparam unsetresource = new nstcpparam();
        return unsetresource.unset_resource(client, args);
    }
}
public class ReplicationLinksInner {
    /**
     * Sets which replica database is primary by failing over from the current primary replica database. This operation might result in data loss.
     * Blocks until the service responds (wraps the async variant with toBlocking()).
     * @param resourceGroupName The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal.
     * @param serverName The name of the server.
     * @param databaseName The name of the database that has the replication link to be failed over.
     * @param linkId The ID of the replication link to be failed over.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     */
    public void beginFailoverAllowDataLoss(String resourceGroupName, String serverName, String databaseName, String linkId) {
        beginFailoverAllowDataLossWithServiceResponseAsync(resourceGroupName, serverName, databaseName, linkId).toBlocking().single().body();
    }
}
public class PasswordSpec {
    /**
     * Parse a string representation of password spec.
     * A password spec string should be `&lt;trait spec&gt;&lt;length spec&gt;`.
     * Where "trait spec" should be a composition of
     * * `a` - indicate lowercase letter required
     * * `A` - indicate uppercase letter required
     * * `0` - indicate digit letter required
     * * `#` - indicate special character required
     * "length spec" should be `[min,max]` where `max` can be omitted.
     * Here are examples of valid "length spec":
     * * `[6,20]` // min length: 6, max length: 20
     * * `[8,]`   // min length: 8, max length: unlimited
     * And examples of invalid "length spec":
     * * `[8]`    // "," required after min part
     * * `[a,f]`  // min and max part needs to be decimal digit(s)
     * * `[3,9)`  // length spec must be started with `[` and end with `]`
     * @param spec a string representation of password spec
     * @return a {@link PasswordSpec} instance
     */
    public static PasswordSpec parse(String spec) {
        char[] ca = spec.toCharArray();
        int len = ca.length;
        // Empty spec is illegal.
        illegalIf(0 == len, spec);
        Builder builder = new Builder();
        StringBuilder minBuf = new StringBuilder();
        StringBuilder maxBuf = new StringBuilder();
        // lenSpecStart: inside `[...]`; minPart: before the `,` separator.
        boolean lenSpecStart = false;
        boolean minPart = false;
        for (int i = 0; i < len; ++i) {
            char c = ca[i];
            switch (c) {
                case SPEC_LOWERCASE:
                    // Trait chars are illegal inside the length spec.
                    illegalIf(lenSpecStart, spec);
                    builder.requireLowercase();
                    break;
                case SPEC_UPPERCASE:
                    illegalIf(lenSpecStart, spec);
                    builder.requireUppercase();
                    break;
                case SPEC_SPECIAL_CHAR:
                    illegalIf(lenSpecStart, spec);
                    builder.requireSpecialChar();
                    break;
                case SPEC_LENSPEC_START:
                    lenSpecStart = true;
                    minPart = true;
                    break;
                case SPEC_LENSPEC_CLOSE:
                    // `]` before the `,` separator means `[8]`-style spec: illegal.
                    illegalIf(minPart, spec);
                    lenSpecStart = false;
                    break;
                case SPEC_LENSPEC_SEP:
                    minPart = false;
                    break;
                case SPEC_DIGIT:
                    // '0' is the digit-required trait outside the length spec,
                    // but a literal digit inside it.
                    if (!lenSpecStart) {
                        builder.requireDigit();
                    } else {
                        if (minPart) {
                            minBuf.append(c);
                        } else {
                            maxBuf.append(c);
                        }
                    }
                    break;
                default:
                    // Any other character is only legal as a digit inside the length spec.
                    illegalIf(!lenSpecStart || !isDigit(c), spec);
                    if (minPart) {
                        minBuf.append(c);
                    } else {
                        maxBuf.append(c);
                    }
            }
        }
        // Unclosed `[` is illegal.
        illegalIf(lenSpecStart, spec);
        if (minBuf.length() != 0) {
            builder.minLength(Integer.parseInt(minBuf.toString()));
        }
        if (maxBuf.length() != 0) {
            builder.maxLength(Integer.parseInt(maxBuf.toString()));
        }
        // NOTE(review): the Builder itself is returned; presumably Builder is a
        // PasswordSpec subtype (otherwise this would not compile) — confirm.
        return builder;
    }
}
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.