signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class EJSContainer { /** * addBean */ public boolean lockBean ( BeanO beanO , ContainerTx containerTx ) throws RemoteException { } }
return activator . lockBean ( containerTx , beanO . beanId ) ; // d140003.29
public class ImageMiscOps { /** * Rotates the image 90 degrees in the clockwise direction . */ public static void rotateCW ( GrayS32 input , GrayS32 output ) { } }
if ( input . width != output . height || input . height != output . width ) throw new IllegalArgumentException ( "Incompatible shapes" ) ; int h = input . height - 1 ; for ( int y = 0 ; y < input . height ; y ++ ) { int indexIn = input . startIndex + y * input . stride ; for ( int x = 0 ; x < input . width ; x ++ ) { output . unsafe_set ( h - y , x , input . data [ indexIn ++ ] ) ; } }
public class Char { /** * Decodes all ampersand sequences in the string */ public static String decodeAmpersand ( String s ) { } }
if ( s == null || s . indexOf ( '&' ) == - 1 ) return ( s ) ; StringBuilder result = new StringBuilder ( ) ; int [ ] eatLength = new int [ 1 ] ; // add this in order to multithread safe while ( s . length ( ) != 0 ) { char c = eatAmpersand ( s , eatLength ) ; if ( eatLength [ 0 ] > 1 ) { result . append ( c ) ; s = s . substring ( eatLength [ 0 ] ) ; } else { result . append ( s . charAt ( 0 ) ) ; s = s . substring ( 1 ) ; } } return ( result . toString ( ) ) ;
public class OrderNoteUrl { /** * Get Resource Url for GetReturnNote * @ param noteId Unique identifier of a particular note to retrieve . * @ param responseFields Filtering syntax appended to an API call to increase or decrease the amount of data returned inside a JSON object . This parameter should only be used to retrieve data . Attempting to update data using this parameter may cause data loss . * @ param returnId Unique identifier of the return whose items you want to get . * @ return String Resource Url */ public static MozuUrl getReturnNoteUrl ( String noteId , String responseFields , String returnId ) { } }
UrlFormatter formatter = new UrlFormatter ( "/api/commerce/returns/{returnId}/notes/{noteId}?responseFields={responseFields}" ) ; formatter . formatUrl ( "noteId" , noteId ) ; formatter . formatUrl ( "responseFields" , responseFields ) ; formatter . formatUrl ( "returnId" , returnId ) ; return new MozuUrl ( formatter . getResourceUrl ( ) , MozuUrl . UrlLocation . TENANT_POD ) ;
public class SwingUtil { /** * Center the given window within the screen boundaries . * @ param window the window to be centered . */ public static void centerWindow ( Window window ) { } }
Rectangle bounds ; try { bounds = GraphicsEnvironment . getLocalGraphicsEnvironment ( ) . getDefaultScreenDevice ( ) . getDefaultConfiguration ( ) . getBounds ( ) ; } catch ( Throwable t ) { Toolkit tk = window . getToolkit ( ) ; Dimension ss = tk . getScreenSize ( ) ; bounds = new Rectangle ( ss ) ; } int width = window . getWidth ( ) , height = window . getHeight ( ) ; window . setBounds ( bounds . x + ( bounds . width - width ) / 2 , bounds . y + ( bounds . height - height ) / 2 , width , height ) ;
public class WebGL10 { /** * < p > { @ code glAttachShader } attaches the shader object specified by { @ code shaderID } to the program object * specified by { @ code programID } . This indicates that shader will be included in link operations that will be * performed on program . < / p > * < p > { @ link # GL _ INVALID _ VALUE } is generated if either program or shader is not a value generated by OpenGL . < / p > * < p > { @ link # GL _ INVALID _ OPERATION } is generated if program is not a program object . < / p > * < p > { @ link # GL _ INVALID _ OPERATION } is generated if shader is not a shader object . < / p > * < p > { @ link # GL _ INVALID _ OPERATION } is generated if shader is already attached to program . < / p > * @ param programID Specifies the program object to which a shader object will be attached . * @ param shaderID Specifies the shader object that is to be attached . */ public static void glAttachShader ( int programID , int shaderID ) { } }
checkContextCompatibility ( ) ; JavaScriptObject program = WebGLObjectMap . get ( ) . toProgram ( programID ) ; JavaScriptObject shader = WebGLObjectMap . get ( ) . toShader ( shaderID ) ; nglAttachShader ( program , shader ) ;
public class StructrCMISService { /** * - - - - - interface CmisService - - - - - */ @ Override public void deleteObjectOrCancelCheckOut ( String repositoryId , String objectId , Boolean allVersions , ExtensionsData extension ) { } }
objectService . deleteObject ( repositoryId , objectId , allVersions , extension ) ;
public class CompactingHashTable { /** * Replaces record in hash table if record already present or append record if not . * May trigger expensive compaction . * @ param record record to insert or replace * @ param tempHolder instance of T that will be overwritten * @ throws IOException */ public void insertOrReplaceRecord ( T record , T tempHolder ) throws IOException { } }
final int searchHashCode = hash ( this . buildSideComparator . hash ( record ) ) ; final int posHashCode = searchHashCode % this . numBuckets ; // get the bucket for the given hash code MemorySegment originalBucket = this . buckets [ posHashCode >> this . bucketsPerSegmentBits ] ; int originalBucketOffset = ( posHashCode & this . bucketsPerSegmentMask ) << NUM_INTRA_BUCKET_BITS ; MemorySegment bucket = originalBucket ; int bucketInSegmentOffset = originalBucketOffset ; // get the basic characteristics of the bucket final int partitionNumber = bucket . get ( bucketInSegmentOffset + HEADER_PARTITION_OFFSET ) ; final InMemoryPartition < T > partition = this . partitions . get ( partitionNumber ) ; final MemorySegment [ ] overflowSegments = partition . overflowSegments ; this . buildSideComparator . setReference ( record ) ; int countInSegment = bucket . getInt ( bucketInSegmentOffset + HEADER_COUNT_OFFSET ) ; int numInSegment = 0 ; int posInSegment = bucketInSegmentOffset + BUCKET_HEADER_LENGTH ; long currentForwardPointer = BUCKET_FORWARD_POINTER_NOT_SET ; // loop over all segments that are involved in the bucket ( original bucket plus overflow buckets ) while ( true ) { while ( numInSegment < countInSegment ) { final int thisCode = bucket . getInt ( posInSegment ) ; posInSegment += HASH_CODE_LEN ; // check if the hash code matches if ( thisCode == searchHashCode ) { // get the pointer to the pair final int pointerOffset = bucketInSegmentOffset + BUCKET_POINTER_START_OFFSET + ( numInSegment * POINTER_LEN ) ; final long pointer = bucket . getLong ( pointerOffset ) ; numInSegment ++ ; // deserialize the key to check whether it is really equal , or whether we had only a hash collision try { partition . readRecordAt ( pointer , tempHolder ) ; if ( this . buildSideComparator . equalToReference ( tempHolder ) ) { long newPointer = partition . appendRecord ( record ) ; bucket . putLong ( pointerOffset , newPointer ) ; partition . 
setCompaction ( false ) ; if ( ( newPointer >> this . pageSizeInBits ) > this . compactionMemory . getBlockCount ( ) ) { this . compactionMemory . allocateSegments ( ( int ) ( newPointer >> this . pageSizeInBits ) ) ; } return ; } } catch ( EOFException e ) { // system is out of memory so we attempt to reclaim memory with a copy compact run long newPointer ; try { compactPartition ( partition . getPartitionNumber ( ) ) ; // retry append newPointer = this . partitions . get ( partitionNumber ) . appendRecord ( record ) ; } catch ( EOFException ex ) { throw new RuntimeException ( "Memory ran out. Compaction failed. numPartitions: " + this . partitions . size ( ) + " minPartition: " + getMinPartition ( ) + " maxPartition: " + getMaxPartition ( ) + " bucketSize: " + this . buckets . length + " Message: " + ex . getMessage ( ) ) ; } catch ( IndexOutOfBoundsException ex ) { throw new RuntimeException ( "Memory ran out. Compaction failed. numPartitions: " + this . partitions . size ( ) + " minPartition: " + getMinPartition ( ) + " maxPartition: " + getMaxPartition ( ) + " bucketSize: " + this . buckets . length + " Message: " + ex . getMessage ( ) ) ; } bucket . putLong ( pointerOffset , newPointer ) ; return ; } catch ( IndexOutOfBoundsException e ) { // system is out of memory so we attempt to reclaim memory with a copy compact run long newPointer ; try { compactPartition ( partition . getPartitionNumber ( ) ) ; // retry append newPointer = this . partitions . get ( partitionNumber ) . appendRecord ( record ) ; } catch ( EOFException ex ) { throw new RuntimeException ( "Memory ran out. Compaction failed. numPartitions: " + this . partitions . size ( ) + " minPartition: " + getMinPartition ( ) + " maxPartition: " + getMaxPartition ( ) + " bucketSize: " + this . buckets . length + " Message: " + ex . getMessage ( ) ) ; } catch ( IndexOutOfBoundsException ex ) { throw new RuntimeException ( "Memory ran out. Compaction failed. numPartitions: " + this . partitions . 
size ( ) + " minPartition: " + getMinPartition ( ) + " maxPartition: " + getMaxPartition ( ) + " bucketSize: " + this . buckets . length + " Message: " + ex . getMessage ( ) ) ; } bucket . putLong ( pointerOffset , newPointer ) ; return ; } catch ( IOException e ) { throw new RuntimeException ( "Error deserializing record from the hashtable: " + e . getMessage ( ) , e ) ; } } else { numInSegment ++ ; } } // this segment is done . check if there is another chained bucket long newForwardPointer = bucket . getLong ( bucketInSegmentOffset + HEADER_FORWARD_OFFSET ) ; if ( newForwardPointer == BUCKET_FORWARD_POINTER_NOT_SET ) { // nothing found . append and insert long pointer = partition . appendRecord ( record ) ; insertBucketEntryFromSearch ( partition , originalBucket , bucket , originalBucketOffset , bucketInSegmentOffset , countInSegment , currentForwardPointer , searchHashCode , pointer ) ; if ( ( pointer >> this . pageSizeInBits ) > this . compactionMemory . getBlockCount ( ) ) { this . compactionMemory . allocateSegments ( ( int ) ( pointer >> this . pageSizeInBits ) ) ; } return ; } final int overflowSegNum = ( int ) ( newForwardPointer >>> 32 ) ; bucket = overflowSegments [ overflowSegNum ] ; bucketInSegmentOffset = ( int ) ( newForwardPointer & 0xffffffff ) ; countInSegment = bucket . getInt ( bucketInSegmentOffset + HEADER_COUNT_OFFSET ) ; posInSegment = bucketInSegmentOffset + BUCKET_HEADER_LENGTH ; numInSegment = 0 ; currentForwardPointer = newForwardPointer ; }
public class SSLContext { /** * Cipher Suite available for negotiation in SSL handshake . * < br > * This complex directive uses a colon - separated cipher - spec string consisting * of OpenSSL cipher specifications to configure the Cipher Suite the client * is permitted to negotiate in the SSL handshake phase . Notice that this * directive can be used both in per - server and per - directory context . * In per - server context it applies to the standard SSL handshake when a * connection is established . In per - directory context it forces a SSL * renegotiation with the reconfigured Cipher Suite after the HTTP request * was read but before the HTTP response is sent . * @ param ctx Server or Client context to use . * @ param ciphers An SSL cipher specification . * @ return { @ code true } if successful * @ throws Exception if an error happened * @ deprecated Use { @ link # setCipherSuite ( long , String , boolean ) } . */ @ Deprecated public static boolean setCipherSuite ( long ctx , String ciphers ) throws Exception { } }
return setCipherSuite ( ctx , ciphers , false ) ;
public class AWSDeviceFarmClient { /** * Deletes the run , given the run ARN . * < b > Note < / b > Deleting this resource does not stop an in - progress run . * @ param deleteRunRequest * Represents a request to the delete run operation . * @ return Result of the DeleteRun operation returned by the service . * @ throws ArgumentException * An invalid argument was specified . * @ throws NotFoundException * The specified entity was not found . * @ throws LimitExceededException * A limit was exceeded . * @ throws ServiceAccountException * There was a problem with the service account . * @ sample AWSDeviceFarm . DeleteRun * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / devicefarm - 2015-06-23 / DeleteRun " target = " _ top " > AWS API * Documentation < / a > */ @ Override public DeleteRunResult deleteRun ( DeleteRunRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeDeleteRun ( request ) ;
public class HldSeSelEntityProcNms { /** * < p > Get processor name for Entity with file delete . < / p > * @ param pClass a Class * @ return a thing */ protected final String getForFDelete ( final Class < ? > pClass ) { } }
if ( SeGoodsSpecifics . class == pClass || SeServiceSpecifics . class == pClass ) { return PrcEntityFDelete . class . getSimpleName ( ) ; } return null ;
public class PrcPaymentToSave { /** * < p > Make save preparations before insert / update block if it ' s need . < / p > * @ param pAddParam additional param * @ param pEntity entity * @ param pRequestData Request Data * @ throws Exception - an exception */ @ Override public final void makeFirstPrepareForSave ( final Map < String , Object > pAddParam , final PaymentTo pEntity , final IRequestData pRequestData ) throws Exception { } }
// BeigeORM refresh : pEntity . setAccCash ( getSrvOrm ( ) . retrieveEntity ( pAddParam , pEntity . getAccCash ( ) ) ) ; if ( pEntity . getAccCash ( ) . getSubaccType ( ) != null && pEntity . getSubaccCashId ( ) == null ) { throw new ExceptionWithCode ( ExceptionWithCode . WRONG_PARAMETER , "select_subaccount" ) ; } if ( pEntity . getPurchaseInvoice ( ) != null ) { PurchaseInvoice purchaseInvoice = getSrvOrm ( ) . retrieveEntity ( pAddParam , pEntity . getPurchaseInvoice ( ) ) ; if ( ! purchaseInvoice . getHasMadeAccEntries ( ) || purchaseInvoice . getReversedId ( ) != null ) { throw new ExceptionWithCode ( ExceptionWithCode . WRONG_PARAMETER , "purchase_invoice_must_be_accounted" ) ; } pEntity . setPurchaseInvoice ( purchaseInvoice ) ; }
public class DefaultCommandMethodVerifier { /** * Verify a { @ link CommandMethod } with its { @ link CommandSegments } . This method verifies that the command exists and that * the required number of arguments is declared . * @ param commandSegments * @ param commandMethod */ public void validate ( CommandSegments commandSegments , CommandMethod commandMethod ) throws CommandMethodSyntaxException { } }
LettuceAssert . notEmpty ( commandSegments . getCommandType ( ) . name ( ) , "Command name must not be empty" ) ; CommandDetail commandDetail = findCommandDetail ( commandSegments . getCommandType ( ) . name ( ) ) . orElseThrow ( ( ) -> syntaxException ( commandSegments . getCommandType ( ) . name ( ) , commandMethod ) ) ; validateParameters ( commandDetail , commandSegments , commandMethod ) ;
public class StorageReader { /** * Read the data at the given offset , the data can be spread over multiple data buffers */ private byte [ ] getMMapBytes ( long offset ) throws IOException { } }
// Read the first 4 bytes to get the size of the data ByteBuffer buf = getDataBuffer ( offset ) ; int maxLen = ( int ) Math . min ( 5 , dataSize - offset ) ; int size ; if ( buf . remaining ( ) >= maxLen ) { // Continuous read int pos = buf . position ( ) ; size = LongPacker . unpackInt ( buf ) ; // Used in case of data is spread over multiple buffers offset += buf . position ( ) - pos ; } else { // The size of the data is spread over multiple buffers int len = maxLen ; int off = 0 ; sizeBuffer . reset ( ) ; while ( len > 0 ) { buf = getDataBuffer ( offset + off ) ; int count = Math . min ( len , buf . remaining ( ) ) ; buf . get ( sizeBuffer . getBuf ( ) , off , count ) ; off += count ; len -= count ; } size = LongPacker . unpackInt ( sizeBuffer ) ; offset += sizeBuffer . getPos ( ) ; buf = getDataBuffer ( offset ) ; } // Create output bytes byte [ ] res = new byte [ size ] ; // Check if the data is one buffer if ( buf . remaining ( ) >= size ) { // Continuous read buf . get ( res , 0 , size ) ; } else { int len = size ; int off = 0 ; while ( len > 0 ) { buf = getDataBuffer ( offset ) ; int count = Math . min ( len , buf . remaining ( ) ) ; buf . get ( res , off , count ) ; offset += count ; off += count ; len -= count ; } } return res ;
public class LazyReact { /** * Start a reactive flow from a Collection using an Iterator * < pre > * { @ code * new LazyReact ( 10,10 ) . from ( myList ) * . map ( this : : process ) ; * < / pre > * @ param collection - Collection SimpleReact will iterate over at the skip of the flow * @ return FutureStream */ @ SuppressWarnings ( "unchecked" ) public < R > FutureStream < R > from ( final Collection < R > collection ) { } }
return fromStream ( collection . stream ( ) ) ;
public class MultiBuf { /** * Reads data from the channel and appends it to the buffer . * Precondition : received event that the channel has data to be read . */ @ Override public int append ( ReadableByteChannel channel ) throws IOException { } }
assert invariant ( true ) ; int totalRead = 0 ; try { boolean done ; // precondition : the channel has data do { ByteBuffer dest = writableBuf ( ) ; int space = dest . remaining ( ) ; assert space > 0 ; int read = channel . read ( dest ) ; if ( read >= 0 ) { totalRead += read ; } else { // end of stream ( e . g . the other end closed the connection ) removeLastBufferIfEmpty ( ) ; sizeChanged ( ) ; assert invariant ( true ) ; return - 1 ; } // if buffer wasn ' t filled - > no data is available in channel done = read < space ; } while ( ! done ) ; } finally { removeLastBufferIfEmpty ( ) ; sizeChanged ( ) ; assert invariant ( true ) ; } return totalRead ;
public class StreamMetrics { /** * This method increments the counter of failed Stream deletions in the system as well as the failed deletion * attempts for this specific Stream . * @ param scope Scope . * @ param streamName Name of the Stream . */ public void deleteStreamFailed ( String scope , String streamName ) { } }
DYNAMIC_LOGGER . incCounterValue ( globalMetricName ( DELETE_STREAM_FAILED ) , 1 ) ; DYNAMIC_LOGGER . incCounterValue ( DELETE_STREAM_FAILED , 1 , streamTags ( scope , streamName ) ) ;
public class BeanMapping { /** * 创建srcClass和targetClass之间的BeanMapping操作 */ public static BeanMapping create ( Class srcClass , Class targetClass ) { } }
BeanMappingObject config = BeanMappingConfigHelper . getInstance ( ) . getBeanMappingObject ( srcClass , targetClass ) ; if ( config == null ) { throw new BeanMappingException ( "can not found mapping config for srcClass[" + srcClass . toString ( ) + "] targetClass[" + targetClass + "]" ) ; } return new BeanMapping ( config ) ;
public class TableCellWithActionExample { /** * Creates and configures the table to be used by the example . The table is configured with global rather than user * data . Although this is not a realistic scenario , it will suffice for this example . * @ return a new configured table . */ private WDataTable createTable ( ) { } }
WDataTable table = new WDataTable ( ) ; table . addColumn ( new WTableColumn ( "First name" , new WTextField ( ) ) ) ; table . addColumn ( new WTableColumn ( "Last name" , new WTextField ( ) ) ) ; table . addColumn ( new WTableColumn ( "DOB" , new WDateField ( ) ) ) ; table . addColumn ( new WTableColumn ( "Action" , new ExampleButton ( ) ) ) ; table . setExpandMode ( ExpandMode . CLIENT ) ; table . setDataModel ( createTableModel ( ) ) ; return table ;
public class PersonGroupPersonsImpl { /** * Create a new person in a specified person group . * @ param personGroupId Id referencing a particular person group . * @ param createOptionalParameter the object representing the optional parameters to be set before calling this API * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the Person object */ public Observable < Person > createAsync ( String personGroupId , CreatePersonGroupPersonsOptionalParameter createOptionalParameter ) { } }
return createWithServiceResponseAsync ( personGroupId , createOptionalParameter ) . map ( new Func1 < ServiceResponse < Person > , Person > ( ) { @ Override public Person call ( ServiceResponse < Person > response ) { return response . body ( ) ; } } ) ;
public class SBGNLayoutManager { /** * This method replaces ports of arc objects with their owners . * @ param arcs Arc list of SBGN model */ private void removePortsFromArcs ( List < Arc > arcs ) { } }
for ( Arc arc : arcs ) { // If source is port , first clear port indicators else retrieve it from hashmaps if ( arc . getSource ( ) instanceof Port ) { Glyph source = portIDToOwnerGlyph . get ( ( ( Port ) arc . getSource ( ) ) . getId ( ) ) ; arc . setSource ( source ) ; } // If target is port , first clear port indicators else retrieve it from hashmaps if ( arc . getTarget ( ) instanceof Port ) { Glyph target = portIDToOwnerGlyph . get ( ( ( Port ) arc . getTarget ( ) ) . getId ( ) ) ; arc . setTarget ( target ) ; } }
public class Task { /** * Blocks until the task is complete or times out . * @ return { @ code true } if the task completed ( has a result , an error , or was cancelled ) . * { @ code false } otherwise . */ public boolean waitForCompletion ( long duration , TimeUnit timeUnit ) throws InterruptedException { } }
synchronized ( lock ) { if ( ! isCompleted ( ) ) { lock . wait ( timeUnit . toMillis ( duration ) ) ; } return isCompleted ( ) ; }
public class NmeaStreamProcessor { /** * Returns the index of the closest matching message in terms of arrival * time . Matching equates to having a referenced checksum . If not found * returns null . If there is a problem parsing the message an * { @ link AisParseException } is thrown . * @ param time * @ param checksum * @ return */ private Integer findClosestMatchingMessageInTermsOfArrivalTime ( long time , String checksum ) { } }
// find the closest matching ais message in terms of arrival time Long lowestTimeDiff = null ; Integer lowestTimeDiffIndex = null ; for ( int i = 0 ; i < getNumLines ( ) ; i ++ ) { String msg = getLine ( i ) ; Long msgTime = getLineTime ( i ) ; if ( ! isExactEarthTimestamp ( msg ) ) { try { AisNmeaMessage nmea = new AisNmeaMessage ( msg ) ; if ( nmea . getChecksum ( ) . equals ( checksum ) ) { long diff = Math . abs ( msgTime - time ) ; boolean closer = ( lowestTimeDiff == null || diff < lowestTimeDiff ) && ( diff <= MAXIMUM_ARRIVAL_TIME_DIFFERENCE_MS ) ; if ( closer ) { lowestTimeDiff = diff ; lowestTimeDiffIndex = i ; } } } catch ( AisParseException e ) { log . debug ( e . getMessage ( ) ) ; } } } return lowestTimeDiffIndex ;
public class TransportProtocol { /** * Disconnect from the remote host . No more messages can be sent after this * method has been called . * @ param reason * @ param disconnectReason * , description * @ throws IOException */ public void disconnect ( int reason , String disconnectReason ) { } }
ByteArrayWriter baw = new ByteArrayWriter ( ) ; try { this . disconnectReason = disconnectReason ; baw . write ( SSH_MSG_DISCONNECT ) ; baw . writeInt ( reason ) ; baw . writeString ( disconnectReason ) ; baw . writeString ( "" ) ; Log . info ( this , "Sending SSH_MSG_DISCONNECT [" + disconnectReason + "]" ) ; sendMessage ( baw . toByteArray ( ) , true ) ; } catch ( Throwable t ) { } finally { try { baw . close ( ) ; } catch ( IOException e ) { } internalDisconnect ( ) ; }
public class PubSubCommandHandler { /** * Check whether { @ link ResponseHeaderReplayOutput } contains a Pub / Sub message that requires Pub / Sub dispatch instead of to * be used as Command output . * @ param replay * @ return */ private static boolean isPubSubMessage ( ResponseHeaderReplayOutput < ? , ? > replay ) { } }
if ( replay == null ) { return false ; } String firstElement = replay . firstElement ; if ( replay . multiCount != null && firstElement != null ) { if ( replay . multiCount == 3 && firstElement . equalsIgnoreCase ( PubSubOutput . Type . message . name ( ) ) ) { return true ; } if ( replay . multiCount == 4 && firstElement . equalsIgnoreCase ( PubSubOutput . Type . pmessage . name ( ) ) ) { return true ; } } return false ;
public class KeyValuePairKeyComparator { /** * { @ inheritDoc } */ @ Override public int compare ( final KeyValuePair < K , V > o1 , final KeyValuePair < K , V > o2 ) { } }
return o1 . getKey ( ) . compareTo ( o2 . getKey ( ) ) ;
public class WssServiceClientImpl { /** * The method extract the data from the given { @ link org . whitesource . agent . api . dispatch . ResultEnvelope } . * @ param response HTTP response as string . * @ return String with logical result in JSON format . * @ throws IOException * @ throws WssServiceException */ protected String extractResultData ( String response ) throws IOException , WssServiceException { } }
// parse response ResultEnvelope envelope = gson . fromJson ( response , ResultEnvelope . class ) ; if ( envelope == null ) { throw new WssServiceException ( "Empty response, response data is: " + response ) ; } // extract info from envelope String message = envelope . getMessage ( ) ; String data = envelope . getData ( ) ; String requestToken = envelope . getRequestToken ( ) ; // service fault ? if ( ResultEnvelope . STATUS_SUCCESS != envelope . getStatus ( ) ) { throw new WssServiceException ( message + ": " + data , requestToken ) ; } return data ;
public class DatabasesInner { /** * Add Database principals permissions . * @ param resourceGroupName The name of the resource group containing the Kusto cluster . * @ param clusterName The name of the Kusto cluster . * @ param databaseName The name of the database in the Kusto cluster . * @ param value The list of Kusto database principals . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the DatabasePrincipalListResultInner object */ public Observable < ServiceResponse < DatabasePrincipalListResultInner > > addPrincipalsWithServiceResponseAsync ( String resourceGroupName , String clusterName , String databaseName , List < DatabasePrincipalInner > value ) { } }
if ( resourceGroupName == null ) { throw new IllegalArgumentException ( "Parameter resourceGroupName is required and cannot be null." ) ; } if ( clusterName == null ) { throw new IllegalArgumentException ( "Parameter clusterName is required and cannot be null." ) ; } if ( databaseName == null ) { throw new IllegalArgumentException ( "Parameter databaseName is required and cannot be null." ) ; } if ( this . client . subscriptionId ( ) == null ) { throw new IllegalArgumentException ( "Parameter this.client.subscriptionId() is required and cannot be null." ) ; } if ( this . client . apiVersion ( ) == null ) { throw new IllegalArgumentException ( "Parameter this.client.apiVersion() is required and cannot be null." ) ; } Validator . validate ( value ) ; DatabasePrincipalListRequest databasePrincipalsToAdd = new DatabasePrincipalListRequest ( ) ; databasePrincipalsToAdd . withValue ( value ) ; return service . addPrincipals ( resourceGroupName , clusterName , databaseName , this . client . subscriptionId ( ) , this . client . apiVersion ( ) , this . client . acceptLanguage ( ) , databasePrincipalsToAdd , this . client . userAgent ( ) ) . flatMap ( new Func1 < Response < ResponseBody > , Observable < ServiceResponse < DatabasePrincipalListResultInner > > > ( ) { @ Override public Observable < ServiceResponse < DatabasePrincipalListResultInner > > call ( Response < ResponseBody > response ) { try { ServiceResponse < DatabasePrincipalListResultInner > clientResponse = addPrincipalsDelegate ( response ) ; return Observable . just ( clientResponse ) ; } catch ( Throwable t ) { return Observable . error ( t ) ; } } } ) ;
import java . util . * ; class Main { /** * This function creates a list by inserting given string after every element of the input tuple . * > > > insert _ string ( { 5 , 6 , 7 , 4 , 9 } , ' FDF ' ) * [ 5 , ' FDF ' , 6 , ' FDF ' , 7 , ' FDF ' , 4 , ' FDF ' , 9 , ' FDF ' ] * > > > insert _ string ( { 7 , 8 , 9 , 10 } , ' PF ' ) * [ 7 , ' PF ' , 8 , ' PF ' , 9 , ' PF ' , 10 , ' PF ' ] * > > > insert _ string ( { 11 , 14 , 12 , 1 , 4 } , ' JH ' ) * [ 11 , ' JH ' , 14 , ' JH ' , 12 , ' JH ' , 1 , ' JH ' , 4 , ' JH ' ] * @ param input _ tuple The input tuple . * @ param input _ str The string to be inserted . * @ returns A list with expanded elements . */ public static List < Object > insertString ( List < Integer > inputList , String inputStr ) { } }
List < Object > result = new ArrayList < > ( ) ; for ( Integer ele : inputList ) { result . add ( ele ) ; result . add ( inputStr ) ; } return result ;
public class JWTPayload { /** * Sets the subject claim identifying the principal that is the subject of the JWT or { @ code null } for none . * Overriding is only supported for the purpose of calling the super implementation and changing * the return type , but nothing else . */ public JWTPayload setSubject ( String subject ) { } }
this . subject = subject ; this . put ( PayloadConstants . SUBJECT , subject ) ; return this ;
public class KafkaSimpleStreamingExtractor { /** * Return the next record when available . Will never time out since this is a streaming source . */ @ Override public RecordEnvelope < D > readRecordEnvelopeImpl ( ) throws DataRecordException , IOException { } }
if ( ! _isStarted . get ( ) ) { throw new IOException ( "Streaming extractor has not been started." ) ; } while ( ( _records == null ) || ( ! _records . hasNext ( ) ) ) { synchronized ( _consumer ) { if ( _close . get ( ) ) { throw new ClosedChannelException ( ) ; } _records = _consumer . poll ( this . fetchTimeOut ) . iterator ( ) ; } } ConsumerRecord < S , D > record = _records . next ( ) ; _rowCount . getAndIncrement ( ) ; return new RecordEnvelope < D > ( record . value ( ) , new KafkaWatermark ( _partition , new LongWatermark ( record . offset ( ) ) ) ) ;
public class TransformerFactoryImpl { /** * Look up the value of a feature . * < p > The feature name is any fully - qualified URI . It is * possible for an TransformerFactory to recognize a feature name but * to be unable to return its value ; this is especially true * in the case of an adapter for a SAX1 Parser , which has * no way of knowing whether the underlying parser is * validating , for example . < / p > * @ param name The feature name , which is a fully - qualified URI . * @ return The current state of the feature ( true or false ) . */ public boolean getFeature ( String name ) { } }
// feature name cannot be null if ( name == null ) { throw new NullPointerException ( XSLMessages . createMessage ( XSLTErrorResources . ER_GET_FEATURE_NULL_NAME , null ) ) ; } // Try first with identity comparison , which // will be faster . if ( ( DOMResult . FEATURE == name ) || ( DOMSource . FEATURE == name ) || ( SAXResult . FEATURE == name ) || ( SAXSource . FEATURE == name ) || ( StreamResult . FEATURE == name ) || ( StreamSource . FEATURE == name ) || ( SAXTransformerFactory . FEATURE == name ) || ( SAXTransformerFactory . FEATURE_XMLFILTER == name ) ) return true ; else if ( ( DOMResult . FEATURE . equals ( name ) ) || ( DOMSource . FEATURE . equals ( name ) ) || ( SAXResult . FEATURE . equals ( name ) ) || ( SAXSource . FEATURE . equals ( name ) ) || ( StreamResult . FEATURE . equals ( name ) ) || ( StreamSource . FEATURE . equals ( name ) ) || ( SAXTransformerFactory . FEATURE . equals ( name ) ) || ( SAXTransformerFactory . FEATURE_XMLFILTER . equals ( name ) ) ) return true ; // secure processing ? else if ( name . equals ( XMLConstants . FEATURE_SECURE_PROCESSING ) ) return m_isSecureProcessing ; else // unknown feature return false ;
public class CommerceShippingFixedOptionRelUtil { /** * Returns the commerce shipping fixed option rel with the primary key or throws a { @ link NoSuchShippingFixedOptionRelException } if it could not be found . * @ param commerceShippingFixedOptionRelId the primary key of the commerce shipping fixed option rel * @ return the commerce shipping fixed option rel * @ throws NoSuchShippingFixedOptionRelException if a commerce shipping fixed option rel with the primary key could not be found */ public static CommerceShippingFixedOptionRel findByPrimaryKey ( long commerceShippingFixedOptionRelId ) throws com . liferay . commerce . shipping . engine . fixed . exception . NoSuchShippingFixedOptionRelException { } }
return getPersistence ( ) . findByPrimaryKey ( commerceShippingFixedOptionRelId ) ;
public class RTMPProtocolEncoder { /** * { @ inheritDoc } */ public IoBuffer encodeBytesRead ( BytesRead bytesRead ) { } }
final IoBuffer out = IoBuffer . allocate ( 4 ) ; out . putInt ( bytesRead . getBytesRead ( ) ) ; return out ;
public class StringParser { /** * Parse the given { @ link String } as unsigned { @ link Integer } with radix * { @ link # DEFAULT _ RADIX } . * @ param sStr * The string to parse . May be < code > null < / code > . * @ param aDefault * The default value to be returned if the passed string could not be * converted to a valid value . May be < code > null < / code > . * @ return < code > aDefault < / code > if the string does not represent a valid * value . * @ since 9.0.0 */ @ Nullable public static Long parseUnsignedIntObj ( @ Nullable final String sStr , @ Nullable final Long aDefault ) { } }
return parseUnsignedIntObj ( sStr , DEFAULT_RADIX , aDefault ) ;
public class Params { /** * Test if file parameter is not null and has not empty path . * @ param parameter invocation file parameter , * @ param name the name of invocation parameter . * @ throws IllegalArgumentException if < code > parameter < / code > is null or its path is empty . */ public static void notNullOrEmpty ( File parameter , String name ) throws IllegalArgumentException { } }
if ( parameter == null || parameter . getPath ( ) . isEmpty ( ) ) { throw new IllegalArgumentException ( name + " path is null or empty." ) ; }
public class SessionMapper { /** * Remove all non - serializable fields from session . */ public void sessionWillPassivate ( HttpSessionEvent event ) { } }
for ( Entry < String , SessionFieldMapper > entry : this . entrySet ( ) ) { if ( ! entry . getValue ( ) . serializable ) { event . getSession ( ) . removeAttribute ( entry . getKey ( ) ) ; } }
public class MultiInstanceActivityBehavior { @ SuppressWarnings ( "rawtypes" ) protected int resolveNrOfInstances ( DelegateExecution execution ) { } }
if ( loopCardinalityExpression != null ) { return resolveLoopCardinality ( execution ) ; } else if ( usesCollection ( ) ) { Collection collection = resolveAndValidateCollection ( execution ) ; return collection . size ( ) ; } else { throw new ActivitiIllegalArgumentException ( "Couldn't resolve collection expression nor variable reference" ) ; }
public class CmsUIServlet { /** * Sets the current cms context . < p > * @ param cms the current cms context to set */ public synchronized void setCms ( CmsObject cms ) { } }
if ( m_perThreadCmsObject == null ) { m_perThreadCmsObject = new ThreadLocal < CmsObject > ( ) ; } m_perThreadCmsObject . set ( cms ) ;
public class CalendarThinTableModel { /** * Notify listeners this row is selected ; pass a - 1 to de - select all rows . */ public void fireTableRowSelected ( Object source , int iRowIndex , int iSelectionType ) { } }
this . fireMySelectionChanged ( new MyListSelectionEvent ( source , this , iRowIndex , iSelectionType ) ) ;
public class Util { /** * Adapted from { @ code com . google . common . io . ByteStreams . toByteArray ( ) } . */ public static byte [ ] toByteArray ( InputStream in ) throws IOException { } }
checkNotNull ( in , "in" ) ; try { ByteArrayOutputStream out = new ByteArrayOutputStream ( ) ; copy ( in , out ) ; return out . toByteArray ( ) ; } finally { ensureClosed ( in ) ; }
public class StringUtils { /** * Returns true if given text contains given word ; false otherwise . * @ param text string text to be tested . * @ param word string word to be tested . * @ return true if given text contains given word ; false otherwise . */ public static boolean containsWord ( final String text , final String word ) { } }
if ( text == null || word == null ) { return false ; } if ( text . contains ( word ) ) { Matcher matcher = matchText ( text ) ; for ( ; matcher . find ( ) ; ) { String matchedWord = matcher . group ( 0 ) ; if ( matchedWord . equals ( word ) ) { return true ; } } } return false ;
public class BinomialBoundsN { /** * The above remarks about specialNStar ( ) also apply here . */ private static long specialNPrimeB ( final long numSamplesI , final double p , final double delta ) { } }
final double q , numSamplesF , oneMinusDelta ; double tot , curTerm ; long m ; assertTrue ( numSamplesI >= 1 ) ; assertTrue ( ( 0.0 < p ) && ( p < 1.0 ) ) ; assertTrue ( ( 0.0 < delta ) && ( delta < 1.0 ) ) ; q = 1.0 - p ; oneMinusDelta = 1.0 - delta ; numSamplesF = numSamplesI ; curTerm = Math . pow ( p , numSamplesF ) ; // curTerm = posteriorProbability ( k , k , p ) assertTrue ( curTerm > 1e-100 ) ; // sanity check for non - use of logarithms tot = curTerm ; m = numSamplesI ; while ( tot < oneMinusDelta ) { curTerm = ( curTerm * q * ( m ) ) / ( ( m + 1 ) - numSamplesI ) ; tot += curTerm ; m += 1 ; } return ( m ) ; // don ' t need to back up
public class JsonRequestHandler { /** * Handle a request for a single server and throw an * { @ link javax . management . InstanceNotFoundException } * if the request cannot be handle by the provided server . * Does a check for restrictions as well * @ param pServer server to try * @ param pRequest request to process * @ return the object result from the request * @ throws InstanceNotFoundException if the provided server cant handle the request * @ throws AttributeNotFoundException * @ throws ReflectionException * @ throws MBeanException * @ throws java . io . IOException */ public Object handleRequest ( MBeanServerConnection pServer , R pRequest ) throws InstanceNotFoundException , AttributeNotFoundException , ReflectionException , MBeanException , IOException , NotChangedException { } }
checkForRestriction ( pRequest ) ; checkHttpMethod ( pRequest ) ; return doHandleRequest ( pServer , pRequest ) ;
public class EntitySerDe { /** * Deserialize the entity field that has a keyAsColumn mapping . * @ param fieldName * The name of the entity ' s field we are deserializing . * @ param family * The column family this field is mapped to * @ param prefix * The column qualifier prefix each * @ param result * The HBase Result that represents a row in HBase . * @ return The deserialized entity field value . */ private Object deserializeKeyAsColumn ( String fieldName , byte [ ] family , String prefix , Result result ) { } }
// Construct a map of keyAsColumn field values . From this we ' ll be able // to use the entityComposer to construct the entity field value . byte [ ] prefixBytes = prefix != null ? prefix . getBytes ( ) : null ; Map < CharSequence , Object > fieldValueAsMap = new HashMap < CharSequence , Object > ( ) ; Map < byte [ ] , byte [ ] > familyMap = result . getFamilyMap ( family ) ; for ( Map . Entry < byte [ ] , byte [ ] > entry : familyMap . entrySet ( ) ) { byte [ ] qualifier = entry . getKey ( ) ; // if the qualifier of this column has a prefix that matches the // field prefix , then remove the prefix from the qualifier . if ( prefixBytes != null && qualifier . length > prefixBytes . length && Arrays . equals ( Arrays . copyOf ( qualifier , prefixBytes . length ) , prefixBytes ) ) { qualifier = Arrays . copyOfRange ( qualifier , prefixBytes . length , qualifier . length ) ; } byte [ ] columnBytes = entry . getValue ( ) ; CharSequence keyAsColumnKey = deserializeKeyAsColumnKeyFromBytes ( fieldName , qualifier ) ; Object keyAsColumnValue = deserializeKeyAsColumnValueFromBytes ( fieldName , qualifier , columnBytes ) ; fieldValueAsMap . put ( keyAsColumnKey , keyAsColumnValue ) ; } // Now build the entity field from the fieldValueAsMap . return entityComposer . buildKeyAsColumnField ( fieldName , fieldValueAsMap ) ;
public class PrcManufacturingProcessSave { /** * < p > Make save preparations before insert / update block if it ' s need . < / p > * @ param pAddParam additional param * @ param pEntity entity * @ param pRequestData Request Data * @ throws Exception - an exception */ @ Override public final void makeFirstPrepareForSave ( final Map < String , Object > pAddParam , final ManufacturingProcess pEntity , final IRequestData pRequestData ) throws Exception { } }
if ( pEntity . getItsQuantity ( ) . doubleValue ( ) == 0 ) { throw new ExceptionWithCode ( ExceptionWithCode . WRONG_PARAMETER , "Quantity is 0! " + pAddParam . get ( "user" ) ) ; } pEntity . setInvItem ( getSrvOrm ( ) . retrieveEntity ( pAddParam , pEntity . getInvItem ( ) ) ) ; if ( ! InvItem . WORK_IN_PROGRESS_ID . equals ( pEntity . getInvItem ( ) . getItsType ( ) . getItsId ( ) ) ) { throw new ExceptionWithCode ( ExceptionWithCode . WRONG_PARAMETER , "type_must_be_work_in_progress" ) ; } if ( pEntity . getReversedId ( ) != null ) { pEntity . setIsComplete ( true ) ; pEntity . setTheRest ( BigDecimal . ZERO ) ; } else { pEntity . setTheRest ( pEntity . getItsQuantity ( ) ) ; } // rounding : pEntity . setItsQuantity ( pEntity . getItsQuantity ( ) . setScale ( getSrvAccSettings ( ) . lazyGetAccSettings ( pAddParam ) . getQuantityPrecision ( ) , getSrvAccSettings ( ) . lazyGetAccSettings ( pAddParam ) . getRoundingMode ( ) ) ) ; pEntity . setItsCost ( pEntity . getItsTotal ( ) . divide ( pEntity . getItsQuantity ( ) , getSrvAccSettings ( ) . lazyGetAccSettings ( pAddParam ) . getCostPrecision ( ) , getSrvAccSettings ( ) . lazyGetAccSettings ( pAddParam ) . getRoundingMode ( ) ) ) ;
public class LightWeightBitSet { /** * Set the bit for the given position to false . * @ param bits - bitset * @ param pos - position to be set to false */ public static void clear ( long [ ] bits , int pos ) { } }
int offset = pos >> LONG_SHIFT ; if ( offset >= bits . length ) throw new IndexOutOfBoundsException ( ) ; bits [ offset ] &= ~ ( 1L << pos ) ;
public class RadialCounter { /** * < editor - fold defaultstate = " collapsed " desc = " Visualization " > */ @ Override protected void paintComponent ( Graphics g ) { } }
if ( ! isInitialized ( ) ) { return ; } final Graphics2D G2 = ( Graphics2D ) g . create ( ) ; G2 . setRenderingHint ( RenderingHints . KEY_ANTIALIASING , RenderingHints . VALUE_ANTIALIAS_ON ) ; G2 . setRenderingHint ( RenderingHints . KEY_RENDERING , RenderingHints . VALUE_RENDER_QUALITY ) ; G2 . setRenderingHint ( RenderingHints . KEY_STROKE_CONTROL , RenderingHints . VALUE_STROKE_NORMALIZE ) ; G2 . setRenderingHint ( RenderingHints . KEY_TEXT_ANTIALIASING , RenderingHints . VALUE_TEXT_ANTIALIAS_ON ) ; // Translate the coordinate system related to insets G2 . translate ( getInnerBounds ( ) . x , getInnerBounds ( ) . y ) ; final AffineTransform OLD_TRANSFORM = G2 . getTransform ( ) ; // Draw the frame if ( isFrameVisible ( ) ) { G2 . drawImage ( frameImage , 0 , 0 , null ) ; } // Draw the background if ( isBackgroundVisible ( ) ) { G2 . drawImage ( backgroundImage , 0 , 0 , null ) ; } // Draw the tickmarks if ( rotateTickmarks ) { G2 . rotate ( - rotationAngle , CENTER . getX ( ) , CENTER . getY ( ) ) ; G2 . drawImage ( tickmarksImage , 0 , 0 , null ) ; G2 . setTransform ( OLD_TRANSFORM ) ; } else { G2 . drawImage ( tickmarksImage , 0 , 0 , null ) ; } // Draw pointer if ( ! rotateTickmarks ) { G2 . rotate ( rotationAngle , CENTER . getX ( ) , CENTER . getY ( ) ) ; G2 . drawImage ( pointerImage , 0 , 0 , null ) ; G2 . setTransform ( OLD_TRANSFORM ) ; } else { G2 . drawImage ( pointerImage , 0 , 0 , null ) ; } // Draw the overlay G2 . drawImage ( overlayImage , 0 , 0 , null ) ; // Draw LCD display if ( isLcdVisible ( ) ) { // G2 . drawImage ( lcdImage , ( int ) ( ( getGaugeBounds ( ) . width - lcdImage . getWidth ( ) ) / 2.0 ) , ( int ) ( getGaugeBounds ( ) . height * 0.55 ) , null ) ; G2 . drawImage ( lcdImage , ( int ) ( LCD . getX ( ) ) , ( int ) ( LCD . getY ( ) ) , null ) ; if ( getLcdColor ( ) == LcdColor . CUSTOM ) { G2 . setColor ( getCustomLcdForeground ( ) ) ; } else { G2 . setColor ( getLcdColor ( ) . TEXT_COLOR ) ; } G2 . 
setFont ( getLcdUnitFont ( ) ) ; if ( isLcdUnitStringVisible ( ) ) { unitLayout = new TextLayout ( getLcdUnitString ( ) , G2 . getFont ( ) , RENDER_CONTEXT ) ; UNIT_BOUNDARY . setFrame ( unitLayout . getBounds ( ) ) ; G2 . drawString ( getLcdUnitString ( ) , ( int ) ( LCD . getX ( ) + ( LCD . getWidth ( ) - UNIT_BOUNDARY . getWidth ( ) ) - LCD . getWidth ( ) * 0.03 ) , ( int ) ( LCD . getY ( ) + LCD . getHeight ( ) * 0.76 ) ) ; unitStringWidth = UNIT_BOUNDARY . getWidth ( ) ; } else { unitStringWidth = 0 ; } G2 . setFont ( getLcdValueFont ( ) ) ; switch ( getModel ( ) . getNumberSystem ( ) ) { case HEX : valueLayout = new TextLayout ( Integer . toHexString ( ( int ) getLcdValue ( ) ) . toUpperCase ( ) , G2 . getFont ( ) , RENDER_CONTEXT ) ; VALUE_BOUNDARY . setFrame ( valueLayout . getBounds ( ) ) ; G2 . drawString ( Integer . toHexString ( ( int ) getLcdValue ( ) ) . toUpperCase ( ) , ( int ) ( LCD . getX ( ) + ( LCD . getWidth ( ) - unitStringWidth - VALUE_BOUNDARY . getWidth ( ) ) - LCD . getWidth ( ) * 0.09 ) , ( int ) ( LCD . getY ( ) + lcdImage . getHeight ( ) * 0.76 ) ) ; break ; case OCT : valueLayout = new TextLayout ( Integer . toOctalString ( ( int ) getLcdValue ( ) ) , G2 . getFont ( ) , RENDER_CONTEXT ) ; VALUE_BOUNDARY . setFrame ( valueLayout . getBounds ( ) ) ; G2 . drawString ( Integer . toOctalString ( ( int ) getLcdValue ( ) ) , ( int ) ( LCD . getX ( ) + ( LCD . getWidth ( ) - unitStringWidth - VALUE_BOUNDARY . getWidth ( ) ) - LCD . getWidth ( ) * 0.09 ) , ( int ) ( LCD . getY ( ) + lcdImage . getHeight ( ) * 0.76 ) ) ; break ; case DEC : default : valueLayout = new TextLayout ( formatLcdValue ( getLcdValue ( ) ) , G2 . getFont ( ) , RENDER_CONTEXT ) ; VALUE_BOUNDARY . setFrame ( valueLayout . getBounds ( ) ) ; G2 . drawString ( formatLcdValue ( getLcdValue ( ) ) , ( int ) ( LCD . getX ( ) + ( LCD . getWidth ( ) - unitStringWidth - VALUE_BOUNDARY . getWidth ( ) ) - LCD . getWidth ( ) * 0.09 ) , ( int ) ( LCD . getY ( ) + lcdImage . 
getHeight ( ) * 0.76 ) ) ; break ; } // Draw lcd info string if ( ! getLcdInfoString ( ) . isEmpty ( ) ) { G2 . setFont ( getLcdInfoFont ( ) ) ; infoLayout = new TextLayout ( getLcdInfoString ( ) , G2 . getFont ( ) , RENDER_CONTEXT ) ; INFO_BOUNDARY . setFrame ( infoLayout . getBounds ( ) ) ; G2 . drawString ( getLcdInfoString ( ) , LCD . getBounds ( ) . x + 5 , LCD . getBounds ( ) . y + ( int ) INFO_BOUNDARY . getHeight ( ) + 5 ) ; } } // Draw foreground if ( isForegroundVisible ( ) ) { G2 . drawImage ( foregroundImage , 0 , 0 , null ) ; } if ( ! isEnabled ( ) ) { G2 . drawImage ( disabledImage , 0 , 0 , null ) ; } G2 . dispose ( ) ;
public class YearView { /** * ~ Methoden - - - - - */ @ Override protected void buildContent ( ) { } }
for ( int i = 0 , n = this . getCalendarSystem ( ) . getMaxCountOfMonths ( ) ; i < n ; i ++ ) { Button button = new Button ( ) ; button . getStyleClass ( ) . add ( CSS_CALENDAR_CELL_INSIDE_RANGE ) ; button . setMaxWidth ( Double . MAX_VALUE ) ; button . setMaxHeight ( Double . MAX_VALUE ) ; GridPane . setVgrow ( button , Priority . ALWAYS ) ; GridPane . setHgrow ( button , Priority . ALWAYS ) ; button . setOnAction ( actionEvent -> { if ( getControl ( ) . viewIndexProperty ( ) . get ( ) == NavigationBar . YEAR_VIEW ) { T clickedDate = getControl ( ) . chronology ( ) . getChronoType ( ) . cast ( button . getUserData ( ) ) ; getControl ( ) . pageDateProperty ( ) . setValue ( clickedDate ) ; getControl ( ) . viewIndexProperty ( ) . set ( NavigationBar . MONTH_VIEW ) ; } } ) ; int colIndex = i % NUM_OF_COLUMNS ; int rowIndex = i / NUM_OF_COLUMNS ; add ( button , colIndex , rowIndex ) ; }
public class CustomTypeface { /** * This is a shortcut to let { @ code CustomTypeface } create directly a { @ link Typeface } * for you . This will create the Typeface from a file located in the assets directory . * For more information see the { @ link # registerTypeface ( String , Typeface ) } method . * @ param typefaceName a name that will identify this { @ code Typeface } * @ param assets a instance of { @ link AssetManager } * @ param filePath a path to a TTF file located inside the assets folder * @ see # registerTypeface ( String , Typeface ) */ public void registerTypeface ( String typefaceName , AssetManager assets , String filePath ) { } }
Typeface typeface = Typeface . createFromAsset ( assets , filePath ) ; mTypefaces . put ( typefaceName , typeface ) ;
public class DefaultGroovyMethods { /** * Finds all elements of the array matching the given Closure condition . * < pre class = " groovyTestCase " > * def items = [ 1,2,3,4 ] as Integer [ ] * assert [ 2,4 ] = = items . findAll { it % 2 = = 0 } * < / pre > * @ param self an array * @ param condition a closure condition * @ return a list of matching values * @ since 2.0 */ public static < T > Collection < T > findAll ( T [ ] self , @ ClosureParams ( FirstParam . Component . class ) Closure condition ) { } }
Collection < T > answer = new ArrayList < T > ( ) ; return findAll ( condition , answer , new ArrayIterator < T > ( self ) ) ;
public class BpmnParse { /** * Parses the messages of the given definitions file . Messages are not * contained within a process element , but they can be referenced from inner * process elements . */ public void parseMessages ( ) { } }
for ( Element messageElement : rootElement . elements ( "message" ) ) { String id = messageElement . attribute ( "id" ) ; String messageName = messageElement . attribute ( "name" ) ; Expression messageExpression = null ; if ( messageName != null ) { messageExpression = expressionManager . createExpression ( messageName ) ; } MessageDefinition messageDefinition = new MessageDefinition ( this . targetNamespace + ":" + id , messageExpression ) ; this . messages . put ( messageDefinition . getId ( ) , messageDefinition ) ; }
public class CmsLink { /** * Returns the first parameter value for the given parameter name . < p > * @ param name the name of the parameter * @ return the first value for this name or < code > null < / code > */ public String getParameter ( String name ) { } }
String [ ] p = getParameterMap ( ) . get ( name ) ; if ( p != null ) { return p [ 0 ] ; } return null ;
public class MysqlExportService { /** * This is the entry point for exporting * the database . It performs validation and * the initial object initializations , * database connection and setup * before ca * @ throws IOException exception * @ throws SQLException exception * @ throws ClassNotFoundException exception */ public void export ( ) throws IOException , SQLException , ClassNotFoundException { } }
// check if properties is set or not if ( ! isValidateProperties ( ) ) { logger . error ( "Invalid config properties: The config properties is missing important parameters: DB_NAME, DB_USERNAME and DB_PASSWORD" ) ; return ; } // connect to the database database = properties . getProperty ( DB_NAME ) ; String jdbcURL = properties . getProperty ( JDBC_CONNECTION_STRING , "" ) ; String driverName = properties . getProperty ( JDBC_DRIVER_NAME , "" ) ; Connection connection ; if ( jdbcURL . isEmpty ( ) ) { connection = MysqlBaseService . connect ( properties . getProperty ( DB_USERNAME ) , properties . getProperty ( DB_PASSWORD ) , database , driverName ) ; } else { if ( jdbcURL . contains ( "?" ) ) { database = jdbcURL . substring ( jdbcURL . lastIndexOf ( "/" ) + 1 , jdbcURL . indexOf ( "?" ) ) ; } else { database = jdbcURL . substring ( jdbcURL . lastIndexOf ( "/" ) + 1 ) ; } logger . debug ( "database name extracted from connection string: " + database ) ; connection = MysqlBaseService . connectWithURL ( properties . getProperty ( DB_USERNAME ) , properties . getProperty ( DB_PASSWORD ) , jdbcURL , driverName ) ; } stmt = connection . createStatement ( ) ; // generate the final SQL String sql = exportToSql ( ) ; // create a temp dir to store the exported file for processing dirName = properties . getProperty ( MysqlExportService . TEMP_DIR , dirName ) ; File file = new File ( dirName ) ; if ( ! file . exists ( ) ) { boolean res = file . mkdir ( ) ; if ( ! res ) { // logger . error ( LOG _ PREFIX + " : Unable to create temp dir : " + file . getAbsolutePath ( ) ) ; throw new IOException ( LOG_PREFIX + ": Unable to create temp dir: " + file . getAbsolutePath ( ) ) ; } } // write the sql file out File sqlFolder = new File ( dirName + "/sql" ) ; if ( ! sqlFolder . exists ( ) ) { boolean res = sqlFolder . mkdir ( ) ; if ( ! res ) { throw new IOException ( LOG_PREFIX + ": Unable to create temp dir: " + file . 
getAbsolutePath ( ) ) ; } } sqlFileName = getSqlFilename ( ) ; FileOutputStream outputStream = new FileOutputStream ( sqlFolder + "/" + sqlFileName ) ; outputStream . write ( sql . getBytes ( ) ) ; outputStream . close ( ) ; // zip the file zipFileName = dirName + "/" + sqlFileName . replace ( ".sql" , ".zip" ) ; generatedZipFile = new File ( zipFileName ) ; ZipUtil . pack ( sqlFolder , generatedZipFile ) ; // mail the zipped file if mail settings are available if ( isEmailPropertiesSet ( ) ) { boolean emailSendingRes = EmailService . builder ( ) . setHost ( properties . getProperty ( EMAIL_HOST ) ) . setPort ( Integer . valueOf ( properties . getProperty ( EMAIL_PORT ) ) ) . setToAddress ( properties . getProperty ( EMAIL_TO ) ) . setFromAddress ( properties . getProperty ( EMAIL_FROM ) ) . setUsername ( properties . getProperty ( EMAIL_USERNAME ) ) . setPassword ( properties . getProperty ( EMAIL_PASSWORD ) ) . setSubject ( properties . getProperty ( EMAIL_SUBJECT , sqlFileName . replace ( ".sql" , "" ) . toUpperCase ( ) ) ) . setMessage ( properties . getProperty ( EMAIL_MESSAGE , "Please find attached database backup of " + database ) ) . setAttachments ( new File [ ] { new File ( zipFileName ) } ) . sendMail ( ) ; if ( emailSendingRes ) { logger . debug ( LOG_PREFIX + ": Zip File Sent as Attachment to Email Address Successfully" ) ; } else { logger . error ( LOG_PREFIX + ": Unable to send zipped file as attachment to email. See log debug for more info" ) ; } } // clear the generated temp files clearTempFiles ( Boolean . parseBoolean ( properties . getProperty ( PRESERVE_GENERATED_ZIP , Boolean . FALSE . toString ( ) ) ) ) ;
public class BuildWrapper { /** * Provides an opportunity for a { @ link BuildWrapper } to decorate a { @ link Launcher } to be used in the build . * This hook is called very early on in the build ( even before { @ link # setUp ( AbstractBuild , Launcher , BuildListener ) } is invoked . ) * The typical use of { @ link Launcher } decoration involves in modifying the environment that processes run , * such as the use of sudo / pfexec / chroot , or manipulating environment variables . * The default implementation is no - op , which just returns the { @ code launcher } parameter as - is . * @ param build * The build in progress for which this { @ link BuildWrapper } is called . Never null . * @ param launcher * The default launcher . Never null . This method is expected to wrap this launcher . * This makes sure that when multiple { @ link BuildWrapper } s attempt to decorate the same launcher * it will sort of work . But if you are developing a plugin where such collision is not a concern , * you can also simply discard this { @ link Launcher } and create an entirely different { @ link Launcher } * and return it , too . * @ param listener * Connected to the build output . Never null . Can be used for error reporting . * @ return * Must not be null . If a fatal error happens , throw an exception . * @ throws RunnerAbortedException * If a fatal error is detected but the implementation handled it gracefully , throw this exception * to suppress stack trace . * @ since 1.280 * @ see LauncherDecorator */ public Launcher decorateLauncher ( AbstractBuild build , Launcher launcher , BuildListener listener ) throws IOException , InterruptedException , RunnerAbortedException { } }
return launcher ;
public class Schematic { /** * Returns a DB with reads the given input stream as a configuration document { @ code Document } . This document is * expected to contain a { @ link Schematic # TYPE _ FIELD type field } to indicate the type * of DB . * @ see # getDb ( Document , ClassLoader ) * @ throws ParsingException if the given input stream is not a valid JSON document */ public static < T extends SchematicDb > T getDb ( InputStream configInputStream ) throws ParsingException , RuntimeException { } }
Document document = Json . read ( configInputStream ) . withVariablesReplacedWithSystemProperties ( ) ; return getDb ( document , Schematic . class . getClassLoader ( ) ) ;
public class DeviceImpl { public DevCmdInfo_2 command_query_2 ( final String command ) throws DevFailed , SystemException { } }
Util . out4 . println ( "Device_2Impl.command_query_2(" + command + ") arrived" ) ; // Retrieve number of command and allocate memory to send back info final int nb_cmd = device_class . get_command_list ( ) . size ( ) ; Util . out4 . println ( nb_cmd + " command(s) for device" ) ; DevCmdInfo_2 back = null ; for ( int i = 0 ; i < nb_cmd ; i ++ ) { final Command cmd = ( Command ) device_class . get_command_list ( ) . elementAt ( i ) ; if ( cmd . get_name ( ) . toLowerCase ( ) . equals ( command . toLowerCase ( ) ) ) { final DevCmdInfo_2 tmp = new DevCmdInfo_2 ( ) ; tmp . cmd_name = cmd . get_name ( ) ; tmp . cmd_tag = cmd . get_tag ( ) ; tmp . level = cmd . get_disp_level ( ) ; tmp . in_type = cmd . get_in_type ( ) ; tmp . out_type = cmd . get_out_type ( ) ; final String str_in = cmd . get_in_type_desc ( ) ; if ( str_in . length ( ) != 0 ) { tmp . in_type_desc = str_in ; } else { tmp . in_type_desc = Tango_DescNotSet ; } final String str_out = cmd . get_out_type_desc ( ) ; if ( str_out . length ( ) != 0 ) { tmp . out_type_desc = str_out ; } else { tmp . out_type_desc = Tango_DescNotSet ; } back = tmp ; } } // throw an exception to client if command not found if ( back == null ) { Except . throw_exception ( "API_CommandNotFound" , "Command " + command + " not found" , "Device_2Impl.command_query_2" ) ; } // Record operation request in black box blackbox . insert_op ( Op_Command_list_2 ) ; Util . out4 . println ( "Device_2Impl.command_list_query_2 exiting" ) ; return back ;
public class StorageAccountsInner { /** * Gets the specified Azure Storage container associated with the given Data Lake Analytics and Azure Storage accounts . * @ param resourceGroupName The name of the Azure resource group . * @ param accountName The name of the Data Lake Analytics account . * @ param storageAccountName The name of the Azure storage account from which to retrieve the blob container . * @ param containerName The name of the Azure storage container to retrieve * @ throws IllegalArgumentException thrown if parameters fail the validation * @ throws CloudException thrown if the request is rejected by server * @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @ return the StorageContainerInner object if successful . */ public StorageContainerInner getStorageContainer ( String resourceGroupName , String accountName , String storageAccountName , String containerName ) { } }
return getStorageContainerWithServiceResponseAsync ( resourceGroupName , accountName , storageAccountName , containerName ) . toBlocking ( ) . single ( ) . body ( ) ;
public class ApiOvhEmailexchange { /** * Alter this object properties * REST : PUT / email / exchange / { organizationName } / service / { exchangeService } / externalContact / { externalEmailAddress } * @ param body [ required ] New object properties * @ param organizationName [ required ] The internal name of your exchange organization * @ param exchangeService [ required ] The internal name of your exchange service * @ param externalEmailAddress [ required ] Contact email */ public void organizationName_service_exchangeService_externalContact_externalEmailAddress_PUT ( String organizationName , String exchangeService , String externalEmailAddress , OvhExchangeExternalContact body ) throws IOException { } }
String qPath = "/email/exchange/{organizationName}/service/{exchangeService}/externalContact/{externalEmailAddress}" ; StringBuilder sb = path ( qPath , organizationName , exchangeService , externalEmailAddress ) ; exec ( qPath , "PUT" , sb . toString ( ) , body ) ;
public class Component { /** * 构建MD5签名参数 * @ param payParams 支付参数 */ void buildMd5SignParams ( Map < String , String > payParams ) { } }
String payString = buildSignString ( payParams ) ; String sign = md5 ( payString ) ; payParams . put ( AlipayField . SIGN_TYPE . field ( ) , SignType . MD5 . value ( ) ) ; payParams . put ( AlipayField . SIGN . field ( ) , sign ) ;
public class RegionBackendServiceClient { /** * Creates a regional BackendService resource in the specified project using the data included in * the request . There are several restrictions and guidelines to keep in mind when creating a * regional backend service . Read Restrictions and Guidelines for more information . * < p > Sample code : * < pre > < code > * try ( RegionBackendServiceClient regionBackendServiceClient = RegionBackendServiceClient . create ( ) ) { * ProjectRegionName region = ProjectRegionName . of ( " [ PROJECT ] " , " [ REGION ] " ) ; * BackendService backendServiceResource = BackendService . newBuilder ( ) . build ( ) ; * Operation response = regionBackendServiceClient . insertRegionBackendService ( region , backendServiceResource ) ; * < / code > < / pre > * @ param region Name of the region scoping this request . * @ param backendServiceResource A BackendService resource . This resource defines a group of * backend virtual machines and their serving capacity . ( = = resource _ for v1 . backendService = = ) * ( = = resource _ for beta . backendService = = ) * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ @ BetaApi public final Operation insertRegionBackendService ( ProjectRegionName region , BackendService backendServiceResource ) { } }
InsertRegionBackendServiceHttpRequest request = InsertRegionBackendServiceHttpRequest . newBuilder ( ) . setRegion ( region == null ? null : region . toString ( ) ) . setBackendServiceResource ( backendServiceResource ) . build ( ) ; return insertRegionBackendService ( request ) ;
public class DisplayReadHandler { /** * Called when a valid record is read from the table / query . * Reads the secondary record and set ' s the record not found message if not found . * @ param bDisplayOption If true , display any changes . */ public void doValidRecord ( boolean bDisplayOption ) { } }
if ( m_fldMain == null ) m_fldMain = this . getOwner ( ) . getField ( mainFieldName ) ; if ( m_fldMain == null ) return ; // Error - Field not found ? if ( fileKeyFieldName != null ) m_FileToRead . getField ( fileKeyFieldName ) . moveFieldToThis ( m_fldMain , DBConstants . DISPLAY , DBConstants . SCREEN_MOVE ) ; // SCREEN _ MOVE says this is coming from here else m_FileToRead . getField ( m_iFileKeyField ) . moveFieldToThis ( m_fldMain , DBConstants . DISPLAY , DBConstants . SCREEN_MOVE ) ; // SCREEN _ MOVE says this is coming from here try { boolean bSuccess = m_FileToRead . seek ( "=" ) ; if ( ! bSuccess ) { m_FileToRead . initRecord ( bDisplayOption ) ; // Put ' s record in an indeterminate state ( so this record won ' t be written ) and clears the fields . BaseField nextField = m_FileToRead . getField ( m_FileToRead . getDefaultDisplayFieldName ( ) ) ; String strRecNotFound = this . getOwner ( ) . getTable ( ) . getString ( RECORD_NOT_FOUND_MESSAGE ) ; if ( nextField instanceof StringField ) nextField . setString ( strRecNotFound , bDisplayOption , DBConstants . SCREEN_MOVE ) ; } } catch ( DBException ex ) { ex . printStackTrace ( ) ; } super . doValidRecord ( bDisplayOption ) ;
public class CmsSetupBean { /** * Sets the connection string to the database to the given value . < p > * @ param dbWorkConStr the connection string used by the OpenCms core */ public void setDbWorkConStr ( String dbWorkConStr ) { } }
// Key prefix for this pool's properties, e.g. "<poolBase>.<poolName>.".
String poolSegment = '.' + getPool() + '.';
String keyPrefix = CmsDbPoolV11.KEY_DATABASE_POOL + poolSegment;
// Propagate the JDBC driver class configured for the selected database.
setExtProperty(keyPrefix + CmsDbPoolV11.KEY_JDBC_DRIVER, getDbProperty(m_databaseKey + ".driver"));
// Store the work connection string itself.
setExtProperty(keyPrefix + CmsDbPoolV11.KEY_JDBC_URL, dbWorkConStr);
// Optional connection test query - only written when non-blank.
String testQuery = getDbTestQuery();
if (!CmsStringUtil.isEmptyOrWhitespaceOnly(testQuery)) {
    setExtProperty(keyPrefix + "v11.connectionTestQuery", testQuery);
}
// Extra URL parameters always accompany the URL.
setExtProperty(keyPrefix + CmsDbPoolV11.KEY_JDBC_URL_PARAMS, getDbConStrParams());
public class ManagedHttpHealthCheckService { /** * Updates health status using the specified { @ link HttpRequest } . */ private CompletionStage < AggregatedHttpMessage > updateHealthStatus ( ServiceRequestContext ctx , HttpRequest req ) { } }
// Resolve the requested mode asynchronously; an absent mode means the
// request payload could not be interpreted.
return mode(ctx, req).thenApply(maybeHealthy -> {
    if (!maybeHealthy.isPresent()) {
        return BAD_REQUEST_RES;
    }
    boolean healthy = maybeHealthy.get();
    serverHealth.setHealthy(healthy);
    return healthy ? TURN_ON_RES : TURN_OFF_RES;
});
public class NotificationManager { /** * Whenever a new event triggers it will use this method to add the corresponding notification * to the notification lists . * @ param e * the event that for the notification . * @ param interaction */ @ SuppressWarnings ( "unchecked" ) public static void addNotification ( Event e , String interaction ) { } }
// Record the interaction as a notification stamped with the current
// simulation step; e.getAffected() is presumed to yield a List<Object>
// (unchecked cast suppressed at the method level) - TODO confirm.
NotificationManager.notifications.add(new InteractionNotification(
        getNotificationID(),
        NotificationManager.sim.getSchedule().getSteps(),
        e.getLauncher(),
        interaction,
        (List<Object>) e.getAffected()));
// Log the list size and the notification just appended (last element).
logger.fine("New notification added to notifications list. There is currently: "
        + notifications.size() + " notifications\n Notification added: "
        + NotificationManager.notifications.get(NotificationManager.notifications.size() - 1));
public class Bio { /** * Gets the value of the rid property . * This accessor method returns a reference to the live list , * not a snapshot . Therefore any modification you make to the * returned list will be present inside the JAXB object . * This is why there is not a < CODE > set < / CODE > method for the rid property . * For example , to add a new item , do as follows : * < pre > * getRid ( ) . add ( newItem ) ; * < / pre > * Objects of the following type ( s ) are allowed in the list * { @ link Object } */ public java . util . List < Object > getRid ( ) { } }
// Lazily create the backing list so callers always receive a live,
// modifiable collection (see method javadoc: no setter is provided).
if (this.rid == null) {
    this.rid = new ArrayList<Object>();
}
return this.rid;
public class WSJobRepositoryImpl { /** * { @ inheritDoc } */ @ Override public Set < String > getJobNames ( ) { } }
// Admins and monitors (or an absent auth service) see every job name;
// submitters only see jobs they submitted themselves.
if (authService == null || authService.isAdmin() || authService.isMonitor()) {
    return persistenceManagerService.getJobNamesSet();
}
if (authService.isSubmitter()) {
    return persistenceManagerService.getJobNamesSet(authService.getRunAsUser());
}
// No batch role at all: reject the request.
throw new JobSecurityException("The current user " + authService.getRunAsUser() + " is not authorized to perform any batch operations.");
public class JmsBolt { /** * Consumes a tuple and sends a JMS message . * If autoAck is true , the tuple will be acknowledged * after the message is sent . * If JMS sending fails , the tuple will be failed . */ @ Override public void execute ( Tuple input ) { } }
// write the tuple to a JMS destination . . . LOG . debug ( "Tuple received. Sending JMS message." ) ; try { Message msg = this . producer . toMessage ( this . session , input ) ; if ( msg != null ) { if ( msg . getJMSDestination ( ) != null ) { this . messageProducer . send ( msg . getJMSDestination ( ) , msg ) ; } else { this . messageProducer . send ( msg ) ; } } if ( this . autoAck ) { LOG . debug ( "ACKing tuple: " + input ) ; this . collector . ack ( input ) ; } } catch ( JMSException e ) { // failed to send the JMS message , fail the tuple fast LOG . warn ( "Failing tuple: " + input ) ; LOG . warn ( "Exception: " , e ) ; this . collector . fail ( input ) ; }
public class DeepLearning { /** * Helper to update a Frame and adding it to the local trash at the same time * @ param target Frame referece , to be overwritten * @ param src Newly made frame , to be deleted via local trash * @ return src */ Frame updateFrame ( Frame target , Frame src ) { } }
// Only queue src for deletion when it really is a new frame;
// trashing the target itself would destroy live data.
if (src != target) {
    ltrash(src);
}
return src;
public class ListTasksResult { /** * The list of task ARN entries for the < code > ListTasks < / code > request . * @ return The list of task ARN entries for the < code > ListTasks < / code > request . */ public java . util . List < String > getTaskArns ( ) { } }
// Lazily materialize the list so an empty result is still usable.
if (this.taskArns == null) {
    this.taskArns = new com.amazonaws.internal.SdkInternalList<String>();
}
return this.taskArns;
public class HamtPMap { /** * Appends this map ' s contents to a string builder . */ private void appendTo ( StringBuilder sb ) { } }
// Emit a separator unless this is the first entry written (presumably the
// builder already holds a single opening character - TODO confirm caller).
if (sb.length() > 1) {
    sb.append(", ");
}
sb.append(key).append(": ").append(value);
// Depth-first recursion over the trie children, in child order.
for (HamtPMap<K, V> child : children) {
    child.appendTo(sb);
}
public class Occurrence { /** * 计算右熵 * @ param pair * @ return */ public double computeRightEntropy ( PairFrequency pair ) { } }
// Gather every trigram that extends this pair on the right, then
// fold the frequency set down to an entropy value.
String rightKey = pair.getKey() + RIGHT;
Set<Map.Entry<String, TriaFrequency>> suffixes = trieTria.prefixSearch(rightKey);
return computeEntropy(suffixes);
public class CrawlerMetricsMarshaller { /** * Marshall the given parameter object . */ public void marshall ( CrawlerMetrics crawlerMetrics , ProtocolMarshaller protocolMarshaller ) { } }
// Null input is a caller error, not a serializable value.
if (crawlerMetrics == null) {
    throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
    // Marshall each member through its protocol binding, one per field.
    protocolMarshaller.marshall(crawlerMetrics.getCrawlerName(), CRAWLERNAME_BINDING);
    protocolMarshaller.marshall(crawlerMetrics.getTimeLeftSeconds(), TIMELEFTSECONDS_BINDING);
    protocolMarshaller.marshall(crawlerMetrics.getStillEstimating(), STILLESTIMATING_BINDING);
    protocolMarshaller.marshall(crawlerMetrics.getLastRuntimeSeconds(), LASTRUNTIMESECONDS_BINDING);
    protocolMarshaller.marshall(crawlerMetrics.getMedianRuntimeSeconds(), MEDIANRUNTIMESECONDS_BINDING);
    protocolMarshaller.marshall(crawlerMetrics.getTablesCreated(), TABLESCREATED_BINDING);
    protocolMarshaller.marshall(crawlerMetrics.getTablesUpdated(), TABLESUPDATED_BINDING);
    protocolMarshaller.marshall(crawlerMetrics.getTablesDeleted(), TABLESDELETED_BINDING);
} catch (Exception e) {
    // Wrap any marshalling failure in the SDK's client exception type.
    throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
public class FileUtils { /** * < p > Attempts to get the directory for a given file . Will return itself if the passed in file is a directory . Will * always return a trailing slash . < / p > * @ param file The file * @ return The path of the file spec or null if it is null */ public static String directory ( String file ) { } }
// Blank input yields the empty string (javadoc's "null" wording is loose).
if (StringUtils.isNullOrBlank(file)) {
    return StringUtils.EMPTY;
}
file = StringUtils.trim(file);
int separator = indexOfLastSeparator(file);
int extension = indexOfFileExtension(file);
if (extension == -1) {
    // No extension: input is presumed to already be a directory path.
    return addTrailingSlashIfNeeded(file);
} else if (separator == -1) {
    // A bare filename with an extension has no directory portion.
    return StringUtils.EMPTY;
}
// Strip the filename (keep the separator) and normalize recursively.
return directory(file.substring(0, separator + 1));
public class JavaUtils { /** * Tests if this class loader is a JBoss RepositoryClassLoader * @ param loader * @ return */ public static boolean isJBossRepositoryClassLoader ( ClassLoader loader ) { } }
// Walk up the loader's concrete class hierarchy; stop once we reach a
// JDK-provided class (whose name starts with "java").
for (Class<?> current = loader.getClass();
        !current.getName().startsWith("java");
        current = current.getSuperclass()) {
    if ("org.jboss.mx.loading.RepositoryClassLoader".equals(current.getName())) {
        return true;
    }
}
return false;
public class OperationProcessor { /** * Cancels those Operations in the given list that have not yet completed with the given exception . */ private void cancelIncompleteOperations ( Iterable < CompletableOperation > operations , Throwable failException ) { } }
assert failException != null : "no exception to set";
// Fail every operation that has not yet completed; count them for the log.
int failedCount = 0;
for (CompletableOperation operation : operations) {
    if (operation.isDone()) {
        continue; // already completed; nothing to cancel
    }
    this.state.failOperation(operation, failException);
    failedCount++;
}
log.warn("{}: Cancelling {} operations with exception: {}.", this.traceObjectId, failedCount, failException.toString());
public class ReOpenableHashPartition { /** * Spills this partition to disk . This method is invoked once after the initial open ( ) method * @ return Number of memorySegments in the writeBehindBuffers ! */ int spillInMemoryPartition ( Channel . ID targetChannel , IOManager ioManager , LinkedBlockingQueue < MemorySegment > writeBehindBuffers ) throws IOException { } }
// Remember the in-memory layout so the partition can be re-opened later.
this.initialPartitionBuffersCount = partitionBuffers.length; // for ReOpenableHashMap
this.initialBuildSideChannel = targetChannel;
initialBuildSideWriter = ioManager.createBlockChannelWriter(targetChannel, writeBehindBuffers);
// Write every in-memory segment of the build side to disk, in order.
final int numSegments = this.partitionBuffers.length;
for (int i = 0; i < numSegments; i++) {
    initialBuildSideWriter.writeBlock(partitionBuffers[i]);
}
this.partitionBuffers = null; // released: the data now lives on disk
initialBuildSideWriter.close();
// num partitions are now in the writeBehindBuffers. We propagate this information back
return numSegments;
public class TabViewRenderer { /** * Encode the list of links that render the tabs * @ param context * @ param writer * @ param tabView * @ param currentlyActiveIndex * @ param tabs * @ param clientId * @ param hiddenInputFieldID * @ throws IOException */ private static void encodeTabLinks ( FacesContext context , ResponseWriter writer , TabView tabView , int currentlyActiveIndex , List < UIComponent > tabs , String clientId , String hiddenInputFieldID ) throws IOException { } }
// Open the <ul> that will hold one <li> per tab.
writer.startElement("ul", tabView);
writer.writeAttribute("id", clientId, "id");
Tooltip.generateTooltip(context, tabView, writer);
// Build the Bootstrap nav classes: stacked pills for left/right tab bars,
// otherwise pills or tabs depending on the component setting.
String classes = "nav ";
if ("left".equalsIgnoreCase(tabView.getTabPosition()) || "right".equalsIgnoreCase(tabView.getTabPosition())) {
    classes += " nav-pills nav-stacked";
} else {
    classes = classes + (tabView.isPills() ? " nav-pills" : " nav-tabs");
}
// Append any user-supplied style classes.
if (tabView.getStyleClass() != null) {
    classes += " ";
    classes += tabView.getStyleClass();
}
writer.writeAttribute("class", classes, "class");
String role = "tablist";
AJAXRenderer.generateBootsFacesAJAXAndJavaScript(context, tabView, writer, false);
// Pass through HTML4/DHTML attributes except "style".
R.encodeHTML4DHTMLAttrs(writer, tabView.getAttributes(), new String[] { "style" });
// Allow the component to override the default ARIA role.
if (tabView.getRole() != null) {
    role = tabView.getRole();
}
writer.writeAttribute("role", role, "role");
// Render the individual tab entries, then close the list.
encodeTabs(context, writer, tabs, currentlyActiveIndex, hiddenInputFieldID, tabView.isDisabled());
writer.endElement("ul");
public class GridSampler { /** * < p > Checks a set of points that have been transformed to sample points on an image against * the image ' s dimensions to see if the point are even within the image . < / p > * < p > This method will actually " nudge " the endpoints back onto the image if they are found to be * barely ( less than 1 pixel ) off the image . This accounts for imperfect detection of finder * patterns in an image where the QR Code runs all the way to the image border . < / p > * < p > For efficiency , the method will check points from either end of the line until one is found * to be within the image . Because the set of points are assumed to be linear , this is valid . < / p > * @ param image image into which the points should map * @ param points actual points in x1 , y1 , . . . , xn , yn form * @ throws NotFoundException if an endpoint is lies outside the image boundaries */ protected static void checkAndNudgePoints ( BitMatrix image , float [ ] points ) throws NotFoundException { } }
int width = image . getWidth ( ) ; int height = image . getHeight ( ) ; // Check and nudge points from start until we see some that are OK : boolean nudged = true ; int maxOffset = points . length - 1 ; // points . length must be even for ( int offset = 0 ; offset < maxOffset && nudged ; offset += 2 ) { int x = ( int ) points [ offset ] ; int y = ( int ) points [ offset + 1 ] ; if ( x < - 1 || x > width || y < - 1 || y > height ) { throw NotFoundException . getNotFoundInstance ( ) ; } nudged = false ; if ( x == - 1 ) { points [ offset ] = 0.0f ; nudged = true ; } else if ( x == width ) { points [ offset ] = width - 1 ; nudged = true ; } if ( y == - 1 ) { points [ offset + 1 ] = 0.0f ; nudged = true ; } else if ( y == height ) { points [ offset + 1 ] = height - 1 ; nudged = true ; } } // Check and nudge points from end : nudged = true ; for ( int offset = points . length - 2 ; offset >= 0 && nudged ; offset -= 2 ) { int x = ( int ) points [ offset ] ; int y = ( int ) points [ offset + 1 ] ; if ( x < - 1 || x > width || y < - 1 || y > height ) { throw NotFoundException . getNotFoundInstance ( ) ; } nudged = false ; if ( x == - 1 ) { points [ offset ] = 0.0f ; nudged = true ; } else if ( x == width ) { points [ offset ] = width - 1 ; nudged = true ; } if ( y == - 1 ) { points [ offset + 1 ] = 0.0f ; nudged = true ; } else if ( y == height ) { points [ offset + 1 ] = height - 1 ; nudged = true ; } }
public class ProjectClient { /** * Gets service resources ( a . k . a service project ) associated with this host project . * < p > Sample code : * < pre > < code > * try ( ProjectClient projectClient = ProjectClient . create ( ) ) { * ProjectName project = ProjectName . of ( " [ PROJECT ] " ) ; * for ( XpnResourceId element : projectClient . getXpnResourcesProjects ( project . toString ( ) ) . iterateAll ( ) ) { * / / doThingsWith ( element ) ; * < / code > < / pre > * @ param project Project ID for this request . * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ @ BetaApi public final GetXpnResourcesProjectsPagedResponse getXpnResourcesProjects ( String project ) { } }
GetXpnResourcesProjectsHttpRequest request = GetXpnResourcesProjectsHttpRequest . newBuilder ( ) . setProject ( project ) . build ( ) ; return getXpnResourcesProjects ( request ) ;
public class AtomicLongMapSubject { /** * Fails if the { @ link AtomicLongMap } does not contain the given entry . */ @ SuppressWarnings ( "unchecked" ) // worse case should be a ClassCastException /* * TODO ( cpovirk ) : Consider requiring key to be a K here . But AtomicLongMapSubject isn ' t currently * parameterized , and if we ' re going to add a type parameter , I ' d rather wait until after we * ( hopefully ) remove the other existing type parameters . */ public void containsEntry ( Object key , long value ) { } }
checkNotNull(key, "AtomicLongMap does not support null keys");
// AtomicLongMap.get returns 0 for absent keys, so an absent key only
// matches when the expected value is also 0.
AtomicLongMap<Object> map = (AtomicLongMap<Object>) actual();
if (map.get(key) != value) {
    failWithActual("expected to contain entry", immutableEntry(key, value));
}
public class AlternativesImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ Override public NotificationChain eInverseRemove ( InternalEObject otherEnd , int featureID , NotificationChain msgs ) { } }
// EMF-generated dispatch: containment removals on the GROUPS feature are
// routed to the owned list; everything else goes to the superclass.
switch (featureID) {
    case SimpleAntlrPackage.ALTERNATIVES__GROUPS:
        return ((InternalEList<?>) getGroups()).basicRemove(otherEnd, msgs);
}
return super.eInverseRemove(otherEnd, featureID, msgs);
public class JAASLoginModuleConfigImpl { /** * Process the properties from the server . xml / client . xml * @ param props */ private void processConfigProps ( Map < String , Object > props ) { } }
// Normalize the configured JAAS control flag (REQUIRED/REQUISITE/...).
controlFlag = setControlFlag(moduleConfig.controlFlag());
Map<String, Object> options = extractOptions(props);
String originalLoginModuleClassName = moduleConfig.className();
if (isDefaultLoginModule()) {
    // Built-in module: resolve the real implementation class and hand it
    // to the proxy via the kernel-delegate option.
    String target = getTargetClassName(originalLoginModuleClassName, options);
    Class<?> cl = getTargetClassForName(target);
    options.put(LoginModuleProxy.KERNEL_DELEGATE, cl);
} else {
    // Custom module: set up delegation through the shared library loader.
    options = processDelegateOptions(options, originalLoginModuleClassName, classLoadingService, sharedLibrary, false);
}
this.options = options;
public class Wxs { /** * 创建一条文本响应 */ public static WxOutMsg respText ( String to , String content ) { } }
// Build a "text" reply; the recipient is optional and may be set later.
WxOutMsg reply = new WxOutMsg("text");
reply.setContent(content);
if (to != null) {
    reply.setToUserName(to);
}
return reply;
public class WebApp { /** * defect 39851 needs to stop the exception from always being thrown */ @ FFDCIgnore ( Exception . class ) protected void addStaticFilePatternMappings ( RequestProcessor proxyReqProcessor ) { } }
String nextPattern;
// Build the static-file extension processor from the web app's
// file-serving configuration and register each of its URL patterns.
ExtensionProcessor fileExtensionProcessor = getDefaultExtensionProcessor(this, getConfiguration().getFileServingAttributes());
List patternList = fileExtensionProcessor.getPatternList();
Iterator patternIter = patternList.iterator();
int globalPatternsCount = 0;
while (patternIter.hasNext()) {
    nextPattern = (String) patternIter.next(); // PK18713
    try {
        // Prefer the proxy processor when one was supplied.
        if (proxyReqProcessor == null)
            requestMapper.addMapping(nextPattern, fileExtensionProcessor); // PK18713
        else
            requestMapper.addMapping(nextPattern, proxyReqProcessor);
    } catch (Exception e) {
        // Mapping clash. Log error
        // pk435011
        // LIBERTY: Fix for RTC defect 49695 -- The logging level should match the severity of the message.
        // A clash on "/*" is tolerated once (counted, reported only if repeated).
        if (!!!"/*".equals(nextPattern)) {
            logger.logp(Level.SEVERE, CLASS_NAME, "initializeStaticFileHandler", "error.adding.servlet.mapping.file.handler", nextPattern);
        } else {
            globalPatternsCount++;
        }
    }
}
// More than one clash on the global "/*" pattern is a real error.
if (globalPatternsCount > 1) {
    logger.logp(Level.SEVERE, CLASS_NAME, "initializeStaticFileHandler", "error.adding.servlet.mapping.file.handler", "/*");
}
public class FileFrom { /** * Match filenames against the provided parameterized string . */ public File inFileNamed ( String filenamePattern ) { } }
if ( filenamePattern != null && ! filenamePattern . isEmpty ( ) ) { f . setFilenamePattern ( new RegexParameterizedPatternParser ( filenamePattern ) ) ; } return f ;
public class MinimizationResult { /** * Retrieves all ( original ) states in a block . * @ param block * the block . * @ return a collection containing all original states in this block . */ public static < S , L > Collection < S > getStatesInBlock ( Block < S , L > block ) { } }
// Wrap the block's state set in a view that maps each entry back to its
// original (pre-minimization) state; no copying takes place.
return new OriginalStateCollection<>(block.getStates());
public class RawFormatReader { /** * Depending on the next token read either a value , a list or a map . * @ param input * @ return * @ throws IOException */ private static Object readDynamic ( StreamingInput input ) throws IOException { } }
switch (input.peek()) {
    case VALUE:
        // Plain value: consume the token and return the parsed value.
        input.next();
        return input.getValue();
    case NULL:
        // Null token: consumed the same way; getValue() yields the null.
        input.next();
        return input.getValue();
    case LIST_START:
        return readList(input);
    case OBJECT_START:
        return readMap(input);
}
// Any other token cannot start a value.
throw new SerializationException("Unable to read file, unknown start of value: " + input.peek());
public class AccountClient { /** * Revoke a secret associated with a specific API key . * @ param apiKey The API key that the secret is associated to . * @ param secretId The id of the secret to revoke . * @ throws IOException if a network error occurred contacting the Nexmo Account API * @ throws NexmoClientException if there was a problem wit hthe Nexmo request or response object indicating that the request was unsuccessful . */ public void revokeSecret ( String apiKey , String secretId ) throws IOException , NexmoClientException { } }
revokeSecret ( new SecretRequest ( apiKey , secretId ) ) ;
public class NetworkMonitor { /** * Returns IP Address < br > * Requires READ _ PHONE _ STATE , ACCESS _ WIFI _ STATE and INTERNET permissions < br > * @ param ipv6 * Option to choose IP address type * @ return IP address made up of IPv6 if parameter ipv6 is true or IPv4 if * parameter ipv6 is false ; null if it do not have IP address * @ throws RuntimeException * when it fails due to poor network condition */ public String getIpAddress ( boolean ipv6 ) { } }
try { for ( Enumeration < NetworkInterface > networkInterfaces = NetworkInterface . getNetworkInterfaces ( ) ; networkInterfaces . hasMoreElements ( ) ; ) { NetworkInterface networkInterface = networkInterfaces . nextElement ( ) ; for ( Enumeration < InetAddress > addresses = networkInterface . getInetAddresses ( ) ; addresses . hasMoreElements ( ) ; ) { InetAddress inetAddress = addresses . nextElement ( ) ; if ( inetAddress . isLoopbackAddress ( ) ) { continue ; } if ( ipv6 && inetAddress instanceof Inet6Address ) { return inetAddress . getHostAddress ( ) ; } else if ( ipv6 == false && inetAddress instanceof Inet4Address ) { return inetAddress . getHostAddress ( ) ; } } } } catch ( SocketException e ) { throw new RuntimeException ( e ) ; } return null ;
public class Scanner { /** * Returns if this checker should be suppressed on the current tree path . * @ param suppressible holds information about the suppressibility of a checker * @ param errorProneOptions Options object configuring whether or not to suppress non - errors in * generated code . */ protected SuppressedState isSuppressed ( Suppressible suppressible , ErrorProneOptions errorProneOptions ) { } }
// Non-ERROR findings may be blanket-suppressed in generated code when the
// corresponding option is enabled; ERRORs are never suppressed that way.
boolean inGeneratedCode = errorProneOptions.disableWarningsInGeneratedCode()
        && severityMap().get(suppressible.canonicalName()) != SeverityLevel.ERROR;
return currentSuppressions.suppressedState(suppressible, inGeneratedCode);
public class JDBCUtil { /** * Converts the date to a string and surrounds it in single - quotes via the escape method . If the * date is null , returns null . */ public String quote ( Date date ) { } }
// Null dates pass through as null; otherwise stringify the date and let
// escape() apply the quoting.
if (date == null) {
    return null;
}
return escape(String.valueOf(date));
public class StreamingQueryInfo { /** * get the value of that named property */ @ Override public Object getMember ( String name ) { } }
switch (name) {
    case "name":
        return F_name;
    case "id":
        return F_id;
    case "sourceStatuses":
        return F_sourceStatuses;
    case "sinkStatus":
        return F_sinkStatus;
    case "awaitTermination":
        // NOTE(review): empty case falls through to super.getMember - looks
        // intentional ("awaitTermination" is resolved by the superclass),
        // but confirm it is not a missing return.
}
return super.getMember(name);
public class AppiumServiceBuilder { /** * Provides a measure of how strongly this { @ link DriverService } supports the given * { @ code capabilities } . A score of 0 or less indicates that this { @ link DriverService } does not * support instances of { @ link org . openqa . selenium . WebDriver } that require { @ code capabilities } . * Typically , the score is generated by summing the number of capabilities that the driver * service directly supports that are unique to the driver service ( that is , things like * " { @ code proxy } " don ' t tend to count to the score ) . * Higher the score , higher the possibility of getting grid sessions created sooner . */ @ Override public int score ( Capabilities capabilites ) { } }
int score = 0;
// One point when an explicit platform name is requested.
if (capabilites.getCapability(PLATFORM_NAME) != null) {
    score++;
}
// One point when the requested browser is one Appium can drive.
String browserName = capabilites.getBrowserName();
boolean supportedBrowser = browserName.equals(BrowserType.CHROME)
        || browserName.equals(BrowserType.ANDROID)
        || browserName.equals(BrowserType.SAFARI);
if (supportedBrowser) {
    score++;
}
return score;
public class Const { /** * { @ inheritDoc } */ @ Override public < C > Const < A , C > zip ( Applicative < Function < ? super B , ? extends C > , Const < A , ? > > appFn ) { } }
// Delegate to the Monad default implementation and narrow the result
// back to the concrete Const type.
return Monad.super.zip(appFn).coerce();
public class AbstractClientServerSupport { /** * Determines whether the { @ link ServerSocket } is alive and accepting client connections . * @ param serverSocket { @ link ServerSocket } to evaluate . * @ return a boolean value indicating whether the { @ link ServerSocket } is valid . * @ see java . net . ServerSocket # isBound ( ) * @ see java . net . ServerSocket # isClosed ( ) */ @ NullSafe protected boolean isRunning ( ServerSocket serverSocket ) { } }
// A null socket is treated as "not running"; otherwise the socket must
// still be open and bound to a local address.
if (serverSocket == null) {
    return false;
}
return !serverSocket.isClosed() && serverSocket.isBound();
public class HashtableOnDisk { /** * Retrieve the object pointer from the table . If we are in the process of * doubling , this method finds the correct table to update ( since there are * two tables active during the doubling process ) . */ long getHtindex ( int index , int tableid ) { } }
if ( tableid == header . currentTableId ( ) ) { return htindex [ index ] ; } else { return new_htindex [ index ] ; }
public class ListTagsLogGroupResult { /** * The tags for the log group . * @ param tags * The tags for the log group . * @ return Returns a reference to this object so that method calls can be chained together . */ public ListTagsLogGroupResult withTags ( java . util . Map < String , String > tags ) { } }
// Store the tags via the setter, then return this instance so calls
// can be chained fluently.
setTags(tags);
return this;
public class Normalizer { /** * Auto - generates standard and other names for the datasource * from either its ID ( if URN ) or one of its existing names ( preferably - standard name ) * @ param pro data source ( BioPAX Provenance ) */ public static void autoName ( Provenance pro ) { } }
// Only proceed when the URI is a Miriam URN / identifiers.org URI, or the
// provenance already carries at least one name to resolve against.
if (!(pro.getUri().startsWith("urn:miriam:") || pro.getUri().startsWith("http://identifiers.org/")) && pro.getName().isEmpty()) {
    log.info("Skipping: cannot normalize Provenance: " + pro.getUri());
} else {
    // i.e., 'name' is not empty or ID is the URN
    final SortedSet<String> names = new TreeSet<String>();
    // Pick the best lookup key: URI > standard name > display name.
    String key = null;
    if (pro.getUri().startsWith("urn:miriam:") || pro.getUri().startsWith("http://identifiers.org/")) {
        key = pro.getUri();
    } else if (pro.getStandardName() != null) {
        key = pro.getStandardName();
    } else {
        key = pro.getDisplayName(); // can be null
    }
    if (key != null) {
        try {
            names.addAll(Arrays.asList(MiriamLink.getNames(key)));
            pro.setStandardName(MiriamLink.getName(key));
            // get the datasource description
            String description = MiriamLink.getDataTypeDef(pro.getStandardName());
            pro.addComment(description);
        } catch (IllegalArgumentException e) {
            // ignore (then, names is still empty...)
        }
    }
    // when the above failed (no match in Miriam), or key was null -
    if (names.isEmpty()) {
        // finally, trying to find all valid names for each existing one
        for (String name : pro.getName()) {
            try {
                names.addAll(Arrays.asList(MiriamLink.getNames(name)));
            } catch (IllegalArgumentException e) {
                // ignore
            }
        }
        // pick up the first name, get the standard name
        if (!names.isEmpty())
            pro.setStandardName(MiriamLink.getName(names.iterator().next()));
    }
    // and add all the synonyms if any
    for (String name : names)
        pro.addName(name);
    // set display name if not set (standard name is set already)
    if (pro.getDisplayName() == null)
        pro.setDisplayName(pro.getStandardName());
}
public class ArrayFunctions { /** * Returned expression results in new array with at most n occurrences of value1 replaced with value2. */ public static Expression arrayReplace ( JsonArray array , Expression value1 , Expression value2 , long n ) { } }
return arrayReplace ( x ( array ) , value1 , value2 , n ) ;