signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class Quicksort { /** * Sort routine that recursively calls itself after the partition algorithm sorted the elements
* in order . This routine is used for both the ascending and descending algorithms . The only
* difference is the call to the partition algorithm .
* @ param < E > the type of elements in this list .
* @ param list list that we want to sort
* @ param start index of the starting point to sort
* @ param end index of the end point to sort
* @ param descending boolean value that determines if we want to do descending sort */
private static < E extends Comparable < E > > void sort ( List < E > list , int start , int end , boolean descending ) { } } | if ( start >= end ) { return ; } int pivot = 0 ; if ( descending ) { pivot = Quicksort . partitionDescending ( list , start , end ) ; } else { pivot = Quicksort . partition ( list , start , end ) ; } Quicksort . sort ( list , start , pivot - 1 , descending ) ; Quicksort . sort ( list , pivot + 1 , end , descending ) ; |
public class ServerRequestContext { /** * Retrieve the current server request context .
* @ param < T > The body type
* @ return The request context if it is present */
@ SuppressWarnings ( "unchecked" ) public static < T > Optional < HttpRequest < T > > currentRequest ( ) { } } | return Optional . ofNullable ( REQUEST . get ( ) ) ; |
public class ByteBufQueue { /** * Returns the first ByteBuf of this queue if the queue is not empty .
* Otherwise returns null .
* @ return the first ByteBuf of the queue or { @ code null } */
@ Nullable @ Contract ( pure = true ) public ByteBuf peekBuf ( ) { } } | return hasRemaining ( ) ? bufs [ first ] : null ; |
public class AttachableURLFactory { /** * documentation inherited from interface URLStreamHandlerFactory */
public URLStreamHandler createURLStreamHandler ( String protocol ) { } } | Class < ? extends URLStreamHandler > handler = _handlers . get ( protocol . toLowerCase ( ) ) ; if ( handler != null ) { try { return handler . newInstance ( ) ; } catch ( Exception e ) { log . warning ( "Unable to instantiate URLStreamHandler" , "protocol" , protocol , "cause" , e ) ; } } return null ; |
public class DISCARD { /** * Checks if a message should be passed up , or not */
protected boolean shouldDropUpMessage ( @ SuppressWarnings ( "UnusedParameters" ) Message msg , Address sender ) { } } | if ( discard_all && ! sender . equals ( localAddress ( ) ) ) return true ; if ( ignoredMembers . contains ( sender ) ) { if ( log . isTraceEnabled ( ) ) log . trace ( localAddress + ": dropping message from " + sender ) ; num_up ++ ; return true ; } if ( up > 0 ) { double r = Math . random ( ) ; if ( r < up ) { if ( excludeItself && sender . equals ( localAddress ( ) ) ) { if ( log . isTraceEnabled ( ) ) log . trace ( "excluding myself" ) ; } else { if ( log . isTraceEnabled ( ) ) log . trace ( localAddress + ": dropping message from " + sender ) ; num_up ++ ; return true ; } } } return false ; |
public class DescribeDirectConnectGatewayAttachmentsResult { /** * The attachments .
* @ param directConnectGatewayAttachments
* The attachments . */
public void setDirectConnectGatewayAttachments ( java . util . Collection < DirectConnectGatewayAttachment > directConnectGatewayAttachments ) { } } | if ( directConnectGatewayAttachments == null ) { this . directConnectGatewayAttachments = null ; return ; } this . directConnectGatewayAttachments = new com . amazonaws . internal . SdkInternalList < DirectConnectGatewayAttachment > ( directConnectGatewayAttachments ) ; |
public class Channel { /** * query this channel for chain information .
* The request is sent to a random peer in the channel
* @ param userContext the user context to use .
* @ return a { @ link BlockchainInfo } object containing the chain info requested
* @ throws InvalidArgumentException
* @ throws ProposalException */
public BlockchainInfo queryBlockchainInfo ( User userContext ) throws ProposalException , InvalidArgumentException { } } | return queryBlockchainInfo ( getShuffledPeers ( EnumSet . of ( PeerRole . LEDGER_QUERY ) ) , userContext ) ; |
public class AbstractValidate { /** * < p > Validate that the specified argument map is neither { @ code null } nor a size of zero ( no elements ) ; otherwise throwing an exception with the specified message . < / p >
* < pre > Validate . notEmpty ( myMap , " The map must not be empty " ) ; < / pre >
* @ param < T >
* the map type
* @ param map
* the map to check , validated not null by this method
* @ param message
* the { @ link String # format ( String , Object . . . ) } exception message if invalid , not null
* @ param values
* the optional values for the formatted exception message , null array not recommended
* @ return the validated map ( never { @ code null } method for chaining )
* @ throws NullPointerValidationException
* if the map is { @ code null }
* @ throws IllegalArgumentException
* if the map is empty
* @ see # notEmpty ( Object [ ] ) */
public < T extends Map < ? , ? > > T notEmpty ( final T map , final String message , final Object ... values ) { } } | if ( map == null ) { failNull ( String . format ( message , values ) ) ; } if ( map . isEmpty ( ) ) { fail ( String . format ( message , values ) ) ; } return map ; |
public class CmsPublishList { /** * Checks if the publish list contains a resource . < p >
* @ param res the resource
* @ return true if the publish list contains a resource */
protected boolean containsResource ( CmsResource res ) { } } | return m_deletedFolderList . contains ( res ) || m_folderList . contains ( res ) || m_fileList . contains ( res ) ; |
public class Frame { /** * Encodes samples into the appropriate compressed format , saving the
* result in the given “ data ” EncodedElement list . Encodes ' count ' samples ,
* from index ' start ' , to index ' start ' times ' skip ' , where “ skip ” is the
* format that samples may be packed in an array . For example , ' samples ' may
* include both left and right samples of a stereo stream . Therefore , “ skip ”
* would equal 2 , resulting in the valid indices for the first channel being
* even , and second being odd .
* @ param samples the audio samples to encode . This array may contain
* samples for multiple channels , interleaved ; only one of
* these channels is encoded by a subframe .
* @ param count the number of samples to encode .
* @ param start the index to start at in the array .
* @ param skip the number of indices to skip between successive samples
* ( for use when channels are interleaved in the given
* array ) .
* @ param result the EncodedElement to attach encoded data to . Data in
* Encoded Element given is not altered . New data is
* attached starting with “ data . getNext ( ) ” . If “ data ”
* already has a “ next ” set , it will be lost !
* @ return int Returns the number of inter - channel samples encoded ;
* i . e , if block - size is 4000 , and it is stereo audio .
* There are 8000 samples in this block , but the return
* value is “ 4000 ” . There is always an equal number of
* samples encoded from each channel . This exists primarily
* to support dynamic block sizes in the future ;
* Pre - condition : none
* Post - condition : Argument ' data ' is the head of a list containing the resulting ,
* encoded data stream . */
public int encodeSamples ( int [ ] samples , int count , int start , int skip , EncodedElement result , long frameNumber ) { } } | // System . err . println ( " FRAME : : encodeSamples : frame # : " + frameNumber ) ;
if ( DEBUG_LEV > 0 ) { System . err . println ( "FRAME::encodeSamplesNew(...)" ) ; if ( DEBUG_LEV > 10 ) { System . err . println ( "\tsamples.length:" + samples . length + ":count:" + count + ":start:" + start + ":skip:" + skip + ":frameNumber:" + frameNumber ) ; } } int samplesEncoded = count ; testConstant = true ; EncodedElement data = null ; ChannelData [ ] [ ] chanConfigData = this . getChannelsToEncode ( samples , count , sc . getChannelCount ( ) , sc . getBitsPerSample ( ) ) ; int size = Integer . MAX_VALUE ; EncodingConfiguration . ChannelConfig chConf = EncodingConfiguration . ChannelConfig . INDEPENDENT ; for ( int i = 0 ; i < chanConfigData . length ; i ++ ) { EncodedElement temp = new EncodedElement ( ) ; int configSize = encodeChannels ( chanConfigData [ i ] , temp ) ; if ( configSize < size ) { size = configSize ; data = temp ; chConf = determineConfigUsed ( chanConfigData [ i ] ) ; } } // create header element ; attach to result
EncodedElement header = new EncodedElement ( FrameHeader . MAX_HEADER_SIZE , 0 ) ; frameHeader . createHeader ( true , count , sc . getSampleRate ( ) , chConf , sc . getBitsPerSample ( ) , frameNumber , channels , header ) ; // result . setNext ( header ) ;
result . attachEnd ( header ) ; // attach data to header
header . attachEnd ( data ) ; // use " data " to zero - pad to byte boundary .
// EncodedElement temp = data . getEnd ( ) ;
data . padToByte ( ) ; // calculate CRC and affix footer
EncodedElement crc16Ele = getCRC16 ( header ) ; data . attachEnd ( crc16Ele ) ; if ( DEBUG_LEV > 0 ) System . err . println ( "Frame::encodeSamples(...): End" ) ; return samplesEncoded ; |
public class GCFLTImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
@ Override public boolean eIsSet ( int featureID ) { } } | switch ( featureID ) { case AfplibPackage . GCFLT__RG : return rg != null && ! rg . isEmpty ( ) ; } return super . eIsSet ( featureID ) ; |
public class DenseGrid { /** * Creates a { @ code DenseGrid } copying from another grid .
* @ param < V > the type of the value
* @ param grid the grid to copy , not null
* @ return the mutable grid , not null */
@ SuppressWarnings ( "unchecked" ) public static < V > DenseGrid < V > create ( Grid < ? extends V > grid ) { } } | if ( grid == null ) { throw new IllegalArgumentException ( "Grid must not be null" ) ; } if ( grid instanceof DenseImmutableGrid ) { return new DenseGrid < V > ( ( DenseImmutableGrid < V > ) grid ) ; } DenseGrid < V > created = DenseGrid . create ( grid . rowCount ( ) , grid . columnCount ( ) ) ; created . putAll ( grid ) ; return created ; |
public class VertxCompletableFuture { /** * Creates a new { @ link VertxCompletableFuture } using the current context . This method is used to switch between
* Vert . x contexts .
* @ return the created { @ link VertxCompletableFuture } */
public VertxCompletableFuture < T > withContext ( ) { } } | Context context = Objects . requireNonNull ( Vertx . currentContext ( ) ) ; return withContext ( context ) ; |
public class SecurityPolicySettings { /** * Gets the securityPolicyType value for this SecurityPolicySettings .
* @ return securityPolicyType * Type of security policy . This determines which other fields
* should be populated . This value is
* required for a valid security policy . */
public com . google . api . ads . admanager . axis . v201808 . SecurityPolicyType getSecurityPolicyType ( ) { } } | return securityPolicyType ; |
public class FeatureAttributeEditor { /** * Return the feature , with the current values in the " editable " form . This feature will not necessarily contain
* validated attribute values , so it is recommended to call the < code > validate < / code > method first .
* @ return */
public Feature getFeature ( ) { } } | for ( AttributeInfo info : layer . getLayerInfo ( ) . getFeatureInfo ( ) . getAttributes ( ) ) { featureForm . fromForm ( info . getName ( ) , feature . getAttributes ( ) . get ( info . getName ( ) ) ) ; } return feature ; |
public class DumbMetaEntryFactory { /** * { @ inheritDoc } */
@ Override public IMetaEntry deserializeEntry ( DataInput pData ) throws TTIOException { } } | try { final int kind = pData . readInt ( ) ; switch ( kind ) { case KEY : return new DumbKey ( pData . readLong ( ) ) ; case VALUE : return new DumbValue ( pData . readLong ( ) ) ; default : throw new IllegalStateException ( "Kind not defined." ) ; } } catch ( final IOException exc ) { throw new TTIOException ( exc ) ; } |
public class AppServiceEnvironmentsInner { /** * Get metric definitions for a specific instance of a worker pool of an App Service Environment .
* Get metric definitions for a specific instance of a worker pool of an App Service Environment .
* @ param resourceGroupName Name of the resource group to which the resource belongs .
* @ param name Name of the App Service Environment .
* @ param workerPoolName Name of the worker pool .
* @ param instance Name of the instance in the worker pool .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the PagedList & lt ; ResourceMetricDefinitionInner & gt ; object */
public Observable < ServiceResponse < Page < ResourceMetricDefinitionInner > > > listWorkerPoolInstanceMetricDefinitionsWithServiceResponseAsync ( final String resourceGroupName , final String name , final String workerPoolName , final String instance ) { } } | return listWorkerPoolInstanceMetricDefinitionsSinglePageAsync ( resourceGroupName , name , workerPoolName , instance ) . concatMap ( new Func1 < ServiceResponse < Page < ResourceMetricDefinitionInner > > , Observable < ServiceResponse < Page < ResourceMetricDefinitionInner > > > > ( ) { @ Override public Observable < ServiceResponse < Page < ResourceMetricDefinitionInner > > > call ( ServiceResponse < Page < ResourceMetricDefinitionInner > > page ) { String nextPageLink = page . body ( ) . nextPageLink ( ) ; if ( nextPageLink == null ) { return Observable . just ( page ) ; } return Observable . just ( page ) . concatWith ( listWorkerPoolInstanceMetricDefinitionsNextWithServiceResponseAsync ( nextPageLink ) ) ; } } ) ; |
public class AntClassLoader { /** * Returns the classpath this classloader will consult .
* @ return the classpath used for this classloader , with elements
* separated by the path separator for the system . */
public String getClasspath ( ) { } } | StringBuilder sb = new StringBuilder ( ) ; boolean firstPass = true ; Enumeration componentEnum = pathComponents . elements ( ) ; while ( componentEnum . hasMoreElements ( ) ) { if ( ! firstPass ) { sb . append ( System . getProperty ( "path.separator" ) ) ; } else { firstPass = false ; } sb . append ( ( ( File ) componentEnum . nextElement ( ) ) . getAbsolutePath ( ) ) ; } return sb . toString ( ) ; |
public class CommerceShippingFixedOptionLocalServiceWrapper { /** * Creates a new commerce shipping fixed option with the primary key . Does not add the commerce shipping fixed option to the database .
* @ param commerceShippingFixedOptionId the primary key for the new commerce shipping fixed option
* @ return the new commerce shipping fixed option */
@ Override public com . liferay . commerce . shipping . engine . fixed . model . CommerceShippingFixedOption createCommerceShippingFixedOption ( long commerceShippingFixedOptionId ) { } } | return _commerceShippingFixedOptionLocalService . createCommerceShippingFixedOption ( commerceShippingFixedOptionId ) ; |
public class RxFile { /** * Get a thumbnail from the provided Image or Video Uri . */
public static Observable < Bitmap > getThumbnail ( Context context , Uri uri ) { } } | return getThumbnailFromUri ( context , uri ) ; |
public class LoggingEndpointInterceptor { /** * Write response message to logger . */
public boolean handleResponse ( MessageContext messageContext , Object endpoint ) throws Exception { } } | logResponse ( "Sending SOAP response" , messageContext , false ) ; return true ; |
public class RestoreDBInstanceFromS3Request { /** * The list of logs that the restored DB instance is to export to CloudWatch Logs . The values in the list depend on
* the DB engine being used . For more information , see < a href =
* " https : / / docs . aws . amazon . com / AmazonRDS / latest / UserGuide / USER _ LogAccess . html # USER _ LogAccess . Procedural . UploadtoCloudWatch "
* > Publishing Database Logs to Amazon CloudWatch Logs < / a > in the < i > Amazon RDS User Guide < / i > .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setEnableCloudwatchLogsExports ( java . util . Collection ) } or
* { @ link # withEnableCloudwatchLogsExports ( java . util . Collection ) } if you want to override the existing values .
* @ param enableCloudwatchLogsExports
* The list of logs that the restored DB instance is to export to CloudWatch Logs . The values in the list
* depend on the DB engine being used . For more information , see < a href =
* " https : / / docs . aws . amazon . com / AmazonRDS / latest / UserGuide / USER _ LogAccess . html # USER _ LogAccess . Procedural . UploadtoCloudWatch "
* > Publishing Database Logs to Amazon CloudWatch Logs < / a > in the < i > Amazon RDS User Guide < / i > .
* @ return Returns a reference to this object so that method calls can be chained together . */
public RestoreDBInstanceFromS3Request withEnableCloudwatchLogsExports ( String ... enableCloudwatchLogsExports ) { } } | if ( this . enableCloudwatchLogsExports == null ) { setEnableCloudwatchLogsExports ( new com . amazonaws . internal . SdkInternalList < String > ( enableCloudwatchLogsExports . length ) ) ; } for ( String ele : enableCloudwatchLogsExports ) { this . enableCloudwatchLogsExports . add ( ele ) ; } return this ; |
public class LinearSearch { /** * Search for the minimum element in the array .
* @ param doubleArray array that we are searching in .
* @ return the minimum element in the array . */
public static double searchMin ( double [ ] doubleArray ) { } } | if ( doubleArray . length == 0 ) { throw new IllegalArgumentException ( "The array you provided does not have any elements" ) ; } double min = doubleArray [ 0 ] ; for ( int i = 1 ; i < doubleArray . length ; i ++ ) { if ( doubleArray [ i ] < min ) { min = doubleArray [ i ] ; } } return min ; |
public class SAXProcessor { /** * Computes the distance between approximated values and the real TS .
* @ param ts the timeseries .
* @ param winSize SAX window size .
* @ param paaSize SAX PAA size .
* @ param alphabetSize SAX alphabet size .
* @ param normThreshold the normalization threshold .
* @ return the distance value .
* @ throws Exception if error occurs . */
public double approximationDistanceAlphabet ( double [ ] ts , int winSize , int paaSize , int alphabetSize , double normThreshold ) throws Exception { } } | double resDistance = 0d ; int windowCounter = 0 ; double [ ] centralLines = na . getCentralCuts ( alphabetSize ) ; double [ ] cuts = na . getCuts ( alphabetSize ) ; for ( int i = 0 ; i < ts . length - winSize + 1 ; i ++ ) { double [ ] subseries = Arrays . copyOfRange ( ts , i , i + winSize ) ; double subsequenceDistance = 0. ; if ( tsProcessor . stDev ( subseries ) > normThreshold ) { subseries = tsProcessor . znorm ( subseries , normThreshold ) ; } double [ ] paa = tsProcessor . paa ( subseries , paaSize ) ; int [ ] leterIndexes = tsProcessor . ts2Index ( paa , cuts ) ; windowCounter ++ ; // essentially the distance here is the distance between the segment ' s
// PAA value and the real TS value
for ( int j = 0 ; j < paa . length ; j ++ ) { // compute the alphabet central cut line
int letterIdx = leterIndexes [ j ] ; double cLine = centralLines [ letterIdx ] ; subsequenceDistance = subsequenceDistance + ed . distance ( cLine , paa [ j ] ) ; } resDistance = resDistance + subsequenceDistance / paa . length ; } return resDistance / ( double ) windowCounter ; |
public class CoreOptions { /** * Creates a composite option of { @ link VMOption } s .
* @ param vmOptions
* virtual machine options ( cannot be null or containing null entries )
* @ return composite option of virtual machine options
* @ throws IllegalArgumentException
* - If urls array is null or contains null entries */
public static Option vmOptions ( final String ... vmOptions ) { } } | validateNotEmptyContent ( vmOptions , true , "VM options" ) ; final List < VMOption > options = new ArrayList < VMOption > ( ) ; for ( String vmOption : vmOptions ) { options . add ( vmOption ( vmOption ) ) ; } return vmOptions ( options . toArray ( new VMOption [ options . size ( ) ] ) ) ; |
public class CassandraSchemaMgr { /** * Delete the keyspace with the given name . This method can use any DB connection .
* @ param dbConn Database connection to use .
* @ param keyspace Name of keyspace to drop . */
public void dropKeyspace ( DBConn dbConn , String keyspace ) { } } | m_logger . info ( "Deleting Keyspace '{}'" , keyspace ) ; try { dbConn . getClientSession ( ) . system_drop_keyspace ( keyspace ) ; waitForSchemaPropagation ( dbConn ) ; } catch ( Exception ex ) { String errMsg = "Failed to delete Keyspace '" + keyspace + "'" ; m_logger . error ( errMsg , ex ) ; throw new RuntimeException ( errMsg , ex ) ; } |
public class GuardRail { /** * Release acquired permits with known result . Since there is a known result the result
* count object and latency will be updated .
* @ param context context of the task execution
* @ param result of the execution */
public void releasePermits ( ExecutionContext context , Result result ) { } } | releasePermits ( context . permitCount ( ) , result , context . startNanos ( ) , clock . nanoTime ( ) ) ; |
public class NS { /** * Returns a < a href =
* " http : / / docs . aws . amazon . com / amazondynamodb / latest / developerguide / Expressions . SpecifyingConditions . html # ConditionExpressionReference . Functions "
* > comparator condition < / a > ( that evaluates to true if the attribute value
* referred to by this path operand is not equal to that of the specified
* path operand ) for building condition expression . */
public < T extends Number > ComparatorCondition ne ( Set < T > value ) { } } | return new ComparatorCondition ( "<>" , this , new LiteralOperand ( value ) ) ; |
public class OidcAuthenticator { /** * The preferred { @ link ClientAuthenticationMethod } specified in the given
* { @ link OidcConfiguration } , or < code > null < / code > meaning that the a
* provider - supported method should be chosen . */
private static ClientAuthenticationMethod getPreferredAuthenticationMethod ( OidcConfiguration config ) { } } | final ClientAuthenticationMethod configurationMethod = config . getClientAuthenticationMethod ( ) ; if ( configurationMethod == null ) { return null ; } if ( ! SUPPORTED_METHODS . contains ( configurationMethod ) ) { throw new TechnicalException ( "Configured authentication method (" + configurationMethod + ") is not supported." ) ; } return configurationMethod ; |
public class ASN1Set { /** * return an ASN1Set from the given object .
* @ param obj the object we want converted .
* @ exception IllegalArgumentException if the object cannot be converted . */
public static ASN1Set getInstance ( Object obj ) { } } | if ( obj == null || obj instanceof ASN1Set ) { return ( ASN1Set ) obj ; } throw new IllegalArgumentException ( "unknown object in getInstance" ) ; |
public class BoxRetentionPolicy { /** * Returns iterable with all enterprise assignments of this retention policy .
* @ param limit the limit of entries per response . The default value is 100.
* @ param fields the fields to retrieve .
* @ return an iterable containing all enterprise assignments . */
public Iterable < BoxRetentionPolicyAssignment . Info > getEnterpriseAssignments ( int limit , String ... fields ) { } } | return this . getAssignments ( BoxRetentionPolicyAssignment . TYPE_ENTERPRISE , limit , fields ) ; |
public class ReplicationTaskAssessmentResultMarshaller { /** * Marshall the given parameter object . */
public void marshall ( ReplicationTaskAssessmentResult replicationTaskAssessmentResult , ProtocolMarshaller protocolMarshaller ) { } } | if ( replicationTaskAssessmentResult == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( replicationTaskAssessmentResult . getReplicationTaskIdentifier ( ) , REPLICATIONTASKIDENTIFIER_BINDING ) ; protocolMarshaller . marshall ( replicationTaskAssessmentResult . getReplicationTaskArn ( ) , REPLICATIONTASKARN_BINDING ) ; protocolMarshaller . marshall ( replicationTaskAssessmentResult . getReplicationTaskLastAssessmentDate ( ) , REPLICATIONTASKLASTASSESSMENTDATE_BINDING ) ; protocolMarshaller . marshall ( replicationTaskAssessmentResult . getAssessmentStatus ( ) , ASSESSMENTSTATUS_BINDING ) ; protocolMarshaller . marshall ( replicationTaskAssessmentResult . getAssessmentResultsFile ( ) , ASSESSMENTRESULTSFILE_BINDING ) ; protocolMarshaller . marshall ( replicationTaskAssessmentResult . getAssessmentResults ( ) , ASSESSMENTRESULTS_BINDING ) ; protocolMarshaller . marshall ( replicationTaskAssessmentResult . getS3ObjectUrl ( ) , S3OBJECTURL_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class GlobalUsersInner { /** * List labs for the user .
* @ param userName The name of the user .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the ListLabsResponseInner object */
public Observable < ListLabsResponseInner > listLabsAsync ( String userName ) { } } | return listLabsWithServiceResponseAsync ( userName ) . map ( new Func1 < ServiceResponse < ListLabsResponseInner > , ListLabsResponseInner > ( ) { @ Override public ListLabsResponseInner call ( ServiceResponse < ListLabsResponseInner > response ) { return response . body ( ) ; } } ) ; |
public class FunctionVersionReader { /** * Make the request to the Twilio API to perform the read .
* @ param client TwilioRestClient with which to make the request
* @ return FunctionVersion ResourceSet */
@ Override public ResourceSet < FunctionVersion > read ( final TwilioRestClient client ) { } } | return new ResourceSet < > ( this , client , firstPage ( client ) ) ; |
public class Group { /** * Adds devices matching pattern
* to the group
* @ throws DevFailed */
public void add ( final String p ) throws DevFailed { } } | synchronized ( this ) { final Vector v = factory . instanciate ( p ) ; final Iterator it = v . iterator ( ) ; while ( it . hasNext ( ) ) { add_i ( ( GroupElement ) it . next ( ) ) ; } } |
public class AnnotatedHttpServiceFactory { /** * Returns the list of { @ link AnnotatedHttpService } defined by { @ link Path } and HTTP method annotations
* from the specified { @ code object } . */
public static List < AnnotatedHttpServiceElement > find ( String pathPrefix , Object object , Iterable < ? > exceptionHandlersAndConverters ) { } } | Builder < ExceptionHandlerFunction > exceptionHandlers = null ; Builder < RequestConverterFunction > requestConverters = null ; Builder < ResponseConverterFunction > responseConverters = null ; for ( final Object o : exceptionHandlersAndConverters ) { boolean added = false ; if ( o instanceof ExceptionHandlerFunction ) { if ( exceptionHandlers == null ) { exceptionHandlers = ImmutableList . builder ( ) ; } exceptionHandlers . add ( ( ExceptionHandlerFunction ) o ) ; added = true ; } if ( o instanceof RequestConverterFunction ) { if ( requestConverters == null ) { requestConverters = ImmutableList . builder ( ) ; } requestConverters . add ( ( RequestConverterFunction ) o ) ; added = true ; } if ( o instanceof ResponseConverterFunction ) { if ( responseConverters == null ) { responseConverters = ImmutableList . builder ( ) ; } responseConverters . add ( ( ResponseConverterFunction ) o ) ; added = true ; } if ( ! added ) { throw new IllegalArgumentException ( o . getClass ( ) . getName ( ) + " is neither an exception handler nor a converter." ) ; } } final List < ExceptionHandlerFunction > exceptionHandlerFunctions = exceptionHandlers != null ? exceptionHandlers . build ( ) : ImmutableList . of ( ) ; final List < RequestConverterFunction > requestConverterFunctions = requestConverters != null ? requestConverters . build ( ) : ImmutableList . of ( ) ; final List < ResponseConverterFunction > responseConverterFunctions = responseConverters != null ? responseConverters . build ( ) : ImmutableList . of ( ) ; final List < Method > methods = requestMappingMethods ( object ) ; return methods . stream ( ) . map ( ( Method method ) -> create ( pathPrefix , object , method , exceptionHandlerFunctions , requestConverterFunctions , responseConverterFunctions ) ) . collect ( toImmutableList ( ) ) ; |
public class GetUtterancesViewRequest { /** * An array of bot versions for which utterance information should be returned . The limit is 5 versions per request .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setBotVersions ( java . util . Collection ) } or { @ link # withBotVersions ( java . util . Collection ) } if you want to
* override the existing values .
* @ param botVersions
* An array of bot versions for which utterance information should be returned . The limit is 5 versions per
* request .
* @ return Returns a reference to this object so that method calls can be chained together . */
public GetUtterancesViewRequest withBotVersions ( String ... botVersions ) { } } | if ( this . botVersions == null ) { setBotVersions ( new java . util . ArrayList < String > ( botVersions . length ) ) ; } for ( String ele : botVersions ) { this . botVersions . add ( ele ) ; } return this ; |
public class ChainedResourceLoader { /** * Returns { @ code true } , if any of the delegate loaders returns { @ true } from
* { @ link # exists ( String ) } for the given path . */
@ Override public boolean exists ( String resource , String [ ] paths ) throws IOException { } } | for ( ResourceLoader loader : loaders ) { if ( loader . exists ( resource , paths ) ) { return true ; } } return false ; |
public class VertexData { /** * Replaces the contents of this vertex data by the provided one . This is a deep copy . The vertex attribute are each individually cloned .
* @ param data The data to copy . */
public void copy ( VertexData data ) { } } | clear ( ) ; indices . addAll ( data . indices ) ; final TIntObjectIterator < VertexAttribute > iterator = data . attributes . iterator ( ) ; while ( iterator . hasNext ( ) ) { iterator . advance ( ) ; attributes . put ( iterator . key ( ) , iterator . value ( ) . clone ( ) ) ; } nameToIndex . putAll ( data . nameToIndex ) ; |
public class MatchResultPredicates { /** * Generates a Predicate that only accepts the Match Results within a range . The range may be closed or open at each
* of the boundaries . { @ code BoundType . CLOSED } means that the boundary should also be accepted . { @ code BoundType . OPEN }
* on the other indicates that the boundary itself should not be accepted .
* @ param minMatchType the matchType that defines the lower boundary
* @ param minBound the lower { @ code BoundType }
* @ param maxMatchType the matchType that defines the upper boundary
* @ param maxBound the upper { @ code BoundType }
* @ param < T > a subclass of MatchResult
* @ param < S > a subclass of MatchType
* @ return the Predicate */
public static < T extends MatchResult , S extends MatchType > Predicate < T > withinRange ( S minMatchType , BoundType minBound , S maxMatchType , BoundType maxBound ) { } } | Predicate < T > lowerPredicate ; Predicate < T > upperPredicate ; if ( minBound . equals ( BoundType . CLOSED ) ) { lowerPredicate = greaterOrEqualTo ( minMatchType ) ; } else { lowerPredicate = greaterThan ( minMatchType ) ; } if ( maxBound . equals ( BoundType . CLOSED ) ) { upperPredicate = lowerOrEqualTo ( maxMatchType ) ; } else { upperPredicate = lowerThan ( maxMatchType ) ; } return Predicates . and ( lowerPredicate , upperPredicate ) ; |
public class LookupFriendships { /** * Usage : java twitter4j . examples . user . LookupFriendships [ screen name [ , screen name . . ] ]
* @ param args message */
public static void main ( String [ ] args ) { } } | if ( args . length < 1 ) { System . out . println ( "Usage: java twitter4j.examples.user.LookupFriendships [screen name[,screen name..]]" ) ; System . exit ( - 1 ) ; } try { Twitter twitter = new TwitterFactory ( ) . getInstance ( ) ; ResponseList < Friendship > friendships = twitter . lookupFriendships ( args [ 0 ] . split ( "," ) ) ; for ( Friendship friendship : friendships ) { System . out . println ( "@" + friendship . getScreenName ( ) + " following: " + friendship . isFollowing ( ) + " followed_by: " + friendship . isFollowedBy ( ) ) ; } System . out . println ( "Successfully looked up friendships [" + args [ 0 ] + "]." ) ; System . exit ( 0 ) ; } catch ( TwitterException te ) { te . printStackTrace ( ) ; System . out . println ( "Failed to lookup friendships: " + te . getMessage ( ) ) ; System . exit ( - 1 ) ; } |
public class AbstractItemLink {
    /**
     * Gets a reference to the item, which is asserted to be obtained from the
     * strong reference. We use this when we know that the state of the item
     * tells us that there is a strong reference.
     *
     * @return the strongly-referenced item; never the {@code NULL_STRONG_REF} sentinel
     * @throws SevereMessageStoreException if the strong reference holds the
     *         {@code NULL_STRONG_REF} sentinel, i.e. the asserted invariant is broken
     */
    private final AbstractItem _getAndAssertItem() throws SevereMessageStoreException {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(tc, "_getAndAssertItem");
        AbstractItem item = null;
        // Read the reference under the monitor so we see a consistent value
        // even if another thread is swapping it.
        synchronized (this) {
            item = _strongReferenceToItem;
        }
        // Defect 601995
        // The sentinel means "deliberately no item"; callers of this method have
        // asserted the item must exist, so treat the sentinel as a severe error.
        if (item == NULL_STRONG_REF) {
            if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
                SibTr.exit(tc, "_getAndAssertItem");
            throw new SevereMessageStoreException("_getAndAssertItem");
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "_getAndAssertItem", item);
        return item;
    }
}
public class GetResourcePoliciesResult { /** * A key policy document , in JSON format .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setPolicies ( java . util . Collection ) } or { @ link # withPolicies ( java . util . Collection ) } if you want to override
* the existing values .
* @ param policies
* A key policy document , in JSON format .
* @ return Returns a reference to this object so that method calls can be chained together . */
public GetResourcePoliciesResult withPolicies ( String ... policies ) { } } | if ( this . policies == null ) { setPolicies ( new java . util . ArrayList < String > ( policies . length ) ) ; } for ( String ele : policies ) { this . policies . add ( ele ) ; } return this ; |
public class MolecularFormulaManipulator {
    /**
     * Corrects a mass according to the charge of the IMolecularFormula:
     * each positive charge removes one electron mass, each negative charge adds one.
     *
     * @param mass   the mass to correct
     * @param charge the charge; {@code null} means no correction
     * @return the charge-corrected mass
     */
    private static double correctMass(double mass, Integer charge) {
        if (charge == null) {
            return mass;
        }
        // Rest mass of one electron in unified atomic mass units (u).
        final double electronMass = 0.00054857990927;
        double corrected = mass;
        if (charge > 0) {
            corrected -= electronMass * charge;
        } else if (charge < 0) {
            corrected += electronMass * Math.abs(charge);
        }
        return corrected;
    }
}
public class ConnectivityChecker { /** * Partitions the atoms in an AtomContainer into covalently connected components .
* @ param container The AtomContainer to be partitioned into connected components , i . e . molecules
* @ return A MoleculeSet .
* @ cdk . dictref blue - obelisk : graphPartitioning */
public static IAtomContainerSet partitionIntoMolecules ( IAtomContainer container ) { } } | ConnectedComponents cc = new ConnectedComponents ( GraphUtil . toAdjList ( container ) ) ; return partitionIntoMolecules ( container , cc . components ( ) ) ; |
public class AfplibPackageImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
public EClass getTLE ( ) { } } | if ( tleEClass == null ) { tleEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( AfplibPackage . eNS_URI ) . getEClassifiers ( ) . get ( 320 ) ; } return tleEClass ; |
public class AssociationValue { /** * Sets the specified date attribute to the specified value .
* @ param name name of the attribute
* @ param value value of the attribute
* @ since 1.9.0 */
public void setDateAttribute ( String name , Date value ) { } } | ensureAttributes ( ) ; Attribute attribute = new DateAttribute ( value ) ; attribute . setEditable ( isEditable ( name ) ) ; getAllAttributes ( ) . put ( name , attribute ) ; |
public class WSRdbManagedConnectionImpl {
    /**
     * Adds a connection event listener to the ManagedConnection instance.
     * The registered ConnectionEventListener instances are notified of connection
     * close and error events, also of local transaction related events on the
     * Managed Connection.
     *
     * @param listener a new ConnectionEventListener to be registered
     * @throws NullPointerException if you try to add a null listener
     */
    public void addConnectionEventListener(ConnectionEventListener listener) {
        final boolean isTraceOn = TraceComponent.isAnyTracingEnabled();
        if (isTraceOn && tc.isDebugEnabled())
            Tr.debug(this, tc, "addConnectionEventListener", listener);
        if (listener == null)
            throw new NullPointerException("Cannot add null ConnectionEventListener.");
        // Not synchronized because of the contract that add/remove event listeners will only
        // be used on ManagedConnection create/destroy, when the ManagedConnection is not
        // used by any other threads.
        // Add the listener to the end of the array -- if the array is full,
        // then need to create a new, bigger one.
        // check if the array is already full
        if (numListeners >= ivEventListeners.length) {
            // there is not enough room for the listener in the array
            // create a new, bigger array, grown by CEL_ARRAY_INCREMENT_SIZE slots
            // Use the standard interface for event listeners instead of J2C's.
            ConnectionEventListener[] tempArray = ivEventListeners;
            ivEventListeners = new ConnectionEventListener[numListeners + CEL_ARRAY_INCREMENT_SIZE];
            // parms: arraycopy(Object source, int srcIndex, Object dest, int destIndex, int length)
            System.arraycopy(tempArray, 0, ivEventListeners, 0, tempArray.length);
            // point out in the trace that we had to do this - consider code changes if there
            // are new CELs to handle (change KNOWN_NUMBER_OF_CELS, new events?, ...)
            if (isTraceOn && tc.isDebugEnabled())
                Tr.debug(this, tc, "received more ConnectionEventListeners than expected, " + "increased array size to " + ivEventListeners.length);
        }
        // add listener to the array, increment listener counter
        ivEventListeners[numListeners++] = listener;
    }
}
public class Ifc4PackageImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
@ Override public EClass getIfcReference ( ) { } } | if ( ifcReferenceEClass == null ) { ifcReferenceEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( Ifc4Package . eNS_URI ) . getEClassifiers ( ) . get ( 507 ) ; } return ifcReferenceEClass ; |
public class FeedCommProcessor {
    /**
     * Sends a message to the server asynchronously. This method returns immediately; the message may not go out until
     * some time in the future.
     *
     * @param messageWithData the message to send (optionally carrying binary extra data)
     * @throws IllegalStateException if the WebSocket connection is already closed
     */
    public void sendAsync(BasicMessageWithExtraData<? extends BasicMessage> messageWithData) {
        if (!isConnected()) {
            throw new IllegalStateException("WebSocket connection was closed. Cannot send any messages");
        }
        BasicMessage message = messageWithData.getBasicMessage();
        // Attach authentication/config headers before handing off to the executor.
        configurationAuthentication(message);
        // All sends are serialized through sendExecutor; failures are logged, not rethrown,
        // because the caller has already returned by the time the send runs.
        sendExecutor.execute(new Runnable() {
            @Override
            public void run() {
                try {
                    if (messageWithData.getBinaryData() == null) {
                        // Text frame: serialize the message and send it as UTF-8 bytes.
                        String messageString = ApiDeserializer.toHawkularFormat(message);
                        @SuppressWarnings("resource")
                        Buffer buffer = new Buffer().writeUtf8(messageString);
                        RequestBody requestBody = RequestBody.create(WebSocket.TEXT, buffer.readByteArray());
                        FeedCommProcessor.this.webSocket.sendMessage(requestBody);
                    } else {
                        // Binary frame: stream the payload to the sink rather than buffering it
                        // all in memory.
                        BinaryData messageData = ApiDeserializer.toHawkularFormat(message, messageWithData.getBinaryData());
                        RequestBody requestBody = new RequestBody() {
                            @Override
                            public MediaType contentType() {
                                return WebSocket.BINARY;
                            }

                            @Override
                            public void writeTo(BufferedSink bufferedSink) throws IOException {
                                emitToSink(messageData, bufferedSink);
                            }
                        };
                        FeedCommProcessor.this.webSocket.sendMessage(requestBody);
                    }
                } catch (Throwable t) {
                    // Best-effort async send: record the failure with the message type for diagnosis.
                    log.errorFailedToSendOverFeedComm(message.getClass().getName(), t);
                }
            }
        });
    }
}
public class Logging {
    /**
     * Log a message at the 'fine' debugging level.
     * You should check isDebugging() before building the message.
     *
     * @param message informational log message
     * @param e exception whose details should be logged with the message
     */
    public void fine(CharSequence message, Throwable e) {
        // Thin convenience wrapper around the generic log(...) entry point.
        log(Level.FINE, message, e);
    }
}
public class CommandReader {
    /**
     * Set a breakpoint at the given function or operation name with a condition.
     *
     * @param name
     *            The function or operation name.
     * @param condition
     *            Any condition for the breakpoint, or null.
     * @throws Exception
     *             Problems parsing condition.
     */
    private void setBreakpoint(String name, String condition) throws Exception {
        // Lex the name so we can resolve it against the interpreter's globals.
        LexTokenReader ltr = new LexTokenReader(name, Dialect.VDM_SL);
        LexToken token = ltr.nextToken();
        ltr.close();
        Value v = null;
        if (token.is(VDMToken.IDENTIFIER)) {
            // Bare identifier: qualify it with the interpreter's default module/class.
            LexIdentifierToken id = (LexIdentifierToken) token;
            LexNameToken lnt = new LexNameToken(interpreter.getDefaultName(), id);
            v = interpreter.findGlobal(lnt);
        } else if (token.is(VDMToken.NAME)) {
            // Already a fully-qualified name.
            v = interpreter.findGlobal((LexNameToken) token);
        }
        if (v instanceof FunctionValue) {
            // Replace any existing breakpoint on the function body with a new one.
            FunctionValue fv = (FunctionValue) v;
            PExp exp = fv.body;
            interpreter.clearBreakpoint(BreakpointManager.getBreakpoint(exp).number);
            Breakpoint bp = interpreter.setBreakpoint(exp, condition);
            println("Created " + bp);
            println(interpreter.getSourceLine(bp.location));
        } else if (v instanceof OperationValue) {
            // Same flow for operations, whose body is a statement rather than an expression.
            OperationValue ov = (OperationValue) v;
            PStm stmt = ov.body;
            interpreter.clearBreakpoint(BreakpointManager.getBreakpoint(stmt).number);
            Breakpoint bp = interpreter.setBreakpoint(stmt, condition);
            println("Created " + bp);
            println(interpreter.getSourceLine(bp.location));
        } else if (v == null) {
            println(name + " is not visible or not found");
        } else {
            println(name + " is not a function or operation");
        }
    }
}
public class BaseDataAdaptiveTableLayoutAdapter {
    /**
     * Switch 2 columns with data: swaps the cell values of the two columns, row by row.
     *
     * @param columnIndex column from
     * @param columnToIndex column to
     */
    void switchTwoColumns(int columnIndex, int columnToIndex) {
        // NOTE(review): the loop condition `i < getRowCount() - 1` leaves the LAST row
        // unswapped. Presumably the row count includes a header or footer row handled
        // elsewhere — confirm; otherwise this is an off-by-one.
        for (int i = 0; i < getRowCount() - 1; i++) {
            // Classic three-step swap of the two cells in row i.
            Object cellData = getItems()[i][columnToIndex];
            getItems()[i][columnToIndex] = getItems()[i][columnIndex];
            getItems()[i][columnIndex] = cellData;
        }
    }
}
public class Interpreter { /** * Set an expression tracepoint . A tracepoint does not stop execution , but evaluates an expression before
* continuing .
* @ param exp
* The expression to trace .
* @ param trace
* The expression to evaluate .
* @ return The Breakpoint object created .
* @ throws LexException
* @ throws ParserException */
public Breakpoint setTracepoint ( PExp exp , String trace ) throws ParserException , LexException { } } | BreakpointManager . setBreakpoint ( exp , new Tracepoint ( exp . getLocation ( ) , ++ nextbreakpoint , trace ) ) ; breakpoints . put ( nextbreakpoint , BreakpointManager . getBreakpoint ( exp ) ) ; return BreakpointManager . getBreakpoint ( exp ) ; |
public class GVRBone { /** * Sets the final transform of the bone during animation .
* @ param finalTransform The transform matrix representing
* the bone ' s pose after computing the skeleton . */
public void setFinalTransformMatrix ( Matrix4f finalTransform ) { } } | float [ ] mat = new float [ 16 ] ; finalTransform . get ( mat ) ; NativeBone . setFinalTransformMatrix ( getNative ( ) , mat ) ; |
public class CommerceWarehouseUtil {
    /**
     * Returns all the commerce warehouses where groupId = &#63; and active = &#63; and primary = &#63;.
     *
     * @param groupId the group ID
     * @param active the active flag
     * @param primary the primary flag
     * @return the matching commerce warehouses
     */
    public static List<CommerceWarehouse> findByG_A_P(long groupId, boolean active, boolean primary) {
        // Static facade over the persistence layer; all logic lives in the persistence impl.
        return getPersistence().findByG_A_P(groupId, active, primary);
    }
}
public class WritableComparator {
    /**
     * Parses a 32-bit big-endian signed integer from four consecutive bytes.
     *
     * @param bytes the source byte array
     * @param start index of the most significant byte
     * @return the decoded int value
     */
    public static int readInt(byte[] bytes, int start) {
        // Accumulate the four bytes MSB-first; masking with 0xff undoes sign extension.
        int value = 0;
        for (int i = 0; i < 4; i++) {
            value = (value << 8) | (bytes[start + i] & 0xff);
        }
        return value;
    }
}
public class IPAddressDivision {
    /**
     * If this is equivalent to the mask for a CIDR prefix length block or subnet class, it returns the prefix length.
     * Otherwise, it returns null.
     *
     * A CIDR network mask is an address with all 1s in the network section (the upper bits) and then all 0s in the host section.
     * A CIDR host mask is an address with all 0s in the network section (the lower bits) and then all 1s in the host section.
     * The prefix length is the length of the network section.
     *
     * Note that the prefix length returned here is not necessarily the prefix length used to
     * construct this object: this method asks whether this division's VALUE could serve as a
     * network (or host) mask, independent of how the division itself was prefixed.
     * This method applies only to the lower value of the range if this segment represents multiple values.
     *
     * @see IPAddressSection#getPrefixLengthForSingleBlock()
     * @param network whether to check for a network mask (true) or a host mask (false)
     * @return the prefix length corresponding to this mask, or null if there is no such prefix length
     */
    public Integer getBlockMaskPrefixLength(boolean network) {
        // Number of trailing bits that must be uniform (0s for a network mask,
        // 1s for a host mask) for the value to be a valid mask.
        int hostLength = getTrailingBitCount(network);
        long shifted;
        if (network) {
            // Network mask: after inverting, the low hostLength bits are the only 1s allowed,
            // so shifting them out must leave zero.
            shifted = (~getDivisionValue() & getMaxValue()) >>> hostLength;
        } else {
            // Host mask: the low hostLength bits are the only 1s allowed.
            shifted = getDivisionValue() >>> hostLength;
        }
        // Boxed ternary: the int prefix is autoboxed only when the mask is valid;
        // otherwise null is returned directly.
        return shifted == 0 ? getBitCount() - hostLength : null;
    }
}
public class SibRaConnection { /** * Creates a producer session . Checks that the connection is valid and then
* delegates . Wraps the < code > ProducerSession < / code > returned from the
* delegate in a < code > SibRaProducerSession < / code > .
* @ param destAddr
* the address of the destination
* @ param destType
* the destination type
* @ param orderingContext
* indicates that the order of messages from multiple
* ProducerSessions should be preserved
* @ param alternateUser
* the name of the user under whose authority operations of the
* ProducerSession should be performed ( may be null )
* @ return the producer session
* @ throws SIIncorrectCallException
* if the delegation fails
* @ throws SINotPossibleInCurrentConfigurationException
* if the delegation fails
* @ throws SIErrorException
* if the delegation fails
* @ throws SIResourceException
* if the delegation fails
* @ throws SITemporaryDestinationNotFoundException
* if the delegation fails
* @ throws SINotAuthorizedException
* if the delegation fails
* @ throws SILimitExceededException
* if the delegation fails
* @ throws SIConnectionLostException
* if the delegation fails
* @ throws SIConnectionUnavailableException
* if the connection is not valid
* @ throws SIConnectionDroppedException
* if the delegation fails */
@ Override public ProducerSession createProducerSession ( final SIDestinationAddress destAddr , final DestinationType destType , final OrderingContext orderingContext , final String alternateUser ) throws SIConnectionDroppedException , SIConnectionUnavailableException , SIConnectionLostException , SILimitExceededException , SINotAuthorizedException , SITemporaryDestinationNotFoundException , SIResourceException , SIErrorException , SINotPossibleInCurrentConfigurationException , SIIncorrectCallException { } } | checkValid ( ) ; final ProducerSession session = _delegateConnection . createProducerSession ( destAddr , destType , orderingContext , alternateUser ) ; return new SibRaProducerSession ( this , session ) ; |
public class InstantiationUtil { /** * Loads a class by name from the given input stream and reflectively instantiates it .
* < p > This method will use { @ link DataInputView # readUTF ( ) } to read the class name , and
* then attempt to load the class from the given ClassLoader .
* < p > The resolved class is checked to be equal to or a subtype of the given supertype
* class .
* @ param in The stream to read the class name from .
* @ param cl The class loader to resolve the class .
* @ param supertype A class that the resolved class must extend .
* @ throws IOException Thrown , if the class name could not be read , the class could not be found ,
* or the class is not a subtype of the given supertype class . */
public static < T > Class < T > resolveClassByName ( DataInputView in , ClassLoader cl , Class < ? super T > supertype ) throws IOException { } } | final String className = in . readUTF ( ) ; final Class < ? > rawClazz ; try { rawClazz = Class . forName ( className , false , cl ) ; } catch ( ClassNotFoundException e ) { throw new IOException ( "Could not find class '" + className + "' in classpath." , e ) ; } if ( ! supertype . isAssignableFrom ( rawClazz ) ) { throw new IOException ( "The class " + className + " is not a subclass of " + supertype . getName ( ) ) ; } @ SuppressWarnings ( "unchecked" ) Class < T > clazz = ( Class < T > ) rawClazz ; return clazz ; |
public class ParserUtil {
    /**
     * Parse a BELScript list expression into a {@code String[]} array.
     *
     * @param line the list expression, which should not be {@code null}
     * @return the parsed tokens in the list, or {@code null} if {@code line}
     *         is {@code null} or blank
     */
    public static String[] parseListRecord(String line) {
        // return null if line is blank
        if (noLength(line)) {
            return null;
        }
        // trim away leading/trailing whitespace from list record
        line = line.trim();
        // remove list boundaries
        if (line.startsWith("{")) {
            line = line.substring(1);
        }
        if (line.endsWith("}")) {
            line = line.substring(0, line.length() - 1);
        }
        // return no fields if list record is empty
        if (line.trim().length() == 0) {
            return new String[0];
        }
        // split fields: walk the characters tracking whether we are inside a
        // FIELD_BOUNDARY-delimited field, honoring ESCAPE-d boundary characters.
        final CharacterIterator chit = new StringCharacterIterator(line);
        final List<String> fields = new ArrayList<String>();
        final StringBuilder fb = new StringBuilder();
        boolean inField = false;
        for (char c = chit.first(); c != CharacterIterator.DONE; c = chit.next()) {
            if (!inField && c == FIELD_BOUNDARY) {
                // opening boundary: start a fresh field buffer
                inField = true;
                fb.setLength(0);
            } else if (inField && c == FIELD_BOUNDARY) {
                // boundary inside a field: either an escaped literal boundary char,
                // or the closing boundary of the field
                int mark = chit.getIndex();
                char previousChar = chit.previous();
                if (previousChar == ESCAPE) {
                    // drop the escape char we already buffered and keep the boundary char
                    fb.deleteCharAt(fb.length() - 1);
                    fb.append(c);
                } else {
                    inField = false;
                }
                chit.setIndex(mark);
            } else if (!inField && c == FIELD_SEPARATOR) {
                // separator between fields flushes the previous field's buffer.
                // NOTE(review): the buffer is only reset when a new boundary opens, so
                // this assumes every field is boundary-quoted — confirm against the
                // BELScript grammar.
                fields.add(fb.toString());
            } else if (inField) {
                fb.append(c);
            }
        }
        // flush the final field unless the record ended with a dangling separator.
        // NOTE(review): `line` was stripped of "}" above, so a record ending "...," is
        // treated as having no trailing field — presumably intentional; verify.
        if (!line.endsWith(",")) {
            fields.add(fb.toString());
        }
        return fields.toArray(new String[fields.size()]);
    }
}
public class StatsInterface {
    /**
     * Get a list of referring domains for a collection.
     *
     * @param date
     *            (Required) Stats will be returned for this date. A day according to Flickr Stats starts at midnight GMT for all users, and timestamps will
     *            automatically be rounded down to the start of the day.
     * @param collectionId
     *            (Optional) The id of the collection to get stats for. If not provided, stats for all collections will be returned.
     * @param perPage
     *            (Optional) Number of domains to return per page. If this argument is omitted, it defaults to 25. The maximum allowed value is 100.
     * @param page
     *            (Optional) The page of results to return. If this argument is omitted, it defaults to 1.
     * @return the list of referring domains
     * @throws FlickrException if the API call fails
     * @see "http://www.flickr.com/services/api/flickr.stats.getCollectionDomains.html"
     */
    public DomainList getCollectionDomains(Date date, String collectionId, int perPage, int page) throws FlickrException {
        // Shared helper handles the request; only the method name and id parameter differ
        // between the per-photo/per-set/per-collection variants.
        return getDomains(METHOD_GET_COLLECTION_DOMAINS, "collection_id", collectionId, date, perPage, page);
    }
}
public class ImportExportController { /** * Delete an uPortal database object . This method provides a REST interface for uPortal database
* object deletion .
* < p > The path for this method is / entity / type / identifier . The identifier generally a string
* that may be used as a unique identifier , but is dependent on the entity type . For example , to
* delete the " demo " user one might use the path / entity / user / demo . */
@ RequestMapping ( value = "/entity/{entityType}/{entityId}" , method = RequestMethod . DELETE ) public void deleteEntity ( @ PathVariable ( "entityType" ) String entityType , @ PathVariable ( "entityId" ) String entityId , HttpServletRequest request , HttpServletResponse response ) throws IOException { } } | final IPerson person = personManager . getPerson ( request ) ; final EntityIdentifier ei = person . getEntityIdentifier ( ) ; final IAuthorizationPrincipal ap = AuthorizationServiceFacade . instance ( ) . newPrincipal ( ei . getKey ( ) , ei . getType ( ) ) ; if ( ! ap . hasPermission ( IPermission . PORTAL_SYSTEM , IPermission . DELETE_ACTIVITY , entityType ) ) { response . setStatus ( HttpServletResponse . SC_UNAUTHORIZED ) ; return ; } // get the task associated with exporting this entity type
portalDataHandlerService . deleteData ( entityType , entityId ) ; response . setStatus ( HttpServletResponse . SC_NO_CONTENT ) ; |
public class V2ManifestModel { /** * { @ inheritDoc } */
@ Override public ManifestModel setRemote ( RemoteModel remote ) { } } | Configuration config = getModelConfiguration ( ) ; config . removeChildren ( _jmsRemoteQName ) ; config . removeChildren ( _restRemoteQName ) ; setChildModel ( remote ) ; _remote = remote ; return this ; |
public class Pipe {
    /**
     * Sizes the pipe using a slope equal to that of the ground. Essentially the same
     * as getHeight2, the only difference being that the pipe is sized with the
     * ground slope.
     *
     * @param slope [%] slope of the reach being sized, equal to the ground slope
     * @param g     fill degree to use when designing the network
     * @param maxd  [cm] largest diameter or height adopted in the upstream reaches
     *              (NOTE(review): not referenced in this method body — confirm whether
     *              it should constrain the result)
     * @param c     ratio of base to height of the trapezoidal section
     * @return [cm] the base of the final trapezoidal section
     */
    private double getHeight2I(double slope, double g, double maxd, double c) {
        /* B equals A * (Rh^(1/6)). */
        double B;
        /* oldD [cm] height of the trapezoidal section computed by imposing a slope
         * equal to the ground slope. */
        double oldD;
        /* D [cm] height of the trapezoidal section (rounded up). */
        double D;
        /* oldb [cm] base of the trapezoidal section. */
        double oldb;
        /* b [cm] base of the final trapezoidal section. */
        double b;
        /* rh [cm] hydraulic radius. */
        double rh;
        /* B = A (Rh^(2/3)) [m^(13/6)] — from Manning's formula with the ground slope
         * (slope converted from % to a ratio via METER2CM). */
        B = (discharge) / (CUBICMETER2LITER * ks * sqrt(slope / METER2CM));
        /* Height of the section [cm]. */
        oldD = METER2CM * pow(B, THREEOVEREIGHT) * pow((c + 2 * sqrt(2) * g), ONEOVERFOUR) / pow((g * (g + c)), FIVEOVEREIGHT);
        /* [cm] base derived from the base/height ratio. */
        oldb = c * oldD;
        /* Commercial sizing: round the height up to a whole centimeter. */
        D = ceil(oldD);
        b = c * D;
        diameter = D;
        /* rh [cm] hydraulic radius of the (pre-rounding) section. */
        rh = g * oldD * (g * oldD + oldb) / (oldb + 2 * sqrt(2) * g * oldD);
        /* Fill degree of the pipe, recomputed for the rounded section. */
        emptyDegree = ((2 * sqrt(2) * rh * D - D * b) + sqrt(pow((D * b - 2 * sqrt(2) * rh * D), 2) + 4 * D * D * rh * b)) / (2 * D * D);
        /* Slope of the designed reach [%]: by construction, the ground slope. */
        pipeSlope = slope;
        return b;
    }
}
public class LogBuffer { /** * Return next 16 - bit unsigned int from buffer . ( big - endian )
* @ see mysql - 5.6.10 / include / myisampack . h - mi _ usint2korr */
public final int getBeUint16 ( ) { } } | if ( position + 1 >= origin + limit ) throw new IllegalArgumentException ( "limit excceed: " + ( position - origin + 1 ) ) ; byte [ ] buf = buffer ; return ( ( 0xff & buf [ position ++ ] ) << 8 ) | ( 0xff & buf [ position ++ ] ) ; |
public class RectangularPrism { /** * Returns the radius of a circumscribed sphere ( length of diagonal of
* rectangular prism / 2 , that goes through at least four vertices
* @ return the cirumscribedRadius */
@ Override public double getCirumscribedRadius ( ) { } } | return 0.5 * Math . sqrt ( width * width + height * height + length * length ) ; |
public class BinaryHeapQueue {
    /**
     * Returns the Queueable on top of heap and removes it.
     *
     * <p>NOTE: despite the {@code throws} clause in the signature, this implementation
     * does NOT throw when the heap is empty — it returns {@code null} instead. The
     * clause is retained for interface/source compatibility.
     *
     * @return the Activation at the top of the heap, or {@code null} if the heap is empty
     */
    public Activation dequeue() throws NoSuchElementException {
        if (isEmpty()) {
            return null;
        }
        // The heap is 1-indexed: elements[1] is the root/top.
        final Activation result = this.elements[1];
        // Remove by queue index, letting the indexed overload re-heapify.
        dequeue(result.getQueueIndex());
        return result;
    }
}
public class DescribeEnvironmentManagedActionHistoryResult { /** * A list of completed and failed managed actions .
* @ param managedActionHistoryItems
* A list of completed and failed managed actions . */
public void setManagedActionHistoryItems ( java . util . Collection < ManagedActionHistoryItem > managedActionHistoryItems ) { } } | if ( managedActionHistoryItems == null ) { this . managedActionHistoryItems = null ; return ; } this . managedActionHistoryItems = new com . amazonaws . internal . SdkInternalList < ManagedActionHistoryItem > ( managedActionHistoryItems ) ; |
public class ConfigValueFactory {
    /**
     * See the fromAnyRef() documentation for details. This is a typesafe
     * wrapper that only works on {@link java.util.Map} and returns
     * {@link ConfigObject} rather than {@link ConfigValue}.
     *
     * <p>If your Map has a key "foo.bar" then you will lookup one object with a key
     * called "foo.bar", rather than an object with a key "foo" containing
     * another object with a key "bar". The keys in the map are keys; not path
     * expressions. That is, the Map corresponds exactly to a single
     * {@code ConfigObject}. The keys will not be parsed or modified, and the
     * values are wrapped in ConfigValue. To lookup nested {@code ConfigObject},
     * some of the values in the map would have to be more maps.
     *
     * <p>See also {@link ConfigFactory#parseMap(Map, String)} which interprets the
     * keys in the map as path expressions.
     *
     * @param values map to convert
     * @param originDescription description to use for the value's origin
     * @return a new {@link ConfigObject} value
     */
    public static ConfigObject fromMap(Map<String, ? extends Object> values, String originDescription) {
        // fromAnyRef() always produces a ConfigObject for a Map input, so this cast is safe.
        return (ConfigObject) fromAnyRef(values, originDescription);
    }
}
public class JPAComponentImpl {
    /**
     * Locates and processes all persistence.xml files in a WAR module.
     *
     * @param applInfo         the application archive information
     * @param warContainerInfo the WAR module container information
     * @param warClassLoader   the class loader for the WAR module
     */
    private void processWebModulePersistenceXml(JPAApplInfo applInfo, ContainerInfo warContainerInfo, ClassLoader warClassLoader) {
        final boolean isTraceOn = TraceComponent.isAnyTracingEnabled();
        if (isTraceOn && tc.isEntryEnabled()) {
            Tr.entry(tc, "processWebModulePersistenceXml : " + applInfo.getApplName() + "#" + warContainerInfo);
        }
        String archiveName = warContainerInfo.getName();
        Container warContainer = warContainerInfo.getContainer();
        // JPA 2.0 Specification - 8.2 Persistence Unit Packaging
        // A persistence unit is defined by a persistence.xml file. The jar file or
        // directory whose META-INF directory contains the persistence.xml file is
        // termed the root of the persistence unit. In Java EE environments, the
        // root of a persistence unit may be one of the following:
        //   -> the WEB-INF/classes directory of a WAR file
        //   -> a jar file in the WEB-INF/lib directory of a WAR file
        // Obtain any persistence.xml in WEB-INF/classes/META-INF
        Entry pxml = warContainer.getEntry("WEB-INF/classes/META-INF/persistence.xml");
        if (pxml != null) {
            String appName = applInfo.getApplName();
            URL puRoot = getPXmlRootURL(appName, archiveName, pxml);
            applInfo.addPersistenceUnits(new OSGiJPAPXml(applInfo, archiveName, JPAPuScope.Web_Scope, puRoot, warClassLoader, pxml));
        }
        // Obtain any persistence.xml in WEB-INF/lib/*.jar. This includes 'utility'
        // jars and web fragments. Any PUs found are WEB scoped and considered to
        // be in the WAR, so just use the WAR archiveName (don't use a root prefix
        // that is prepended to the jar/fragment name).
        Entry webInfLib = warContainer.getEntry("WEB-INF/lib/");
        if (webInfLib != null) {
            try {
                Container webInfLibContainer = webInfLib.adapt(Container.class);
                processLibraryJarPersistenceXml(applInfo, webInfLibContainer, archiveName, null, JPAPuScope.Web_Scope, warClassLoader);
            } catch (UnableToAdaptException ex) {
                // Should never occur... just propagate failure
                throw new RuntimeException("Failure locating persistence.xml", ex);
            }
        }
        if (isTraceOn && tc.isEntryEnabled())
            Tr.exit(tc, "processWebModulePersistenceXml : " + applInfo.getApplName() + "#" + warContainer);
    }
}
public class Coref {
    /**
     * Setter for corefType (UIMA JCas generated feature setter).
     *
     * @generated
     * @param v value to set into the feature
     */
    public void setCorefType(String v) {
        // Standard generated guard: fail clearly if the type system lacks this feature.
        if (Coref_Type.featOkTst && ((Coref_Type) jcasType).casFeat_corefType == null)
            jcasType.jcas.throwFeatMissing("corefType", "de.julielab.jules.types.muc7.Coref");
        // Write through the low-level CAS using the generated feature code.
        jcasType.ll_cas.ll_setStringValue(addr, ((Coref_Type) jcasType).casFeatCode_corefType, v);
    }
}
public class FnBigInteger { /** * Determines whether the target object and the specified object are NOT equal
* by calling the < tt > equals < / tt > method on the target object .
* @ param object the { @ link BigInteger } to compare to the target
* @ return false if both objects are equal , true if not . */
public static final Function < BigInteger , Boolean > notEq ( final BigInteger object ) { } } | return ( Function < BigInteger , Boolean > ) ( ( Function ) FnObject . notEq ( object ) ) ; |
public class AbstractDoclet { /** * Iterate through all classes and construct documentation for them .
* @ param root the RootDoc of source to document .
* @ param classtree the data structure representing the class tree . */
protected void generateClassFiles ( RootDoc root , ClassTree classtree ) { } } | generateClassFiles ( classtree ) ; PackageDoc [ ] packages = root . specifiedPackages ( ) ; for ( PackageDoc pkg : packages ) { generateClassFiles ( pkg . allClasses ( ) , classtree ) ; } |
public class AzkabanExecutorServer { /** * Get the hostname
* @ return hostname */
public String getHost ( ) { } } | if ( this . props . containsKey ( ConfigurationKeys . AZKABAN_SERVER_HOST_NAME ) ) { final String hostName = this . props . getString ( Constants . ConfigurationKeys . AZKABAN_SERVER_HOST_NAME ) ; if ( ! StringUtils . isEmpty ( hostName ) ) { return hostName ; } } String host = "unkownHost" ; try { host = InetAddress . getLocalHost ( ) . getCanonicalHostName ( ) ; } catch ( final Exception e ) { logger . error ( "Failed to fetch LocalHostName" ) ; } return host ; |
public class KeyGroupRangeAssignment { /** * Computes the range of key - groups that are assigned to a given operator under the given parallelism and maximum
* parallelism .
* IMPORTANT : maxParallelism must be < = Short . MAX _ VALUE to avoid rounding problems in this method . If we ever want
* to go beyond this boundary , this method must perform arithmetic on long values .
* @ param maxParallelism Maximal parallelism that the job was initially created with .
* @ param parallelism The current parallelism under which the job runs . Must be < = maxParallelism .
* @ param operatorIndex Id of a key - group . 0 < = keyGroupID < maxParallelism .
* @ return the computed key - group range for the operator . */
public static KeyGroupRange computeKeyGroupRangeForOperatorIndex ( int maxParallelism , int parallelism , int operatorIndex ) { } } | checkParallelismPreconditions ( parallelism ) ; checkParallelismPreconditions ( maxParallelism ) ; Preconditions . checkArgument ( maxParallelism >= parallelism , "Maximum parallelism must not be smaller than parallelism." ) ; int start = ( ( operatorIndex * maxParallelism + parallelism - 1 ) / parallelism ) ; int end = ( ( operatorIndex + 1 ) * maxParallelism - 1 ) / parallelism ; return new KeyGroupRange ( start , end ) ; |
public class MessageStoreImpl { /** * ( non - Javadoc )
* @ see com . ibm . ws . sib . msgstore . MessageStore # findById ( long ) */
@ Override public AbstractItem findById ( long itemID ) throws MessageStoreException { } } | if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( this , tc , "findById" , Long . valueOf ( itemID ) ) ; AbstractItem item = _findById ( itemID ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( this , tc , "findById" , item ) ; return item ; |
public class HttpOutputStream { public void destroy ( ) { } } | if ( _bufferedOut != null ) _bufferedOut . destroy ( ) ; _bufferedOut = null ; if ( _iso8859writer != null ) _iso8859writer . destroy ( ) ; _iso8859writer = null ; if ( _utf8writer != null ) _utf8writer . destroy ( ) ; _utf8writer = null ; if ( _asciiwriter != null ) _asciiwriter . destroy ( ) ; _asciiwriter = null ; |
public class CommerceVirtualOrderItemLocalServiceBaseImpl { /** * Deletes the commerce virtual order item from the database . Also notifies the appropriate model listeners .
* @ param commerceVirtualOrderItem the commerce virtual order item
* @ return the commerce virtual order item that was removed */
@ Indexable ( type = IndexableType . DELETE ) @ Override public CommerceVirtualOrderItem deleteCommerceVirtualOrderItem ( CommerceVirtualOrderItem commerceVirtualOrderItem ) { } } | return commerceVirtualOrderItemPersistence . remove ( commerceVirtualOrderItem ) ; |
public class JDBCPersistenceManagerImpl {
    /**
     * Builds the SQL LIKE wildcard that matches partition-level job instance names
     * for a given top-level job instance and step.
     *
     * <p>Obviously it would be nice if the code writing this special format were in
     * the same place as this code reading it. Assumes a format like:
     * <pre>
     * JOBINSTANCEDATA (jobinstanceid, name, ...)
     * 1197, "partitionMetrics", "NOTSET"
     * 1198, ":1197:step1:0", "NOTSET"
     * 1199, ":1197:step1:1", "NOTSET"
     * 1200, ":1197:step2:0", "NOTSET"
     * </pre>
     *
     * @param rootJobInstanceId job instance id of the top-level job
     * @param stepName          step name of the top-level step
     * @return the pattern {@code ":<rootJobInstanceId>:<stepName>:%"}
     */
    private String getPartitionLevelJobInstanceWildCard(long rootJobInstanceId, String stepName) {
        // Simple concatenation produces the same ":id:step:%" pattern as the
        // original StringBuilder sequence.
        return ":" + rootJobInstanceId + ":" + stepName + ":%";
    }
}
public class JsUtils { /** * Set a property to a javascript object . */
public static void prop ( JavaScriptObject o , Object id , Object val ) { } } | if ( o != null ) { o . < JsCache > cast ( ) . put ( id , val ) ; } |
public class SequenceRecordReaderDataSetIterator {
    /**
     * Load multiple sequence examples to a DataSet, using the provided RecordMetaData
     * instances.
     *
     * @param list List of RecordMetaData instances to load from. Should have been
     *             produced by the record reader provided to the
     *             SequenceRecordReaderDataSetIterator constructor
     * @return DataSet with the specified examples
     * @throws IOException If an error occurs during loading of the data
     */
    public DataSet loadFromMetaData(List<RecordMetaData> list) throws IOException {
        // Lazily initialize the underlying iterator from the first record's metadata;
        // this must happen before mdsToDataSet / underlying.loadFromMetaData below.
        if (underlying == null) {
            SequenceRecord r = recordReader.loadSequenceFromMetaData(list.get(0));
            initializeUnderlying(r);
        }
        // Two cases: single vs. multiple reader...
        List<RecordMetaData> l = new ArrayList<>(list.size());
        if (singleSequenceReaderMode) {
            // Single reader: wrap each metadata instance under the lone READER_KEY.
            for (RecordMetaData m : list) {
                l.add(new RecordMetaDataComposableMap(Collections.singletonMap(READER_KEY, m)));
            }
        } else {
            // Separate feature/label readers: metadata is expected to be composable,
            // with element 0 for the feature reader and element 1 for the label
            // reader (order implied by the READER_KEY / READER_KEY_LABEL mapping).
            for (RecordMetaData m : list) {
                RecordMetaDataComposable rmdc = (RecordMetaDataComposable) m;
                Map<String, RecordMetaData> map = new HashMap<>(2);
                map.put(READER_KEY, rmdc.getMeta()[0]);
                map.put(READER_KEY_LABEL, rmdc.getMeta()[1]);
                l.add(new RecordMetaDataComposableMap(map));
            }
        }
        return mdsToDataSet(underlying.loadFromMetaData(l));
    }
}
public class KafkaCanalConnector { /** * 订阅topic */
@ Override public void subscribe ( ) { } } | waitClientRunning ( ) ; if ( ! running ) { return ; } if ( partition == null ) { if ( kafkaConsumer != null ) { kafkaConsumer . subscribe ( Collections . singletonList ( topic ) ) ; } if ( kafkaConsumer2 != null ) { kafkaConsumer2 . subscribe ( Collections . singletonList ( topic ) ) ; } } else { TopicPartition topicPartition = new TopicPartition ( topic , partition ) ; if ( kafkaConsumer != null ) { kafkaConsumer . assign ( Collections . singletonList ( topicPartition ) ) ; } if ( kafkaConsumer2 != null ) { kafkaConsumer2 . assign ( Collections . singletonList ( topicPartition ) ) ; } } |
public class EvernoteUtil { /** * Returns an Intent to query the bootstrap profile name from the main Evernote app . This is useful
* if you want to use the main app to authenticate the user and he is already signed in .
* @ param context The { @ link Context } starting the { @ link Intent } .
* @ param evernoteSession The current session .
* @ return An Intent to query the bootstrap profile name . Returns { @ code null } , if the main app
* is not installed , not up to date or you do not want to use the main app to authenticate the
* user . */
public static Intent createGetBootstrapProfileNameIntent ( Context context , EvernoteSession evernoteSession ) { } } | if ( evernoteSession . isForceAuthenticationInThirdPartyApp ( ) ) { // we don ' t want to use the main app , return null
return null ; } EvernoteUtil . EvernoteInstallStatus installStatus = EvernoteUtil . getEvernoteInstallStatus ( context , EvernoteUtil . ACTION_GET_BOOTSTRAP_PROFILE_NAME ) ; if ( ! EvernoteUtil . EvernoteInstallStatus . INSTALLED . equals ( installStatus ) ) { return null ; } return new Intent ( EvernoteUtil . ACTION_GET_BOOTSTRAP_PROFILE_NAME ) . setPackage ( PACKAGE_NAME ) ; |
public class ProcessingConfigurationMarshaller { /** * Marshall the given parameter object . */
public void marshall ( ProcessingConfiguration processingConfiguration , ProtocolMarshaller protocolMarshaller ) { } } | if ( processingConfiguration == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( processingConfiguration . getEnabled ( ) , ENABLED_BINDING ) ; protocolMarshaller . marshall ( processingConfiguration . getProcessors ( ) , PROCESSORS_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class ConnectionService { /** * Returns a list of connection records objects which are backed by the
* models in the given list .
* @ param models
* The model objects to use to back the connection record objects
* within the returned list .
* @ return
* A list of connection record objects which are backed by the models
* in the given list . */
protected List < ConnectionRecord > getObjectInstances ( List < ConnectionRecordModel > models ) { } } | // Create new list of records by manually converting each model
List < ConnectionRecord > objects = new ArrayList < ConnectionRecord > ( models . size ( ) ) ; for ( ConnectionRecordModel model : models ) objects . add ( getObjectInstance ( model ) ) ; return objects ; |
public class TextProcessUtility {
    /**
     * Preprocesses text: replaces every punctuation character with a space, collapses
     * runs of whitespace into a single space, and lower-cases the result.
     * (Original doc also mentioned stop-word removal, but no stop-word filtering is
     * performed here.)
     *
     * @param text the input text; must not be {@code null}
     * @return the normalized text (note: a trailing space may remain if the input
     *         ended in punctuation/whitespace, matching the original behavior)
     */
    public static String preprocess(String text) {
        // Fix: use Locale.ROOT for locale-independent lower-casing. The previous
        // Locale.getDefault() produced wrong results under locale-sensitive casing
        // rules (e.g. Turkish dotless-i for 'I').
        return text.replaceAll("\\p{P}", " ")
                   .replaceAll("\\s+", " ")
                   .toLowerCase(Locale.ROOT);
    }
}
public class RatePlanReader {
    /**
     * Make the request to the Twilio API to perform the read.
     *
     * @param client TwilioRestClient with which to make the request
     * @return RatePlan ResourceSet
     */
    @Override
    public ResourceSet<RatePlan> read(final TwilioRestClient client) {
        // Only the first page is fetched eagerly here; the ResourceSet presumably
        // pages through subsequent results on iteration (Twilio reader convention —
        // confirm against the ResourceSet implementation).
        return new ResourceSet<>(this, client, firstPage(client));
    }
}
public class OWLSubAnnotationPropertyOfAxiomImpl_CustomFieldSerializer {
    /**
     * Serializes the content of the object into the
     * {@link com.google.gwt.user.client.rpc.SerializationStreamWriter}.
     *
     * @param streamWriter the {@link com.google.gwt.user.client.rpc.SerializationStreamWriter}
     *                     to write the object's content to
     * @param instance     the object instance to serialize
     * @throws com.google.gwt.user.client.rpc.SerializationException
     *         if the serialization operation is not successful
     */
    @Override
    public void serializeInstance(SerializationStreamWriter streamWriter,
            OWLSubAnnotationPropertyOfAxiomImpl instance) throws SerializationException {
        // Delegates to the static serialize(...) helper, which performs the actual
        // field-by-field writing for this GWT custom field serializer.
        serialize(streamWriter, instance);
    }
}
public class WebDriverHelper { /** * Returns a DesiredCapabilities object specific to the { @ code browser }
* being sent as parameter . If the browser does not exists if default to
* firefox .
* @ param browser
* the browser name
* @ return a specific DesiredCapabilities object corresponding to the
* supplied browser */
private static DesiredCapabilities getCapabilitiesBrowser ( final String browser ) { } } | DesiredCapabilities capabilities = null ; Method [ ] methods = DesiredCapabilities . class . getDeclaredMethods ( ) ; for ( Method m : methods ) { if ( m . getName ( ) . equalsIgnoreCase ( browser ) ) { try { capabilities = ( DesiredCapabilities ) m . invoke ( null , ( Object [ ] ) null ) ; } catch ( Exception e ) { e . printStackTrace ( ) ; LOG . debug ( "Browser: " + browser + " is not valid. Defaulting to Firefox!" ) ; } } } if ( capabilities == null ) { LOG . debug ( "Browser: " + browser + " is not valid. Defaulting to Firefox!" ) ; capabilities = DesiredCapabilities . firefox ( ) ; } return capabilities ; |
public class SampledHTTPRequestMarshaller { /** * Marshall the given parameter object . */
public void marshall ( SampledHTTPRequest sampledHTTPRequest , ProtocolMarshaller protocolMarshaller ) { } } | if ( sampledHTTPRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( sampledHTTPRequest . getRequest ( ) , REQUEST_BINDING ) ; protocolMarshaller . marshall ( sampledHTTPRequest . getWeight ( ) , WEIGHT_BINDING ) ; protocolMarshaller . marshall ( sampledHTTPRequest . getTimestamp ( ) , TIMESTAMP_BINDING ) ; protocolMarshaller . marshall ( sampledHTTPRequest . getAction ( ) , ACTION_BINDING ) ; protocolMarshaller . marshall ( sampledHTTPRequest . getRuleWithinRuleGroup ( ) , RULEWITHINRULEGROUP_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class TokenApprovalStore { /** * Extract the implied approvals from any tokens associated with the user and client id supplied .
* @ see org . springframework . security . oauth2 . provider . approval . ApprovalStore # getApprovals ( java . lang . String ,
* java . lang . String ) */
@ Override public Collection < Approval > getApprovals ( String userId , String clientId ) { } } | Collection < Approval > result = new HashSet < Approval > ( ) ; Collection < OAuth2AccessToken > tokens = store . findTokensByClientIdAndUserName ( clientId , userId ) ; for ( OAuth2AccessToken token : tokens ) { OAuth2Authentication authentication = store . readAuthentication ( token ) ; if ( authentication != null ) { Date expiresAt = token . getExpiration ( ) ; for ( String scope : token . getScope ( ) ) { result . add ( new Approval ( userId , clientId , scope , expiresAt , ApprovalStatus . APPROVED ) ) ; } } } return result ; |
public class JpaBaseRepository { /** * { @ inheritDoc } */
@ Override public void remove ( ID entityId ) { } } | T entity = getEntityManager ( ) . find ( getEntityClass ( ) , entityId ) ; // Case of attached entity - simply remove it
if ( getEntityManager ( ) . contains ( entity ) ) { getEntityManager ( ) . remove ( entity ) ; } // Case of unattached entity , first it is necessary to perform
// a merge , before doing the remove
else { entity = getEntityManager ( ) . merge ( entity ) ; getEntityManager ( ) . remove ( entity ) ; } |
public class ResilientOperation {
    /**
     * Returns a {@link CheckedCallable} that encompasses a
     * {@link AbstractGoogleClientRequest} and can be used to retry the execution for
     * an AbstractGoogleClientRequest.
     *
     * @param <V>     the result type produced by executing the request
     * @param request the AbstractGoogleClientRequest to turn into a {@link CheckedCallable}
     * @return a CheckedCallable object that attempts an AbstractGoogleClientRequest
     */
    public static <V> CheckedCallable<V, IOException> getGoogleRequestCallable(
            AbstractGoogleClientRequest<V> request) {
        // Thin adapter: the actual execution/retry behavior lives in the executor wrapper.
        return new AbstractGoogleClientRequestExecutor<>(request);
    }
}
public class FuncNormalizeSpace { /** * Execute an expression in the XPath runtime context , and return the
* result of the expression .
* @ param xctxt The XPath runtime context .
* @ return The result of the expression in the form of a < code > XObject < / code > .
* @ throws javax . xml . transform . TransformerException if a runtime exception
* occurs . */
public void executeCharsToContentHandler ( XPathContext xctxt , ContentHandler handler ) throws javax . xml . transform . TransformerException , org . xml . sax . SAXException { } } | if ( Arg0IsNodesetExpr ( ) ) { int node = getArg0AsNode ( xctxt ) ; if ( DTM . NULL != node ) { DTM dtm = xctxt . getDTM ( node ) ; dtm . dispatchCharactersEvents ( node , handler , true ) ; } } else { XObject obj = execute ( xctxt ) ; obj . dispatchCharactersEvents ( handler ) ; } |
public class UploadPhoto { /** * The assumption here is that for a given set only unique file - names will be loaded and the title field can be used . Later change to use the tags field (
* OrigFileName ) and strip off the suffix .
* @ param filename
* @ return */
private boolean checkIfLoaded ( String filename ) { } } | String title ; if ( basefilename . lastIndexOf ( '.' ) > 0 ) title = basefilename . substring ( 0 , basefilename . lastIndexOf ( '.' ) ) ; else return false ; if ( filePhotos . containsKey ( title ) ) return true ; return false ; |
public class FileProperties { /** * This class will try to find the current value " name " into the System
* Properties or into the default file . If it can ' t be found it will use the
* mother class La propiededd tien eque ser de la fdorma siguiente
* es . nombre . Clase . nombrePropiedad */
private String getValue ( Class < ? > section , String name ) { } } | if ( section == null || name == null ) { log . warn ( "Section or name is null, Section:'" + section + "' name:'" + name + "'" ) ; return null ; } if ( fildes != null && lastModified != fildes . lastModified ( ) ) { getFileProperties ( filePath ) ; } // Getting the full name
String propertyName = getPropertyName ( section , name ) ; // The tryng from file properties
Object obj = fileProperties . get ( propertyName ) ; if ( obj != null ) { if ( log . isDebugEnabled ( ) ) { log . debug ( "propertyName:'" + propertyName + "' and value:'" + obj + "' from file:'" + filePath + "'" ) ; } if ( ObjectUtils . NULL . equals ( obj ) ) { return null ; } else { return ( String ) obj ; } } // Else using the short name into the file properties
String ret = ( String ) fileProperties . get ( name ) ; if ( ret != null ) { return ( String ) ret ; } // Si es nulo al final se guardara en la cache
// / / try from the default path
// Properties prop = FilePropertyUtils . getProperties ( section ) ;
// if ( prop ! = null ) {
// / / try the full cualified name
// ret = prop . getProperty ( name ) ;
// if ( ret ! = null ) {
// log . debug ( " propertyName : ' " + propertyName + " ' from file . prop " ) ;
// fileProperties . put ( propertyName , ret ) ;
// return ( String ) ret ;
// Getting property from system Propertie
ret = ( String ) System . getProperty ( propertyName ) ; if ( ret != null ) { if ( log . isDebugEnabled ( ) ) { log . debug ( "Property name:'" + propertyName + "' found in system properties." ) ; } return ret ; } // Sets the NULL value to this key
fileProperties . put ( propertyName , ObjectUtils . NULL ) ; return null ; |
public class ConcurrentTokenizer {
    /**
     * Initializes the type system.
     *
     * @param typeSystem the CAS type system to resolve features against
     * @throws AnalysisEngineProcessException if the base initialization or feature
     *         lookup fails
     */
    public void typeSystemInit(TypeSystem typeSystem) throws AnalysisEngineProcessException {
        super.typeSystemInit(typeSystem);
        // Resolve the double-valued probability feature on the token type; the
        // helper's name suggests this is optional and may resolve to null when the
        // feature is not declared — confirm against AnnotatorUtil.
        probabilityFeature = AnnotatorUtil.getOptionalFeatureParameter(context, tokenType,
            UimaUtil.PROBABILITY_FEATURE_PARAMETER, CAS.TYPE_NAME_DOUBLE);
    }
}
public class BoundedBuffer {
    /**
     * @awisniew - ADDED
     * (non-Javadoc)
     *
     * Removes and returns the head element, preferring expedited entries; returns
     * null when the buffer is empty.
     *
     * @see java.util.Queue#poll()
     */
    @Override
    public T poll() {
        T old = null;
        boolean expedited = false;
        // Extraction and slot-counter decrement happen inside one critical section
        // so the used-slot accounting stays consistent with the buffer contents.
        synchronized (this) {
            if (numberOfUsedExpeditedSlots.get() > 0) {
                // Expedited entries are drained before normal ones.
                old = expeditedExtract();
                numberOfUsedExpeditedSlots.getAndDecrement();
                expedited = true;
            } else if (numberOfUsedSlots.get() > 0) {
                old = extract();
                numberOfUsedSlots.getAndDecrement();
            }
        }
        // Notification is deliberately done outside the lock; expedited removals
        // currently issue no notification (see TODO).
        if (old != null) {
            // TODO if expedited is added for put or offer with timeout add notification here
            if (!expedited)
                notifyPut_();
        }
        return old;
    }
}
public class ConfigurationUtils { /** * Loads cluster default values from the meta master .
* Only client scope properties will be loaded .
* If cluster level configuration has been loaded or the feature of loading configuration from
* meta master is disabled , no RPC will be issued .
* @ param address the master address
* @ param conf configuration to use
* @ return a configuration object containing the original configuration merged with cluster
* defaults , or the original object if the cluster defaults have already been loaded */
public static AlluxioConfiguration loadClusterDefaults ( InetSocketAddress address , AlluxioConfiguration conf ) throws AlluxioStatusException { } } | if ( shouldLoadClusterConfiguration ( conf ) ) { GetConfigurationPResponse response = loadConfiguration ( address , conf ) ; conf = loadClusterConfiguration ( response , conf ) ; } return conf ; |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.