signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class AmazonConnectClient { /** * Returns a < code > UserSummaryList < / code > , which is an array of < code > UserSummary < / code > objects . * @ param listUsersRequest * @ return Result of the ListUsers operation returned by the service . * @ throws InvalidRequestException * The request is not valid . * @ throws InvalidParameterException * One or more of the parameters provided to the operation are not valid . * @ throws ResourceNotFoundException * The specified resource was not found . * @ throws ThrottlingException * The throttling limit has been exceeded . * @ throws InternalServiceException * Request processing failed due to an error or failure with the service . * @ sample AmazonConnect . ListUsers * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / connect - 2017-08-08 / ListUsers " target = " _ top " > AWS API * Documentation < / a > */ @ Override public ListUsersResult listUsers ( ListUsersRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeListUsers ( request ) ;
public class BiConsumerCallbackManager { /** * Register a new callback * @ param symbol * @ param callback * @ throws BitfinexClientException */ public void registerCallback ( final S symbol , final BiConsumer < S , T > callback ) throws BitfinexClientException { } }
final List < BiConsumer < S , T > > callbackList = callbacks . computeIfAbsent ( symbol , ( k ) -> new CopyOnWriteArrayList < > ( ) ) ; callbackList . add ( callback ) ;
public class OfferService { /** * This methods arranges for the data node to send the block report at the next heartbeat . */ public void scheduleBlockReport ( long delay ) { } }
if ( delay > 0 ) { // send BR after random delay lastBlockReport = System . currentTimeMillis ( ) - ( anode . blockReportInterval - R . nextInt ( ( int ) ( delay ) ) ) ; } else { // send at next heartbeat lastBlockReport = lastHeartbeat - anode . blockReportInterval ; } resetBlockReportTime = true ; // reset future BRs for randomness
public class SSECustomerKey { /** * Constructs a new SSECustomerKey that can be used for generating the * presigned URL ' s . * Currently , " AES256 " is the only supported algorithm . * @ see SSEAlgorithm # AES256 * @ param algorithm * The server - side encryption algorithm to use with this * customer - provided server - side encryption key ; must not be * null . * @ throws IllegalArgumentException * if the input parameter is null . */ public static SSECustomerKey generateSSECustomerKeyForPresignUrl ( String algorithm ) { } }
if ( algorithm == null ) throw new IllegalArgumentException ( ) ; return new SSECustomerKey ( ) . withAlgorithm ( algorithm ) ;
public class InfoList { /** * Get the String representations of all items in this list , by calling { @ code toString ( ) } on each item in the * list . * @ return The String representations of all items in this list , by calling { @ code toString ( ) } on each item in * the list . */ public List < String > getAsStrings ( ) { } }
if ( this . isEmpty ( ) ) { return Collections . emptyList ( ) ; } else { final List < String > toStringVals = new ArrayList < > ( this . size ( ) ) ; for ( final T i : this ) { toStringVals . add ( i == null ? "null" : i . toString ( ) ) ; } return toStringVals ; }
public class NumFailedJobMetric { /** * Listen for events to maintain correct value of number of failed jobs { @ inheritDoc } * @ see azkaban . event . EventListener # handleEvent ( azkaban . event . Event ) */ @ Override public synchronized void handleEvent ( final Event event ) { } }
if ( event . getType ( ) == EventType . JOB_FINISHED && Status . FAILED . equals ( event . getData ( ) . getStatus ( ) ) ) { this . value = this . value + 1 ; }
public class ContextualStoreImpl { /** * Given a particular id , return the correct contextual . For contextuals * which aren ' t passivation capable , the contextual can ' t be found in another * container , and null will be returned . * @ param id An identifier for the contextual * @ return the contextual */ public < C extends Contextual < I > , I > C getContextual ( String id ) { } }
return this . < C , I > getContextual ( new StringBeanIdentifier ( id ) ) ;
public class ListFacesResult { /** * An array of < code > Face < / code > objects . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setFaces ( java . util . Collection ) } or { @ link # withFaces ( java . util . Collection ) } if you want to override the * existing values . * @ param faces * An array of < code > Face < / code > objects . * @ return Returns a reference to this object so that method calls can be chained together . */ public ListFacesResult withFaces ( Face ... faces ) { } }
if ( this . faces == null ) { setFaces ( new java . util . ArrayList < Face > ( faces . length ) ) ; } for ( Face ele : faces ) { this . faces . add ( ele ) ; } return this ;
public class AbstractSingleton { /** * Set all internal status variables to the values read from the specified * { @ link ObjectInputStream } . This can be used to make singletons * serializable . * @ param aOIS * The input stream to read from . May not be < code > null < / code > . * @ throws IOException * In case reading failed * @ throws ClassNotFoundException * In case reading failed */ protected final void readAbstractSingletonFields ( @ Nonnull final ObjectInputStream aOIS ) throws IOException , ClassNotFoundException { } }
m_aStatus = ( BitSet ) aOIS . readObject ( ) ;
public class Numeraire { /** * This method returns the value random variable of the product within the specified model , evaluated at a given evalutationTime . * Note : For a lattice this is often the value conditional to evalutationTime , for a Monte - Carlo simulation this is the ( sum of ) value discounted to evaluation time . * cash - flows prior evaluationTime are not considered . * @ param evaluationTime The time on which this products value should be observed . * @ param model The model used to price the product . * @ return The random variable representing the value of the product discounted to evaluation time * @ throws net . finmath . exception . CalculationException Thrown if the valuation fails , specific cause may be available via the < code > cause ( ) < / code > method . */ @ Override public RandomVariableInterface getValue ( double evaluationTime , LIBORModelMonteCarloSimulationInterface model ) throws CalculationException { } }
return model . getNumeraire ( evaluationTime ) ;
public class CATSyncAsynchReader {

    /**
     * Sends a JsMessage back to the client in response to a synchronous receive.
     * If the FAP level is 9 or above the message can be sent back to the client
     * in chunks (rather than as a single large message) if the message is big
     * enough to make this worthwhile.
     *
     * @param jsMessage the message to send
     * @throws SIResourceException if the encode/send fails
     */
    private void sendMessageToClient(JsMessage jsMessage) throws SIResourceException {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(this, tc, "sendMessageToClient", jsMessage);
        // The pending receive-with-wait is satisfied by this send.
        setCurrentlyDoingReceiveWithWait(false);
        try {
            // If we are at FAP9 or above we can do a 'chunked' send of the message in
            // separate slices to make life easier on the Java memory manager.
            final HandshakeProperties props = conversation.getHandshakeProperties();
            if (props.getFapLevel() >= JFapChannelConstants.FAP_VERSION_9) {
                sendChunkedMessage(jsMessage);
            } else {
                sendEntireMessage(jsMessage, null);
            }
        } catch (Exception e) {
            // Record the failure for serviceability (FFDC), trace it, and surface
            // it to the caller as a resource exception.
            FFDCFilter.processException(e, CLASS_NAME + ".sendMessageToClient", CommsConstants.CATSYNCASYNCHREADER_SEND_MSG_03, this);
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                SibTr.debug(tc, "Encode failed: " + e.getMessage(), e);
            throw new SIResourceException(e);
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(this, tc, "sendMessageToClient");
    }
}
public class AbstractPropertyEditor { /** * Checks if the < em > bean < / em > and the < em > value < / em > are consistent with the cardinality rules of * the model . This method is important for validations . * @ param value Value that is related to the object * @ param bean Object that is related to the value */ protected void checkRestrictions ( R value , D bean ) { } }
Integer max = this . maxCardinalities . get ( value . getClass ( ) ) ; if ( max != null ) { if ( max == 0 ) { throw new IllegalBioPAXArgumentException ( "Cardinality 0 restriction violated" ) ; } else { assert multipleCardinality ; Set values = this . getValueFromBean ( bean ) ; if ( values . size ( ) >= max ) { throw new IllegalBioPAXArgumentException ( "Cardinality " + max + " restriction violated" ) ; } } }
public class Util { /** * Check whether a particular request is expecting continue . * @ param inboundRequestMsg in question * @ return true if the request expects 100 - continue response */ public static boolean is100ContinueRequest ( HttpCarbonMessage inboundRequestMsg ) { } }
return HEADER_VAL_100_CONTINUE . equalsIgnoreCase ( inboundRequestMsg . getHeader ( HttpHeaderNames . EXPECT . toString ( ) ) ) ;
public class IdemixSignature {

    /**
     * Verifies this signature.
     *
     * @param disclosure array indicating which attributes are expected to be disclosed
     * @param ipk the issuer public key
     * @param msg the message that should be signed in this signature
     * @param attributeValues BIG array where attributeValues[i] contains the desired
     *        attribute value for the i-th attribute if it is disclosed
     * @param rhIndex index of the attribute that represents the revocation handle
     * @param revPk the long-term public key used to authenticate CRIs
     * @param epoch monotonically increasing counter representing a time window
     * @return true iff valid
     */
    public boolean verify(boolean[] disclosure, IdemixIssuerPublicKey ipk, byte[] msg, BIG[] attributeValues, int rhIndex, PublicKey revPk, int epoch) throws CryptoException {
        // Basic input validation: all references present and array lengths must
        // match the number of attributes declared by the issuer public key.
        if (disclosure == null || ipk == null || msg == null || attributeValues == null || attributeValues.length != ipk.getAttributeNames().length || disclosure.length != ipk.getAttributeNames().length) {
            return false;
        }
        // Every disclosed attribute must come with a concrete value.
        for (int i = 0; i < ipk.getAttributeNames().length; i++) {
            if (disclosure[i] && attributeValues[i] == null) {
                return false;
            }
        }
        int[] hiddenIndices = hiddenIndices(disclosure);
        // One s-value per hidden attribute is required.
        if (proofSAttrs.length != hiddenIndices.length) {
            return false;
        }
        // A' must not be the point at infinity.
        if (aPrime.is_infinity()) {
            return false;
        }
        if (nonRevocationProof.getRevocationAlg() >= RevocationAlgorithm.values().length) {
            throw new IllegalArgumentException("CRI specifies unknown revocation algorithm");
        }
        RevocationAlgorithm revocationAlgorithm = RevocationAlgorithm.values()[nonRevocationProof.getRevocationAlg()];
        // The revocation handle attribute must stay hidden.
        if (disclosure[rhIndex]) {
            throw new IllegalArgumentException("Attribute " + rhIndex + " is disclosed but also used a revocation handle attribute, which should remain hidden");
        }
        // Verify EpochPK: the revocation epoch public key must be authenticated by revPk.
        if (!RevocationAuthority.verifyEpochPK(revPk, this.revocationPk, this.revocationPKSig, epoch, revocationAlgorithm)) {
            // Signature is based on an invalid revocation epoch public key
            return false;
        }
        // Pairing check: e(W, A') must equal e(g2, Abar), i.e. e(W, A') * e(g2, Abar)^-1 == 1.
        FP12 temp1 = PAIR.ate(ipk.getW(), aPrime);
        FP12 temp2 = PAIR.ate(IdemixUtils.genG2, aBar);
        temp2.inverse();
        temp1.mul(temp2);
        if (!PAIR.fexp(temp1).isunity()) {
            return false;
        }
        // Recompute the Fiat-Shamir commitments t1, t2, t3 from the proof values.
        ECP t1 = aPrime.mul2(proofSE, ipk.getHRand(), proofSR2);
        ECP temp = new ECP();
        temp.copy(aBar);
        temp.sub(bPrime);
        t1.sub(PAIR.G1mul(temp, proofC));
        ECP t2 = PAIR.G1mul(ipk.getHRand(), proofSSPrime);
        t2.add(bPrime.mul2(proofSR3, ipk.getHsk(), proofSSk));
        // Accumulate hidden-attribute contributions two at a time via mul2.
        for (int i = 0; i < hiddenIndices.length / 2; i++) {
            t2.add(ipk.getHAttrs()[hiddenIndices[2 * i]].mul2(proofSAttrs[2 * i], ipk.getHAttrs()[hiddenIndices[2 * i + 1]], proofSAttrs[2 * i + 1]));
        }
        // Odd count: fold in the final hidden attribute on its own.
        if (hiddenIndices.length % 2 != 0) {
            t2.add(PAIR.G1mul(ipk.getHAttrs()[hiddenIndices[hiddenIndices.length - 1]], proofSAttrs[hiddenIndices.length - 1]));
        }
        // temp = g1 * prod(HAttrs[i]^attributeValues[i]) over disclosed attributes.
        temp = new ECP();
        temp.copy(IdemixUtils.genG1);
        for (int i = 0; i < disclosure.length; i++) {
            if (disclosure[i]) {
                temp.add(PAIR.G1mul(ipk.getHAttrs()[i], attributeValues[i]));
            }
        }
        t2.add(PAIR.G1mul(temp, proofC));
        ECP t3 = ipk.getHsk().mul2(proofSSk, ipk.getHRand(), proofSRNym);
        t3.sub(nym.mul(proofC));
        // Check with non-revoked-verifier.
        NonRevocationVerifier nonRevokedVerifier = NonRevocationVerifier.getNonRevocationVerifier(revocationAlgorithm);
        int hiddenRHIndex = Ints.indexOf(hiddenIndices, rhIndex);
        if (hiddenRHIndex < 0) {
            // rhIndex is not present, set to last index position
            // NOTE(review): this sets hiddenRHIndex == hiddenIndices.length, and
            // proofSAttrs.length == hiddenIndices.length was checked above, so the
            // access below would be out of bounds — confirm this branch is unreachable
            // (rhIndex is always hidden after the disclosure[rhIndex] check).
            hiddenRHIndex = hiddenIndices.length;
        }
        BIG proofSRh = proofSAttrs[hiddenRHIndex];
        byte[] nonRevokedProofBytes = nonRevokedVerifier.recomputeFSContribution(this.nonRevocationProof, proofC, IdemixUtils.transformFromProto(this.revocationPk), proofSRh);
        if (nonRevokedProofBytes == null) {
            return false;
        }
        // Create proofData such that it can contain the sign label, 7 elements in G1
        // (each of size 2*FIELD_BYTES+1), the ipk hash, the disclosure array, and the message.
        byte[] proofData = new byte[0];
        proofData = IdemixUtils.append(proofData, SIGN_LABEL.getBytes());
        proofData = IdemixUtils.append(proofData, IdemixUtils.ecpToBytes(t1));
        proofData = IdemixUtils.append(proofData, IdemixUtils.ecpToBytes(t2));
        proofData = IdemixUtils.append(proofData, IdemixUtils.ecpToBytes(t3));
        proofData = IdemixUtils.append(proofData, IdemixUtils.ecpToBytes(aPrime));
        proofData = IdemixUtils.append(proofData, IdemixUtils.ecpToBytes(aBar));
        proofData = IdemixUtils.append(proofData, IdemixUtils.ecpToBytes(bPrime));
        proofData = IdemixUtils.append(proofData, IdemixUtils.ecpToBytes(nym));
        proofData = IdemixUtils.append(proofData, ipk.getHash());
        proofData = IdemixUtils.append(proofData, disclosure);
        proofData = IdemixUtils.append(proofData, msg);
        BIG cvalue = IdemixUtils.hashModOrder(proofData);
        // Final challenge: hash of (cvalue || nonce) must equal the proof's challenge c.
        byte[] finalProofData = new byte[0];
        finalProofData = IdemixUtils.append(finalProofData, IdemixUtils.bigToBytes(cvalue));
        finalProofData = IdemixUtils.append(finalProofData, IdemixUtils.bigToBytes(nonce));
        byte[] hashedProofData = IdemixUtils.bigToBytes(IdemixUtils.hashModOrder(finalProofData));
        return Arrays.equals(IdemixUtils.bigToBytes(proofC), hashedProofData);
    }
}
public class CommandLine { /** * Sets whether options for single - value fields can be specified multiple times on the command line without a { @ link OverwrittenOptionException } being thrown . * The default is { @ code false } . * < p > The specified setting will be registered with this { @ code CommandLine } and the full hierarchy of its * subcommands and nested sub - subcommands < em > at the moment this method is called < / em > . Subcommands added * later will have the default setting . To ensure a setting is applied to all * subcommands , call the setter last , after adding subcommands . < / p > * @ param newValue the new setting * @ return this { @ code CommandLine } object , to allow method chaining * @ since 0.9.7 */ public CommandLine setOverwrittenOptionsAllowed ( boolean newValue ) { } }
getCommandSpec ( ) . parser ( ) . overwrittenOptionsAllowed ( newValue ) ; for ( CommandLine command : getCommandSpec ( ) . subcommands ( ) . values ( ) ) { command . setOverwrittenOptionsAllowed ( newValue ) ; } return this ;
public class AliPayApi { /** * 地铁购票发码退款 * @ param model * { AlipayCommerceCityfacilitatorVoucherRefundModel } * @ return { AlipayCommerceCityfacilitatorVoucherRefundResponse } * @ throws { AlipayApiException } */ public static AlipayCommerceCityfacilitatorVoucherRefundResponse metroRefundToResponse ( AlipayCommerceCityfacilitatorVoucherRefundModel model ) throws AlipayApiException { } }
AlipayCommerceCityfacilitatorVoucherRefundRequest request = new AlipayCommerceCityfacilitatorVoucherRefundRequest ( ) ; request . setBizModel ( model ) ; return AliPayApiConfigKit . getAliPayApiConfig ( ) . getAlipayClient ( ) . execute ( request ) ;
public class PropertyHelper {

    /**
     * Replaces Ant-style property references if the corresponding keys exist in
     * the provided {@link Properties}.
     *
     * @param props contains possible replacements
     * @param original may contain Ant-style templates
     * @return the original string with replaced properties, or the unchanged
     *         original string if no placeholder was found
     */
    public static String resolveProperty(Properties props, String original) {
        Matcher matcher = PROPERTY_TEMPLATE.matcher(original);
        StringBuilder buffer = new StringBuilder();
        boolean found = false;
        while (matcher.find()) {
            found = true;
            String propertyName = matcher.group(2).trim();
            // group(1)/group(3) are the text around the placeholder (group 2 is the
            // property name); unknown keys are replaced with the empty string.
            // NOTE(review): text between two separate matches is not copied into the
            // buffer — presumably PROPERTY_TEMPLATE matches the whole string so the
            // loop runs at most once; confirm against the pattern definition.
            buffer.append(matcher.group(1))
                  .append(props.containsKey(propertyName) ? props.getProperty(propertyName) : "")
                  .append(matcher.group(3));
        }
        return found ? buffer.toString() : original;
    }
}
public class AmazonRedshiftClient { /** * Deletes the specified snapshot copy grant . * @ param deleteSnapshotCopyGrantRequest * The result of the < code > DeleteSnapshotCopyGrant < / code > action . * @ return Result of the DeleteSnapshotCopyGrant operation returned by the service . * @ throws InvalidSnapshotCopyGrantStateException * The snapshot copy grant can ' t be deleted because it is used by one or more clusters . * @ throws SnapshotCopyGrantNotFoundException * The specified snapshot copy grant can ' t be found . Make sure that the name is typed correctly and that the * grant exists in the destination region . * @ sample AmazonRedshift . DeleteSnapshotCopyGrant * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / redshift - 2012-12-01 / DeleteSnapshotCopyGrant " * target = " _ top " > AWS API Documentation < / a > */ @ Override public DeleteSnapshotCopyGrantResult deleteSnapshotCopyGrant ( DeleteSnapshotCopyGrantRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeDeleteSnapshotCopyGrant ( request ) ;
public class S2SProposalPersonServiceImpl { /** * Finds all the key Person associated with the provided pdDoc . */ @ Override public List < ProposalPersonContract > getKeyPersons ( ProposalDevelopmentDocumentContract pdDoc ) { } }
List < ProposalPersonContract > keyPersons = new ArrayList < > ( ) ; if ( pdDoc != null ) { for ( ProposalPersonContract person : pdDoc . getDevelopmentProposal ( ) . getProposalPersons ( ) ) { if ( person . isKeyPerson ( ) ) { keyPersons . add ( person ) ; } } } return keyPersons ;
public class SignedJarBuilder {

    /**
     * Writes a .SF (signature) file containing a digest of the manifest and of
     * each manifest entry stanza, and returns its bytes.
     *
     * @param forManifest the JAR manifest to digest
     * @return the serialized .SF file contents
     * @throws IOException if writing to the in-memory streams fails
     * @throws GeneralSecurityException if the digest algorithm is unavailable
     */
    private byte[] getSignature(final Manifest forManifest) throws IOException, GeneralSecurityException {
        final ByteArrayOutputStream bos = new ByteArrayOutputStream();
        final Manifest signatureFile = new Manifest();
        final Attributes main = signatureFile.getMainAttributes();
        // SHA1 is the digest of the historical JAR signing scheme this builder emits.
        // NOTE(review): SHA-1 is a weak digest — confirm whether the consumers of
        // these JARs accept the SHA-256 signing variant before changing it.
        final MessageDigest md = MessageDigest.getInstance("SHA1");
        // Everything printed through this stream is folded into the running digest;
        // the inner ByteArrayOutputStream is a throwaway sink.
        final PrintStream print = new PrintStream(new DigestOutputStream(new ByteArrayOutputStream(), md), true, "UTF-8");
        main.putValue("Signature-Version", "1.0");
        // Digest of the entire manifest.
        forManifest.write(print);
        print.flush();
        main.putValue("SHA1-Digest-Manifest", new String(Base64.encode(md.digest()), "ASCII"));
        final Map<String, Attributes> entries = forManifest.getEntries();
        for (Map.Entry<String, Attributes> entry : entries.entrySet()) {
            // Digest of the manifest stanza for this entry, reproduced in the exact
            // "Name: ...\r\n<attributes>\r\n\r\n" on-disk form.
            print.print("Name: " + entry.getKey() + "\r\n");
            for (Map.Entry<Object, Object> att : entry.getValue().entrySet()) {
                print.print(att.getKey() + ": " + att.getValue() + "\r\n");
            }
            print.print("\r\n");
            print.flush();
            // md.digest() also resets the digest for the next stanza.
            final Attributes sfAttr = new Attributes();
            sfAttr.putValue("SHA1-Digest", new String(Base64.encode(md.digest()), "ASCII"));
            signatureFile.getEntries().put(entry.getKey(), sfAttr);
        }
        signatureFile.write(bos);
        return bos.toByteArray();
    }
}
public class CeylonRepoReader { /** * / * ( non - Javadoc ) * @ see org . eclipse . aether . repository . WorkspaceReader # findArtifact ( org . eclipse . aether . artifact . Artifact ) */ @ Override public File findArtifact ( final Artifact artifact ) { } }
String type = artifact . getProperty ( "type" , "jar" ) ; if ( "ceylon-jar" . equals ( type ) || "car" . equals ( type ) ) { if ( "ceylon-jar" . equals ( type ) ) { type = "jar" ; } File art = new File ( CeylonUtil . ceylonSystemFullPath ( artifact , type ) ) ; if ( art . isFile ( ) ) { if ( logger != null ) { logger . info ( "Resolved from Ceylon repo: " + artifact ) ; } artifact . setFile ( art ) ; return art ; } } return null ;
public class cachecontentgroup {

    /**
     * Use this API to add cachecontentgroup resources.
     *
     * @param client the NITRO service client used to issue the request
     * @param resources the content groups to add; a null or empty array is a no-op
     * @return the bulk responses from the service, or {@code null} if nothing was sent
     * @throws Exception on request failures
     */
    public static base_responses add(nitro_service client, cachecontentgroup resources[]) throws Exception {
        base_responses result = null;
        if (resources != null && resources.length > 0) {
            // Build fresh request objects carrying only the writable fields of each
            // input resource, then submit them in a single bulk request.
            cachecontentgroup addresources[] = new cachecontentgroup[resources.length];
            for (int i = 0; i < resources.length; i++) {
                addresources[i] = new cachecontentgroup();
                addresources[i].name = resources[i].name;
                // Expiry configuration.
                addresources[i].weakposrelexpiry = resources[i].weakposrelexpiry;
                addresources[i].heurexpiryparam = resources[i].heurexpiryparam;
                addresources[i].relexpiry = resources[i].relexpiry;
                addresources[i].relexpirymillisec = resources[i].relexpirymillisec;
                addresources[i].absexpiry = resources[i].absexpiry;
                addresources[i].absexpirygmt = resources[i].absexpirygmt;
                addresources[i].weaknegrelexpiry = resources[i].weaknegrelexpiry;
                // Hit/invalidation selection parameters.
                addresources[i].hitparams = resources[i].hitparams;
                addresources[i].invalparams = resources[i].invalparams;
                addresources[i].ignoreparamvaluecase = resources[i].ignoreparamvaluecase;
                addresources[i].matchcookies = resources[i].matchcookies;
                addresources[i].invalrestrictedtohost = resources[i].invalrestrictedtohost;
                addresources[i].polleverytime = resources[i].polleverytime;
                addresources[i].ignorereloadreq = resources[i].ignorereloadreq;
                addresources[i].removecookies = resources[i].removecookies;
                // Prefetch configuration.
                addresources[i].prefetch = resources[i].prefetch;
                addresources[i].prefetchperiod = resources[i].prefetchperiod;
                addresources[i].prefetchperiodmillisec = resources[i].prefetchperiodmillisec;
                addresources[i].prefetchmaxpending = resources[i].prefetchmaxpending;
                addresources[i].flashcache = resources[i].flashcache;
                addresources[i].expireatlastbyte = resources[i].expireatlastbyte;
                // Response header insertion.
                addresources[i].insertvia = resources[i].insertvia;
                addresources[i].insertage = resources[i].insertage;
                addresources[i].insertetag = resources[i].insertetag;
                addresources[i].cachecontrol = resources[i].cachecontrol;
                // Size and memory limits.
                addresources[i].quickabortsize = resources[i].quickabortsize;
                addresources[i].minressize = resources[i].minressize;
                addresources[i].maxressize = resources[i].maxressize;
                addresources[i].memlimit = resources[i].memlimit;
                addresources[i].ignorereqcachinghdrs = resources[i].ignorereqcachinghdrs;
                addresources[i].minhits = resources[i].minhits;
                addresources[i].alwaysevalpolicies = resources[i].alwaysevalpolicies;
                addresources[i].persist = resources[i].persist;
                addresources[i].pinned = resources[i].pinned;
                addresources[i].lazydnsresolve = resources[i].lazydnsresolve;
                addresources[i].hitselector = resources[i].hitselector;
                addresources[i].invalselector = resources[i].invalselector;
                addresources[i].type = resources[i].type;
            }
            result = add_bulk_request(client, addresources);
        }
        return result;
    }
}
public class CmsImageFormatHandler { /** * Execute on format change . < p > * @ param formatKey the new format value */ public void onFormatChange ( String formatKey ) { } }
// setting the selected format restriction m_currentFormat = m_formats . get ( formatKey ) ; m_currentFormat . adjustCroppingParam ( m_croppingParam ) ; adjustToCurrentFormat ( ) ; if ( m_initialized ) { // fire change only if initialized fireValueChangedEvent ( ) ; }
public class Topics { /** * Subscribes an existing Amazon SQS queue to an existing Amazon SNS topic . * The specified Amazon SNS client will be used to send the subscription * request , and the Amazon SQS client will be used to modify the policy on * the queue to allow it to receive messages from the SNS topic . * The policy applied to the SQS queue is similar to this : * < pre > * " Version " : " 2008-10-17 " , * " Statement " : [ { * " Sid " : " topic - subscription - arn : aws : sns : us - west - 2:599109622955 : myTopic " , * " Effect " : " Allow " , * " Principal " : { * " AWS " : [ " * " ] * " Action " : [ " sqs : SendMessage " ] , * " Resource " : [ " arn : aws : sqs : us - west - 2:599109622955 : myQueue " ] , * " Condition " : { * " ArnLike " : { * " aws : SourceArn " : [ " arn : aws : sns : us - west - 2:599109622955 : myTopic " ] * < / pre > * < b > IMPORTANT < / b > : There might be a small time period immediately after * subscribing the SQS queue to the SNS topic and updating the SQS queue ' s * policy , where messages are not able to be delivered to the queue . After a * moment , the new queue policy will propagate and the queue will be able to * receive messages . This delay only occurs immediately after initially * subscribing the queue . * < b > IMPORTANT < / b > : The specified queue and topic ( as well as the SNS and * SQS client ) should both be located in the same AWS region . * @ param sns * The Amazon SNS client to use when subscribing the queue to the * topic . * @ param sqs * The Amazon SQS client to use when applying the policy to allow * subscribing to the topic . * @ param snsTopicArn * The Amazon Resource Name ( ARN ) uniquely identifying the Amazon * SNS topic . This value is returned form Amazon SNS when * creating the topic . * @ param sqsQueueUrl * The URL uniquely identifying the Amazon SQS queue . This value * is returned from Amazon SQS when creating the queue . 
* @ param extendPolicy * Decides behavior to overwrite the existing policy or extend it . * @ throws AmazonClientException * If any internal errors are encountered inside the client * while attempting to make the request or handle the response . * For example if a network connection is not available . * @ throws AmazonServiceException * If an error response is returned by AmazonSNS indicating * either a problem with the data in the request , or a server * side issue . * @ return The subscription ARN as returned by Amazon SNS when the queue is * successfully subscribed to the topic . */ public static String subscribeQueue ( AmazonSNS sns , AmazonSQS sqs , String snsTopicArn , String sqsQueueUrl , boolean extendPolicy ) throws AmazonClientException , AmazonServiceException { } }
List < String > sqsAttrNames = Arrays . asList ( QueueAttributeName . QueueArn . toString ( ) , QueueAttributeName . Policy . toString ( ) ) ; Map < String , String > sqsAttrs = sqs . getQueueAttributes ( sqsQueueUrl , sqsAttrNames ) . getAttributes ( ) ; String sqsQueueArn = sqsAttrs . get ( QueueAttributeName . QueueArn . toString ( ) ) ; String policyJson = sqsAttrs . get ( QueueAttributeName . Policy . toString ( ) ) ; Policy policy = extendPolicy && policyJson != null && policyJson . length ( ) > 0 ? Policy . fromJson ( policyJson ) : new Policy ( ) ; policy . getStatements ( ) . add ( new Statement ( Effect . Allow ) . withId ( "topic-subscription-" + snsTopicArn ) . withPrincipals ( Principal . AllUsers ) . withActions ( SQSActions . SendMessage ) . withResources ( new Resource ( sqsQueueArn ) ) . withConditions ( ConditionFactory . newSourceArnCondition ( snsTopicArn ) ) ) ; Map < String , String > newAttrs = new HashMap < String , String > ( ) ; newAttrs . put ( QueueAttributeName . Policy . toString ( ) , policy . toJson ( ) ) ; sqs . setQueueAttributes ( new SetQueueAttributesRequest ( sqsQueueUrl , newAttrs ) ) ; SubscribeResult subscribeResult = sns . subscribe ( snsTopicArn , "sqs" , sqsQueueArn ) ; return subscribeResult . getSubscriptionArn ( ) ;
public class P3 { /** * The processor for extracting directives . * @ param name * property name , unused . * @ param source * the directives . * @ throws Exception * when there is a problem . */ @ SuppressWarnings ( "unused" ) private void executeProgram ( String name , String [ ] source ) throws Exception { } }
String _singleStringSource = executeProgram__makeSingleStringSource ( source ) ; executeProgram ( name , _singleStringSource ) ;
public class SyncAgentsInner { /** * Lists sync agents in a server . * @ param resourceGroupName The name of the resource group that contains the resource . You can obtain this value from the Azure Resource Manager API or the portal . * @ param serverName The name of the server on which the sync agent is hosted . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the PagedList & lt ; SyncAgentInner & gt ; object */ public Observable < Page < SyncAgentInner > > listByServerAsync ( final String resourceGroupName , final String serverName ) { } }
return listByServerWithServiceResponseAsync ( resourceGroupName , serverName ) . map ( new Func1 < ServiceResponse < Page < SyncAgentInner > > , Page < SyncAgentInner > > ( ) { @ Override public Page < SyncAgentInner > call ( ServiceResponse < Page < SyncAgentInner > > response ) { return response . body ( ) ; } } ) ;
public class InChINumbersTools {

    /**
     * Parses the canonical atom numbering (the "/N:" layer) from an InChI AuxInfo
     * string. For each original atom index {@code o} listed in the layer,
     * {@code numbers[o-1]} is set to that atom's 1-based canonical rank.
     *
     * @param aux InChI AuxInfo containing a "/N:" layer terminated by '/'
     * @param numbers output array, indexed by original atom index minus one;
     *                must be at least as long as the highest listed index
     */
    public static void parseAuxInfo(String aux, long[] numbers) {
        // Isolate the numbering layer: everything between "/N:" and the next '/'.
        // (Avoid reassigning the parameter; use explicit offsets instead.)
        final int start = aux.indexOf("/N:") + 3;
        final String layer = aux.substring(start, aux.indexOf('/', start));
        int rank = 1;
        // Entries are separated by ',' within a component and ';' between components.
        for (String token : layer.split("[,;]")) {
            // parseInt avoids the accidental Integer autoboxing of Integer.valueOf.
            numbers[Integer.parseInt(token) - 1] = rank++;
        }
    }
}
public class FaceListsImpl { /** * Add a face to a face list . The input face is specified as an image with a targetFace rectangle . It returns a persistedFaceId representing the added face , and persistedFaceId will not expire . * @ param faceListId Id referencing a particular face list . * @ param image An image stream . * @ param addFaceFromStreamOptionalParameter the object representing the optional parameters to be set before calling this API * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the PersistedFace object */ public Observable < ServiceResponse < PersistedFace > > addFaceFromStreamWithServiceResponseAsync ( String faceListId , byte [ ] image , AddFaceFromStreamOptionalParameter addFaceFromStreamOptionalParameter ) { } }
if ( this . client . azureRegion ( ) == null ) { throw new IllegalArgumentException ( "Parameter this.client.azureRegion() is required and cannot be null." ) ; } if ( faceListId == null ) { throw new IllegalArgumentException ( "Parameter faceListId is required and cannot be null." ) ; } if ( image == null ) { throw new IllegalArgumentException ( "Parameter image is required and cannot be null." ) ; } final String userData = addFaceFromStreamOptionalParameter != null ? addFaceFromStreamOptionalParameter . userData ( ) : null ; final List < Integer > targetFace = addFaceFromStreamOptionalParameter != null ? addFaceFromStreamOptionalParameter . targetFace ( ) : null ; return addFaceFromStreamWithServiceResponseAsync ( faceListId , image , userData , targetFace ) ;
public class JKCompileUtil { /** * Compile java class . * @ param sourceCode the source code * @ throws IOException */ public static boolean compileJavaClass ( String sourceCode ) { } }
try { String fileName = getClassName ( sourceCode ) . concat ( ".java" ) ; logger . info ( "Compiling Java Class ({})" , fileName ) ; File rootDir = JKIOUtil . createTempDirectory ( ) ; String packageDir = getPackageDir ( sourceCode ) ; File sourceFile ; if ( packageDir != null ) { File file = new File ( rootDir , packageDir ) ; file . mkdirs ( ) ; sourceFile = new File ( file , fileName ) ; } else { sourceFile = new File ( rootDir , fileName ) ; } JKIOUtil . writeDataToFile ( sourceCode , sourceFile ) ; // Compile source file . JavaCompiler compiler = ToolProvider . getSystemJavaCompiler ( ) ; StandardJavaFileManager standardJavaFileManager = compiler . getStandardFileManager ( null , null , null ) ; standardJavaFileManager . setLocation ( StandardLocation . CLASS_PATH , getClassPath ( ) ) ; standardJavaFileManager . setLocation ( StandardLocation . SOURCE_PATH , Arrays . asList ( rootDir ) ) ; List < String > options = new ArrayList < String > ( ) ; options . add ( "-Xlint:unchecked" ) ; CompilationTask compilationTask = compiler . getTask ( null , standardJavaFileManager , null , options , null , standardJavaFileManager . getJavaFileObjectsFromFiles ( JK . toList ( sourceFile ) ) ) ; return compilationTask . call ( ) ; } catch ( IOException e ) { JK . throww ( e ) ; return false ; } // if ( compiler . run ( System . in , System . out , System . err , sourceFile . getPath ( ) ) ! = 0 ) { // JK . error ( " Compilation failed , check stack trace " ) ;
public class BuiltinIntentMetadataMarshaller {
    /**
     * Marshalls the given {@code BuiltinIntentMetadata} into the wire format via
     * the supplied protocol marshaller.
     *
     * @param builtinIntentMetadata the model object to marshall; must not be null
     * @param protocolMarshaller receives each field with its protocol binding
     * @throws SdkClientException if the argument is null or marshalling fails
     */
    public void marshall(BuiltinIntentMetadata builtinIntentMetadata, ProtocolMarshaller protocolMarshaller) {
        if (builtinIntentMetadata == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // each field is written with its pre-built marshalling binding
            protocolMarshaller.marshall(builtinIntentMetadata.getSignature(), SIGNATURE_BINDING);
            protocolMarshaller.marshall(builtinIntentMetadata.getSupportedLocales(), SUPPORTEDLOCALES_BINDING);
        } catch (Exception e) {
            // wrap any failure in the SDK's client exception, preserving the cause
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class LocalQueue {
    /**
     * Resets all queue-level statistics counters to zero, after resetting the
     * counters maintained by the superclass.
     *
     * @see net.timewalker.ffmq4.local.destination.LocalDestinationMBean#resetStats()
     */
    @Override
    public void resetStats() {
        super.resetStats();
        // zero every per-queue counter (atomic, so safe while the queue is live)
        sentToQueueCount.set(0);
        receivedFromQueueCount.set(0);
        acknowledgedGetCount.set(0);
        rollbackedGetCount.set(0);
        expiredCount.set(0);
    }
}
public class QueryLexer {
    /**
     * ANTLR-generated rule for token {@code T__36}: matches the literal
     * {@code 'metric'} (grammar src/riemann/Query.g line 28).
     * <p>
     * NOTE: generated code — do not hand-edit the logic; regenerate from the
     * grammar instead.
     */
    public final void mT__36() throws RecognitionException {
        try {
            int _type = T__36;
            int _channel = DEFAULT_TOKEN_CHANNEL;
            // src/riemann/Query.g:28:9: 'metric'
            match("metric");
            state.type = _type;
            state.channel = _channel;
        } finally {
            // intentionally empty: ANTLR emits the finally block for rule cleanup
        }
    }
}
public class Element {
    /**
     * Executes JavaScript code on the current element in the current frame or
     * window, retrying for up to ~2 seconds if the first attempt fails.
     *
     * @param javascript the javascript code to be executed; the located
     *        WebElement is passed as the script's first argument
     * @throws WidgetException declared for interface compatibility; failures
     *         after retries surface as RuntimeException wrapping the last error
     */
    @Override
    public void eval(String javascript) throws WidgetException {
        WebElement element = findElement(false);
        WebDriver wd = getGUIDriver().getWrappedDriver();
        try {
            ((JavascriptExecutor) wd).executeScript(javascript, element);
        } catch (Exception e) {
            // first attempt failed (e.g. stale element); retry every 500ms
            // until success or the 2-second deadline passes
            long time = System.currentTimeMillis() + 2000;
            boolean success = false;
            while (!success && System.currentTimeMillis() < time) {
                try {
                    ((JavascriptExecutor) wd).executeScript(javascript, element);
                    success = true;
                } catch (Exception e2) {
                    try {
                        Thread.sleep(500);
                    } catch (InterruptedException e1) {
                        // Ignore — deadline loop handles termination
                    }
                    // remember the most recent failure for the final rethrow
                    e = e2;
                }
            }
            if (!success) {
                // NOTE(review): wraps in RuntimeException rather than the
                // declared WidgetException — confirm this is intentional
                throw new RuntimeException(e);
            }
        }
    }
}
public class AWSSimpleSystemsManagementClient {
    /**
     * Use this API action to view all executions for a specific association ID.
     *
     * @param request the DescribeAssociationExecutions request
     * @return Result of the DescribeAssociationExecutions operation returned by the service.
     * @throws InternalServerErrorException an error occurred on the server side
     * @throws AssociationDoesNotExistException the specified association does not exist
     * @throws InvalidNextTokenException the specified token is not valid
     * @sample AWSSimpleSystemsManagement.DescribeAssociationExecutions
     */
    @Override
    public DescribeAssociationExecutionsResult describeAssociationExecutions(DescribeAssociationExecutionsRequest request) {
        // run client-side request handlers/mutation before dispatching the call
        request = beforeClientExecution(request);
        return executeDescribeAssociationExecutions(request);
    }
}
public class HadoopInputs {
    /**
     * Creates a Flink {@link InputFormat} that wraps the given Hadoop
     * {@link org.apache.hadoop.mapred.InputFormat}.
     *
     * @param mapredInputFormat the mapred-API Hadoop input format to wrap
     * @param key the key class produced by the input format
     * @param value the value class produced by the input format
     * @param job the Hadoop JobConf carrying the input configuration
     * @return A Flink InputFormat that wraps the Hadoop InputFormat.
     */
    public static <K, V> HadoopInputFormat<K, V> createHadoopInput(org.apache.hadoop.mapred.InputFormat<K, V> mapredInputFormat, Class<K> key, Class<V> value, JobConf job) {
        return new HadoopInputFormat<>(mapredInputFormat, key, value, job);
    }
}
public class StreamsUtils { /** * < p > Generates a stream identical to the provided stream until the interruptor predicate is false for one element . * At that time , the returned stream stops . < / p > * < p > A < code > NullPointerException < / code > will be thrown if the provided stream of the interruptor predicate is null . < / p > * < p > If you are using Java 9 , then yo should use < code > Stream . takeWhile ( Predicate ) < / code > . < / p > * @ param stream the input stream . Will throw a < code > NullPointerException < / code > if < code > null < / code > . * @ param interruptor the predicate applied to the elements of the input stream . * Will throw a < code > NullPointerException < / code > if < code > null < / code > . * @ param < E > the type of the stream and the returned stream . * @ return a stream that is a copy of the input stream , until the interruptor becomes false . * @ deprecated Java 9 added the * < a href = " https : / / docs . oracle . com / javase / 9 / docs / api / java / util / stream / Stream . html # takeWhile - java . util . function . Predicate - " > * Stream . takeWhile ( Predicate ) < / a > method that does the same . */ @ Deprecated ( since = "2.0" ) public static < E > Stream < E > interrupt ( Stream < E > stream , Predicate < ? super E > interruptor ) { } }
Objects . requireNonNull ( stream ) ; Objects . requireNonNull ( interruptor ) ; InterruptingSpliterator < E > spliterator = InterruptingSpliterator . of ( stream . spliterator ( ) , interruptor ) ; return StreamSupport . stream ( spliterator , stream . isParallel ( ) ) . onClose ( stream :: close ) ;
public class SourceCode { /** * Gibt zurueck ob ein Wert folgt und vor und hinterher Leerzeichen folgen . * @ param before Definition der Leerzeichen vorher . * @ param val Gefolgter Wert der erartet wird . * @ param after Definition der Leerzeichen nach dem Wert . * @ return Gibt zurueck ob der Zeiger vorwaerts geschoben wurde oder nicht . */ public boolean forwardIfCurrent ( short before , String val , short after ) { } }
int start = pos ; // space before if ( before == AT_LEAST_ONE_SPACE ) { if ( ! removeSpace ( ) ) return false ; } else removeSpace ( ) ; // value if ( ! forwardIfCurrent ( val ) ) { setPos ( start ) ; return false ; } // space after if ( after == AT_LEAST_ONE_SPACE ) { if ( ! removeSpace ( ) ) { setPos ( start ) ; return false ; } } else removeSpace ( ) ; return true ;
public class ClassUtils { /** * < p > Gets the class name minus the package name from a { @ code Class } . < / p > * < p > Consider using the Java 5 API { @ link Class # getSimpleName ( ) } instead . * The one known difference is that this code will return { @ code " Map . Entry " } while * the { @ code java . lang . Class } variant will simply return { @ code " Entry " } . < / p > * @ param cls the class to get the short name for . * @ return the class name without the package name or an empty string */ public static String getShortClassName ( final Class < ? > cls ) { } }
if ( cls == null ) { return StringUtils . EMPTY ; } return getShortClassName ( cls . getName ( ) ) ;
public class DescribeNatGatewaysRequest { /** * One or more NAT gateway IDs . * @ return One or more NAT gateway IDs . */ public java . util . List < String > getNatGatewayIds ( ) { } }
if ( natGatewayIds == null ) { natGatewayIds = new com . amazonaws . internal . SdkInternalList < String > ( ) ; } return natGatewayIds ;
public class CmsResourceTypeStatResultList { /** * Sets the layout . < p > * @ param layout to display the result in * @ param addAll indicates if the whole list should be added or just the last item */ public void setVerticalLayout ( VerticalLayout layout , boolean addAll ) { } }
if ( m_results . size ( ) > 0 ) { if ( addAll ) { for ( CmsResourceTypeStatResult result : m_results ) { layout . addComponent ( getLayoutFromResult ( result ) , 0 ) ; } } else { CmsResourceTypeStatResult statResult = m_results . get ( m_results . size ( ) - 1 ) ; if ( m_updated ) { removeRow ( layout , statResult ) ; } layout . addComponent ( getLayoutFromResult ( statResult ) , 0 ) ; } }
public class SerTypeMapper {
    /**
     * Encodes a class name for serialization.
     * <p>
     * Known simple types (String, Integer, File, ...) map to their short basic
     * form. With short types enabled, classes under {@code basePackage} are
     * emitted package-stripped, and {@code knownTypes} caches a short alias for
     * subsequent encodings within the same message.
     *
     * @param cls the class to encode, not null
     * @param settings the settings object, not null
     * @param basePackage the base package with trailing dot, null if none
     * @param knownTypes mutable map of known types, null to disable shortening
     * @return the encoded type name
     */
    public static String encodeType(Class<?> cls, final JodaBeanSer settings, final String basePackage, final Map<Class<?>, String> knownTypes) {
        // basic type
        String result = BASIC_TYPES.get(cls);
        if (result != null) {
            return result;
        }
        // anonymous enum constant bodies encode as their declaring enum class
        Class<?> supr1 = cls.getSuperclass();
        if (supr1 != null && supr1.getSuperclass() == Enum.class) {
            cls = supr1;
        }
        if (!settings.isShortTypes()) {
            return cls.getName();
        }
        // previously-seen class: reuse its cached alias
        if (knownTypes != null) {
            result = knownTypes.get(cls);
            if (result != null) {
                return result;
            }
        }
        result = cls.getName();
        if (basePackage != null
                && result.startsWith(basePackage)
                && Character.isUpperCase(result.charAt(basePackage.length()))
                && !BASIC_TYPES_REVERSED.containsKey(result.substring(basePackage.length()))) {
            // use short format
            result = result.substring(basePackage.length());
            if (knownTypes != null) {
                knownTypes.put(cls, result);
            }
        } else if (knownTypes != null) {
            // use long format now, but register a short alias for next time
            // when the simple name is unambiguous
            String simpleName = cls.getSimpleName();
            if (Character.isUpperCase(simpleName.charAt(0))
                    && !BASIC_TYPES_REVERSED.containsKey(simpleName)
                    && !knownTypes.containsValue(simpleName)) {
                knownTypes.put(cls, simpleName);
            } else {
                knownTypes.put(cls, result);
            }
        }
        return result;
    }
}
public class StopDataCollectionByAgentIdsResult { /** * Information about the agents or connector that were instructed to stop collecting data . Information includes the * agent / connector ID , a description of the operation performed , and whether the agent / connector configuration was * updated . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setAgentsConfigurationStatus ( java . util . Collection ) } or * { @ link # withAgentsConfigurationStatus ( java . util . Collection ) } if you want to override the existing values . * @ param agentsConfigurationStatus * Information about the agents or connector that were instructed to stop collecting data . Information * includes the agent / connector ID , a description of the operation performed , and whether the agent / connector * configuration was updated . * @ return Returns a reference to this object so that method calls can be chained together . */ public StopDataCollectionByAgentIdsResult withAgentsConfigurationStatus ( AgentConfigurationStatus ... agentsConfigurationStatus ) { } }
if ( this . agentsConfigurationStatus == null ) { setAgentsConfigurationStatus ( new java . util . ArrayList < AgentConfigurationStatus > ( agentsConfigurationStatus . length ) ) ; } for ( AgentConfigurationStatus ele : agentsConfigurationStatus ) { this . agentsConfigurationStatus . add ( ele ) ; } return this ;
public class Vector2d {
    /**
     * Reads this vector from the supplied {@link DoubleBuffer} starting at the
     * specified absolute buffer position/index, in {@code x, y} order.
     * <p>
     * Does not modify the buffer's position.
     *
     * @param index the absolute position into the DoubleBuffer
     * @param buffer the buffer to read {@code x, y} from
     * @return this
     */
    public Vector2d set(int index, DoubleBuffer buffer) {
        // delegate to the platform-optimized memory accessor
        MemUtil.INSTANCE.get(this, index, buffer);
        return this;
    }
}
public class PrcCheckOut {
    /**
     * Builds or updates a customer order line from a cart line during checkout.
     * <p>
     * Finds (or creates) the current order being assembled in {@code pOrders},
     * initializes its header fields from the cart on first use, then maps the
     * cart line to a goods or service order line — reusing any previously
     * emptied line slot — together with its per-item tax lines.
     *
     * @param pRqVs request scoped vars (also used to pass ORM fetch-depth hints)
     * @param pOrders orders being assembled; mutated in place
     * @param pItPl item place (currently unused pending multi-place pickup; see TODO)
     * @param pCartLn cart line being transferred to the order
     * @param pTs trading settings (currently unused in this method)
     * @throws Exception on ORM retrieval failure
     */
    public final void makeOrdLn(final Map<String, Object> pRqVs, final List<CustOrder> pOrders, final AItemPlace<?, ?> pItPl, final CartLn pCartLn, final TradingSettings pTs) throws Exception {
        // find the order currently under construction: the one with a currency
        // already set. If none qualifies, a fresh order is created below and
        // must also be initialized (isNdOrInit stays true).
        CustOrder cuOr = null;
        boolean isNdOrInit = true;
        for (CustOrder co : pOrders) {
            if (co.getCurr() != null) {
                cuOr = co;
                isNdOrInit = false;
                break;
            }
        }
        if (cuOr == null) {
            cuOr = new CustOrder();
            cuOr.setIsNew(true);
            cuOr.setTaxes(new ArrayList<CustOrderTxLn>());
            cuOr.setGoods(new ArrayList<CustOrderGdLn>());
            cuOr.setServs(new ArrayList<CustOrderSrvLn>());
            pOrders.add(cuOr);
        }
        if (isNdOrInit) {
            // copy order header data from the cart (the cart is the line's owner)
            cuOr.setDat(new Date());
            cuOr.setStat(EOrdStat.NEW);
            cuOr.setDeliv(pCartLn.getItsOwner().getDeliv());
            cuOr.setPayMeth(pCartLn.getItsOwner().getPayMeth());
            cuOr.setBuyer(pCartLn.getItsOwner().getBuyer());
            // TODO method "pickup by buyer from several places"
            // cuOr.setPlace(pItPl.getPickUpPlace());
            cuOr.setPur(pCartLn.getItsOwner().getItsVersion());
            cuOr.setCurr(pCartLn.getItsOwner().getCurr());
            cuOr.setExcRt(pCartLn.getItsOwner().getExcRt());
            cuOr.setDescr(pCartLn.getItsOwner().getDescr());
        }
        // fetch the cart line's item tax lines; the *deepLevel hints limit the
        // ORM's eager-fetch depth for the owner and tax references
        pRqVs.put(CartItTxLn.class.getSimpleName() + "itsOwnerdeepLevel", 1);
        pRqVs.put(CartItTxLn.class.getSimpleName() + "taxdeepLevel", 1);
        List<CartItTxLn> citls = getSrvOrm().retrieveListWithConditions(pRqVs, CartItTxLn.class, "where DISAB=0 and ITSOWNER=" + pCartLn.getItsId());
        pRqVs.remove(CartItTxLn.class.getSimpleName() + "itsOwnerdeepLevel");
        pRqVs.remove(CartItTxLn.class.getSimpleName() + "taxdeepLevel");
        ACustOrderLn ol;
        if (pCartLn.getItTyp().equals(EShopItemType.GOODS)) {
            // --- goods line: reuse an emptied slot (good == null) if present ---
            CustOrderGdLn ogl = null;
            if (!cuOr.getIsNew()) {
                for (CustOrderGdLn gl : cuOr.getGoods()) {
                    if (gl.getGood() == null) {
                        ogl = gl;
                        break;
                    }
                }
            }
            if (ogl == null) {
                ogl = new CustOrderGdLn();
                ogl.setIsNew(true);
                cuOr.getGoods().add(ogl);
            }
            // shallow item reference: only ID and name are needed
            InvItem gd = new InvItem();
            gd.setItsId(pCartLn.getItId());
            gd.setItsName(pCartLn.getItsName());
            ogl.setGood(gd);
            if (citls.size() > 0) {
                if (ogl.getIsNew()) {
                    ogl.setItTxs(new ArrayList<CuOrGdTxLn>());
                }
                // copy each cart tax line into an order tax line, again reusing
                // emptied slots (tax == null) on existing orders
                for (CartItTxLn citl : citls) {
                    CuOrGdTxLn oitl = null;
                    if (!cuOr.getIsNew()) {
                        for (CuOrGdTxLn itl : ogl.getItTxs()) {
                            if (itl.getTax() == null) {
                                oitl = itl;
                                break;
                            }
                        }
                    }
                    if (oitl == null) {
                        oitl = new CuOrGdTxLn();
                        oitl.setIsNew(true);
                        ogl.getItTxs().add(oitl);
                    }
                    oitl.setItsOwner(ogl);
                    Tax tx = new Tax();
                    tx.setItsId(citl.getTax().getItsId());
                    tx.setItsName(citl.getTax().getItsName());
                    oitl.setTax(tx);
                    oitl.setTot(citl.getTot());
                }
            }
            ol = ogl;
        } else {
            // --- service line: same reuse strategy as goods ---
            CustOrderSrvLn osl = null;
            if (!cuOr.getIsNew()) {
                for (CustOrderSrvLn sl : cuOr.getServs()) {
                    if (sl.getService() == null) {
                        osl = sl;
                        break;
                    }
                }
            }
            if (osl == null) {
                osl = new CustOrderSrvLn();
                osl.setIsNew(true);
                cuOr.getServs().add(osl);
            }
            ServiceToSale sr = new ServiceToSale();
            sr.setItsId(pCartLn.getItId());
            sr.setItsName(pCartLn.getItsName());
            osl.setService(sr);
            // services carry a booking interval
            osl.setDt1(pCartLn.getDt1());
            osl.setDt2(pCartLn.getDt2());
            if (citls.size() > 0) {
                if (osl.getIsNew()) {
                    osl.setItTxs(new ArrayList<CuOrSrTxLn>());
                }
                for (CartItTxLn citl : citls) {
                    CuOrSrTxLn oitl = null;
                    if (!cuOr.getIsNew()) {
                        for (CuOrSrTxLn itl : osl.getItTxs()) {
                            if (itl.getTax() == null) {
                                oitl = itl;
                                break;
                            }
                        }
                    }
                    if (oitl == null) {
                        oitl = new CuOrSrTxLn();
                        oitl.setIsNew(true);
                        osl.getItTxs().add(oitl);
                    }
                    oitl.setItsOwner(osl);
                    Tax tx = new Tax();
                    tx.setItsId(citl.getTax().getItsId());
                    tx.setItsName(citl.getTax().getItsName());
                    oitl.setTax(tx);
                    oitl.setTot(citl.getTot());
                }
            }
            ol = osl;
        }
        // common line fields shared by goods and service lines
        ol.setItsName(pCartLn.getItsName());
        ol.setDescr(pCartLn.getDescr());
        ol.setUom(pCartLn.getUom());
        ol.setPrice(pCartLn.getPrice());
        ol.setQuant(pCartLn.getQuant());
        ol.setSubt(pCartLn.getSubt());
        ol.setTot(pCartLn.getTot());
        ol.setTotTx(pCartLn.getTotTx());
        ol.setTxCat(pCartLn.getTxCat());
        ol.setTxDsc(pCartLn.getTxDsc());
    }
}
public class AfplibFactoryImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public String convertIOBObjTypeToString ( EDataType eDataType , Object instanceValue ) { } }
return instanceValue == null ? null : instanceValue . toString ( ) ;
public class Goro { /** * Creates a Goro implementation that binds to { @ link com . stanfy . enroscar . goro . GoroService } * in order to run scheduled tasks in service context . * This method is functionally identical to * < pre > * BoundGoro goro = Goro . bindWith ( context , new BoundGoro . OnUnexpectedDisconnection ( ) { * public void onServiceDisconnected ( BoundGoro goro ) { * goro . bind ( ) ; * < / pre > * @ param context context that will bind to the service * @ return Goro implementation that binds to { @ link GoroService } . * @ see # bindWith ( Context , BoundGoro . OnUnexpectedDisconnection ) */ public static BoundGoro bindAndAutoReconnectWith ( final Context context ) { } }
if ( context == null ) { throw new IllegalArgumentException ( "Context cannot be null" ) ; } return new BoundGoroImpl ( context , null ) ;
public class ScenarioImpl {
    /**
     * Returns the containment list of element parameters for this scenario,
     * creating it lazily on first access (EMF-generated accessor).
     *
     * @return the (never null) list of element parameters
     * @generated
     */
    public EList<ElementParameters> getElementParameters() {
        if (elementParameters == null) {
            // containment list tied to this object's SCENARIO__ELEMENT_PARAMETERS feature
            elementParameters = new EObjectContainmentEList<ElementParameters>(ElementParameters.class, this, BpsimPackage.SCENARIO__ELEMENT_PARAMETERS);
        }
        return elementParameters;
    }
}
public class AbstractResourceAdapterDeployer { /** * print Failures into Log files . * @ param validator validator validator validator instance used to run validation rules * @ param failures failures failures the list of Failures to be printed * @ param fhInput fhInput fhInput optional parameter . Normally used only for test or in case of * FailureHelper already present in context * @ return the error Text */ public String printFailuresLog ( Validator validator , Collection < Failure > failures , FailureHelper ... fhInput ) { } }
String errorText = "" ; FailureHelper fh = null ; if ( fhInput . length == 0 ) fh = new FailureHelper ( failures ) ; else fh = fhInput [ 0 ] ; if ( failures != null && failures . size ( ) > 0 ) { errorText = fh . asText ( validator . getResourceBundle ( ) ) ; } return errorText ;
public class AntlrCodeQualityHelper {
    /**
     * Removes duplicate FOLLOW_* bitset declarations from generated ANTLR code
     * to shrink the static initializer, keeping (and renaming) bitsets that
     * match the configured kept-pattern.
     *
     * @param javaContent the generated Java source to rewrite
     * @param options code-quality options; a no-op unless optimization is enabled
     * @return the rewritten source, or the input unchanged when optimization is off
     */
    public String removeDuplicateBitsets(String javaContent, AntlrOptions options) {
        if (!options.isOptimizeCodeQuality()) {
            return javaContent;
        }
        // groups 1/2 of followsetPattern identify the duplicate declaration;
        // survivors are renamed FOLLOW_%d unless they match the kept pattern
        return removeDuplicateFields(javaContent, followsetPattern, 1, 2, "\\bFOLLOW_\\w+\\b", "FOLLOW_%d", options.getKeptBitSetsPattern(), options.getKeptBitSetName());
    }
}
public class VoiceApi {
    /**
     * Place a call on hold (synchronous, with full HTTP response info).
     *
     * @param id The connection ID of the call. (required)
     * @param holdData Request parameters. (optional)
     * @return ApiResponse&lt;ApiSuccessResponse&gt;
     * @throws ApiException if the API call fails, e.g. server error or
     *         the response body cannot be deserialized
     */
    public ApiResponse<ApiSuccessResponse> holdWithHttpInfo(String id, HoldData holdData) throws ApiException {
        // validate arguments and build the HTTP call (no progress listeners)
        com.squareup.okhttp.Call call = holdValidateBeforeCall(id, holdData, null, null);
        Type localVarReturnType = new TypeToken<ApiSuccessResponse>() { }.getType();
        return apiClient.execute(call, localVarReturnType);
    }
}
public class SuppressionInfo { /** * Returns true if this checker should be considered suppressed given the signals present in this * object . * @ param suppressible Holds information about the suppressibilty of a checker * @ param suppressedInGeneratedCode true if this checker instance should be considered suppressed * if the signals in this object say we ' re in generated code . */ public SuppressedState suppressedState ( Suppressible suppressible , boolean suppressedInGeneratedCode ) { } }
if ( inGeneratedCode && suppressedInGeneratedCode ) { return SuppressedState . SUPPRESSED ; } if ( suppressible . supportsSuppressWarnings ( ) && ! Collections . disjoint ( suppressible . allNames ( ) , suppressWarningsStrings ) ) { return SuppressedState . SUPPRESSED ; } if ( ! Collections . disjoint ( suppressible . customSuppressionAnnotations ( ) , customSuppressions ) ) { return SuppressedState . SUPPRESSED ; } return SuppressedState . UNSUPPRESSED ;
public class SpanOfWeekdays { /** * / * [ deutsch ] * < p > Erzeugt eine neue Spanne von Wochentagen . < / p > * < p > Es ist m & ouml ; glich , denselben Wochentag f & uuml ; r Start und Ende zu w & auml ; hlen . In diesem * Fall besteht die Spanne aus genau einem Wochentag . < / p > * @ param start the starting weekday * @ param end the ending weekday ( inclusive ) * @ return new span of weekdays */ public static SpanOfWeekdays between ( Weekday start , Weekday end ) { } }
if ( start == null || end == null ) { throw new NullPointerException ( "Missing day of week." ) ; } return new SpanOfWeekdays ( start , end ) ;
public class MPDAdmin { /** * Sends the appropriate { @ link org . bff . javampd . playlist . PlaylistChangeEvent } to all registered * { @ link org . bff . javampd . playlist . PlaylistChangeListener } . * @ param event the event id to send */ protected synchronized void fireOutputChangeEvent ( OUTPUT_EVENT event ) { } }
OutputChangeEvent oce = new OutputChangeEvent ( this , event ) ; for ( OutputChangeListener pcl : outputListeners ) { pcl . outputChanged ( oce ) ; }
public class ServiceGraphModule {
    /**
     * Creates a service graph with default configuration, able to handle
     * {@code Service, BlockingService, BlockingSocketServer, Closeable,
     * ExecutorService, Timer, DataSource, EventloopService, EventloopServer}
     * and {@code Eventloop} as services.
     *
     * @return default service graph
     */
    public static ServiceGraphModule defaultInstance() {
        // register a lifecycle adapter for every supported service contract
        return newInstance()
                .register(Service.class, forService())
                .register(BlockingService.class, forBlockingService())
                .register(BlockingSocketServer.class, forBlockingSocketServer())
                .register(Closeable.class, forCloseable())
                .register(ExecutorService.class, forExecutorService())
                .register(Timer.class, forTimer())
                .register(DataSource.class, forDataSource())
                .register(EventloopService.class, forEventloopService())
                .register(EventloopServer.class, forEventloopServer())
                .register(Eventloop.class, forEventloop());
    }
}
public class LoggingScopeImpl {
    /**
     * Logs a message at this scope's level, including the scope parameters
     * when any were supplied.
     *
     * @param message the message to log
     */
    private void log(final String message) {
        if (this.optionalParams.isPresent()) {
            // NOTE(review): gate checks optionalParams but logs `params` —
            // presumably params is the unwrapped value; confirm they stay in sync
            logger.log(logLevel, message, params);
        } else {
            logger.log(logLevel, message);
        }
    }
}
public class AdapterUtil {
    /**
     * Displays the {@code javax.transaction.xa.XAResource} start-flag constant
     * name corresponding to the value supplied.
     *
     * @param flag a valid XAResource start flag constant
     * @return the constant's name with its value, or a string indicating the
     *         constant is unknown
     */
    public static String getXAResourceStartFlagString(int flag) {
        if (flag == XAResource.TMJOIN) {
            return "TMJOIN (" + flag + ')';
        }
        if (flag == XAResource.TMNOFLAGS) {
            return "TMNOFLAGS (" + flag + ')';
        }
        if (flag == XAResource.TMRESUME) {
            return "TMRESUME (" + flag + ')';
        }
        return "UNKNOWN XA RESOURCE START FLAG (" + flag + ')';
    }
}
public class PathTrie { /** * add a path to the path trie * @ param path */ public void addPath ( String path ) { } }
if ( path == null ) { return ; } String [ ] pathComponents = path . split ( "/" ) ; TrieNode parent = rootNode ; String part = null ; if ( pathComponents . length <= 1 ) { throw new IllegalArgumentException ( "Invalid path " + path ) ; } for ( int i = 1 ; i < pathComponents . length ; i ++ ) { part = pathComponents [ i ] ; if ( parent . getChild ( part ) == null ) { parent . addChild ( part , new TrieNode ( parent ) ) ; } parent = parent . getChild ( part ) ; } parent . setProperty ( true ) ;
public class MessagePack {
    /**
     * Creates an unpacker that deserializes objects from a subarray of a byte
     * array. Optimized equivalent of
     * {@code newDefaultUnpacker(new ByteArrayInputStream(contents, offset, length))},
     * and the same as {@code DEFAULT_UNPACKER_CONFIG.newUnpacker(contents, offset, length)}.
     *
     * @param contents the byte array that contains packed objects
     * @param offset the index of the first byte
     * @param length the number of bytes
     * @return a new MessageUnpacker instance that will never throw IOException
     */
    public static MessageUnpacker newDefaultUnpacker(byte[] contents, int offset, int length) {
        return DEFAULT_UNPACKER_CONFIG.newUnpacker(contents, offset, length);
    }
}
public class SendAutomationSignalRequest {
    /**
     * The data sent with the signal. The data schema depends on the type of
     * signal used in the request.
     * <p>
     * Returns the internal map directly (no defensive copy); may be null if
     * never set.
     *
     * @return the payload map, or null
     */
    public java.util.Map<String, java.util.List<String>> getPayload() {
        return payload;
    }
}
public class Generic1AggPooledTopNScannerPrototype {
    /**
     * Scans the cursor, aggregating one value per observed dimension index into
     * {@code resultsBuffer}.
     * <p>
     * Any changes here must be coordinated with {@code TopNUtils},
     * {@code PooledTopNAlgorithm#computeSpecializedScanAndAggregateImplementations}
     * and downstream methods, and verified (e.g. with JITWatch) that the C2
     * output for the while loop contains no method calls — everything must be
     * inlined. Non-inlined calls in the loop are a performance bug.
     *
     * @return the number of rows processed
     */
    @Override
    public long scanAndAggregate(DimensionSelector dimensionSelector, BufferAggregator aggregator, int aggregatorSize, Cursor cursor, int[] positions, ByteBuffer resultsBuffer) {
        long processedRows = 0;
        // next free offset in resultsBuffer to hand to a newly seen dim value
        int positionToAllocate = 0;
        while (!cursor.isDoneOrInterrupted()) {
            final IndexedInts dimValues = dimensionSelector.getRow();
            final int dimSize = dimValues.size();
            for (int i = 0; i < dimSize; i++) {
                int dimIndex = dimValues.get(i);
                // positions[dimIndex]: >=0 is an allocated buffer offset,
                // INIT_POSITION_VALUE means "not yet seen"; other negative
                // values mark dims to skip and fall through untouched
                int position = positions[dimIndex];
                if (position >= 0) {
                    aggregator.aggregate(resultsBuffer, position);
                } else if (position == TopNAlgorithm.INIT_POSITION_VALUE) {
                    // first sighting: allocate a slot, init it, then aggregate
                    positions[dimIndex] = positionToAllocate;
                    position = positionToAllocate;
                    aggregator.init(resultsBuffer, position);
                    aggregator.aggregate(resultsBuffer, position);
                    positionToAllocate += aggregatorSize;
                }
            }
            processedRows++;
            cursor.advanceUninterruptibly();
        }
        return processedRows;
    }
}
public class ExtractorMojo { /** * Prepare and initialize necessary internal values before actually accessing Sonar - HLA . */ void prepare ( ) { } }
if ( getMeasures ( ) == null || getMeasures ( ) . isEmpty ( ) ) setMeasureObjects ( Arrays . asList ( HLAMeasure . values ( ) ) ) ; else setMeasureObjects ( HLAMeasure . convert ( getMeasures ( ) ) ) ; if ( getUserName ( ) != null && ! getUserName ( ) . isEmpty ( ) ) setExtractor ( SonarHLAFactory . getExtractor ( getHostUrl ( ) , getUserName ( ) , getPassword ( ) ) ) ; else setExtractor ( SonarHLAFactory . getExtractor ( getHostUrl ( ) ) ) ; setConverter ( SonarHLAFactory . getConverterInstance ( ) ) ; LOG . info ( "Initialized " + getMeasureObjects ( ) . size ( ) + " measures." ) ;
public class FacesConfigMapEntriesTypeImpl { /** * If not already created , a new < code > map - entry < / code > element will be created and returned . * Otherwise , the first existing < code > map - entry < / code > element will be returned . * @ return the instance defined for the element < code > map - entry < / code > */ public FacesConfigMapEntryType < FacesConfigMapEntriesType < T > > getOrCreateMapEntry ( ) { } }
List < Node > nodeList = childNode . get ( "map-entry" ) ; if ( nodeList != null && nodeList . size ( ) > 0 ) { return new FacesConfigMapEntryTypeImpl < FacesConfigMapEntriesType < T > > ( this , "map-entry" , childNode , nodeList . get ( 0 ) ) ; } return createMapEntry ( ) ;
public class PropertyController { /** * Gets the list of properties for a given entity and for the current user . * @ param entityType Entity type * @ param id Entity ID * @ return List of properties */ @ RequestMapping ( value = "{entityType}/{id}" , method = RequestMethod . GET ) public Resources < Resource < Property < ? > > > getProperties ( @ PathVariable ProjectEntityType entityType , @ PathVariable ID id ) { } }
ProjectEntity entity = getEntity ( entityType , id ) ; List < Property < ? > > properties = propertyService . getProperties ( entity ) ; List < Resource < Property < ? > > > resources = new ArrayList < > ( ) ; for ( Property < ? > property : properties ) { Resource < Property < ? > > resource = Resource . of ( property , uri ( on ( getClass ( ) ) . getPropertyValue ( entity . getProjectEntityType ( ) , entity . getId ( ) , property . getType ( ) . getClass ( ) . getName ( ) ) ) ) ; // Update resource = resource . with ( Link . UPDATE , uri ( on ( getClass ( ) ) . getPropertyEditionForm ( entity . getProjectEntityType ( ) , entity . getId ( ) , property . getType ( ) . getClass ( ) . getName ( ) ) ) ) ; // OK resources . add ( resource ) ; } return Resources . of ( resources , uri ( on ( getClass ( ) ) . getProperties ( entityType , id ) ) ) ;
public class FlowController { /** * Add a property - related message that will be shown with the Errors and Error tags . * @ param propertyName the name of the property with which to associate this error . * @ param messageKey the message - resources key for the message . * @ param messageArgs zero or more arguments to the message . */ protected void addActionError ( String propertyName , String messageKey , Object [ ] messageArgs ) { } }
// Wrap the message key/args into an ActionMessage and register it against the
// given property on the current request, so the Errors/Error tags can render it.
InternalUtils . addActionError ( propertyName , new ActionMessage ( messageKey , messageArgs ) , getRequest ( ) ) ;
public class MultiLayerNetwork { /** * Sets the input and labels and returns the F1 score for the prediction with respect to the true labels * @ param data the data to score * @ return the score for the given input , label pairs */ @ Override public double f1Score ( org . nd4j . linalg . dataset . api . DataSet data ) { } }
// Delegate to the (features, labels) overload; the DataSet supplies both.
return f1Score ( data . getFeatures ( ) , data . getLabels ( ) ) ;
public class NumberExpression { /** * Create a { @ code this > = right } expression * @ param < A > * @ param right rhs of the comparison * @ return { @ code this > = right } * @ see java . lang . Comparable # compareTo ( Object ) */ public final < A extends Number & Comparable < ? > > BooleanExpression goe ( A right ) { } }
// Normalize the literal via cast(...), wrap it as a typed constant expression,
// and delegate to the expression-based goe overload.
return goe ( ConstantImpl . create ( cast ( right ) ) ) ;
public class Mailer { /** * Validates an { @ link Email } instance . Validation fails if the subject is missing , content is missing , or no recipients are defined . * @ param email The email that needs to be configured correctly . * @ return Always < code > true < / code > ( throws a { @ link MailException } exception if validation fails ) . * @ throws MailException Is being thrown in any of the above causes . * @ see EmailValidationUtil */ public boolean validate ( final Email email ) throws MailException { } }
// A mail must carry at least one body flavour: plain text or HTML.
if ( email . getText ( ) == null && email . getTextHTML ( ) == null ) {
    throw new MailException ( MailException . MISSING_CONTENT ) ;
}
// Subject is mandatory and must be non-empty.
if ( email . getSubject ( ) == null || email . getSubject ( ) . equals ( "" ) ) {
    throw new MailException ( MailException . MISSING_SUBJECT ) ;
}
// At least one recipient must be present.
if ( email . getRecipients ( ) . size ( ) == 0 ) {
    throw new MailException ( MailException . MISSING_RECIPIENT ) ;
}
// A sender must be configured.
if ( email . getFromRecipient ( ) == null ) {
    throw new MailException ( MailException . MISSING_SENDER ) ;
}
// Structural checks passed; now validate the actual email addresses.
if ( ! EmailValidationUtil . isValid ( email . getFromRecipient ( ) . getAddress ( ) , emailAddressValidationCriteria ) ) {
    throw new MailException ( String . format ( MailException . INVALID_SENDER , email ) ) ;
}
for ( final Recipient recipient : email . getRecipients ( ) ) {
    if ( ! EmailValidationUtil . isValid ( recipient . getAddress ( ) , emailAddressValidationCriteria ) ) {
        throw new MailException ( String . format ( MailException . INVALID_RECIPIENT , email ) ) ;
    }
}
// All checks passed; by contract this method only ever returns true.
return true ;
public class AbstractJdbcStoreConfigurationBuilder { /** * Use the specified { @ link ConnectionFactoryConfigurationBuilder } to configure connections to * the database */ @ Override public < C extends ConnectionFactoryConfigurationBuilder < ? > > C connectionFactory ( C builder ) { } }
// Only one connection factory may be configured per store.
if ( connectionFactory != null ) {
throw new IllegalStateException ( "A ConnectionFactory has already been configured for this store" ) ;
}
// Unchecked by design: the builder is stored under its common supertype but
// handed back as the caller's concrete type C to allow fluent chaining.
this . connectionFactory = ( ConnectionFactoryConfigurationBuilder < ConnectionFactoryConfiguration > ) builder ;
return builder ;
public class CacheableWorkspaceDataManager { /** * Unregister remote commands . */ private void unregisterRemoteCommands ( ) { } }
// rpcService is only present in clustered deployments; nothing to undo otherwise.
if ( rpcService != null ) {
rpcService . unregisterCommand ( suspend ) ;
rpcService . unregisterCommand ( resume ) ;
rpcService . unregisterCommand ( requestForResponsibleForResuming ) ;
}
public class StringUtils { /** * Determines whether the String value contains any whitespace , guarding against null values . * @ param value the String value being evaluated for whitespace containment . * @ return a boolean value indicating whether the String value contains any whitespace . * @ see # toCharArray ( String ) * @ see java . lang . Character # isWhitespace ( char ) */ @ NullSafe public static boolean containsWhitespace ( String value ) { } }
// toCharArray(value) is the null-safe helper from this class, so a null
// input simply yields nothing to iterate and the method returns false.
for ( final char candidate : toCharArray ( value ) ) {
    if ( ! Character . isWhitespace ( candidate ) ) {
        continue ;
    }
    // Found the first whitespace character; no need to scan further.
    return true ;
}
return false ;
public class DomUtil { /** * Allows to apply a { @ link NodeListFilter } to a { @ link NodeList } . This allows to remove all elements from a node list which do not match the Filter . * @ param nodeList the { @ link NodeList } to filter * @ param filter the { @ link NodeListFilter } to apply to the { @ link NodeList } * @ return the List of all Nodes which match the filter */ public static List < DomElement > filterNodeList ( NodeList nodeList , NodeListFilter filter ) { } }
// Collect only the nodes accepted by the filter, wrapped as DomElements.
final List < DomElement > matches = new ArrayList < DomElement > ( ) ;
final int length = nodeList . getLength ( ) ;
for ( int index = 0 ; index < length ; index ++ ) {
    final Node candidate = nodeList . item ( index ) ;
    if ( filter . matches ( candidate ) ) {
        matches . add ( new DomElementImpl ( ( Element ) candidate ) ) ;
    }
}
return matches ;
public class WebSocketChunkedInput { /** * Fetches a chunked data from the stream . Once this method returns the last chunk * and thus the stream has reached at its end , any subsequent { @ link # isEndOfInput ( ) } * call must return { @ code true } . * @ param allocator { @ link ByteBufAllocator } * @ return { @ link WebSocketFrame } contain chunk of data */ @ Override public WebSocketFrame readChunk ( ByteBufAllocator allocator ) throws Exception { } }
// Pull the next raw chunk from the wrapped input; null means no data available.
final ByteBuf payload = input . readChunk ( allocator ) ;
// Wrap non-null chunks in a continuation frame, flagging the final fragment
// when the underlying input reports end-of-input.
return payload == null ? null : new ContinuationWebSocketFrame ( input . isEndOfInput ( ) , rsv , payload ) ;
public class SharedInformerFactory { /** * Start all registered informers . */ public synchronized void startAllRegisteredInformers ( ) { } }
// NOTE(review): Collections.isEmptyMap here is presumably a project utility
// (java.util.Collections has no such method) -- confirm its null handling.
if ( Collections . isEmptyMap ( informers ) ) {
return ;
}
informers . forEach ( ( informerType , informer ) -> {
// Start each informer at most once; keep the Future so it can be managed later.
if ( ! startedInformers . containsKey ( informerType ) ) {
startedInformers . put ( informerType , informerExecutor . submit ( informer :: run ) ) ;
}
} ) ;
public class Tuple10 { /** * Skip 4 degrees from this tuple . */ public final Tuple6 < T5 , T6 , T7 , T8 , T9 , T10 > skip4 ( ) { } }
// Drop the first four degrees (v1..v4) and keep the remaining six.
return new Tuple6 < > ( v5 , v6 , v7 , v8 , v9 , v10 ) ;
public class OrcFile { /** * Create an ORC file reader . * @ param fs file system * @ param path file name to read from * @ return a new ORC file reader . * @ throws IOException */ public static Reader createReader ( FileSystem fs , Path path , Configuration conf ) throws IOException { } }
// Delegate to the concrete ORC reader implementation for this file.
return new ReaderImpl ( fs , path , conf ) ;
public class SerializedFormBuilder { /** * Build the information for the method . * @ param node the XML element that specifies which components to document * @ param methodsContentTree content tree to which the documentation will be added * @ throws DocletException if there is a problem while building the documentation */ public void buildMethodInfo ( XMLNode node , Content methodsContentTree ) throws DocletException { } }
// Only document serialized-form methods when comments are not suppressed
// via the -nocomment option.
if ( ! configuration . nocomment ) {
    buildChildren ( node , methodsContentTree ) ;
}
public class Discovery { /** * Add query to training data . * Adds a query to the training data for this collection . The query can contain a filter and natural language query . * @ param addTrainingDataOptions the { @ link AddTrainingDataOptions } containing the options for the call * @ return a { @ link ServiceCall } with a response type of { @ link TrainingQuery } */ public ServiceCall < TrainingQuery > addTrainingData ( AddTrainingDataOptions addTrainingDataOptions ) { } }
Validator . notNull ( addTrainingDataOptions , "addTrainingDataOptions cannot be null" ) ;
// REST path: v1/environments/{environmentId}/collections/{collectionId}/training_data
String [ ] pathSegments = { "v1/environments" , "collections" , "training_data" } ;
String [ ] pathParameters = { addTrainingDataOptions . environmentId ( ) , addTrainingDataOptions . collectionId ( ) } ;
RequestBuilder builder = RequestBuilder . post ( RequestBuilder . constructHttpUrl ( getEndPoint ( ) , pathSegments , pathParameters ) ) ;
builder . query ( "version" , versionDate ) ;
// Attach the standard SDK analytics headers for this operation.
Map < String , String > sdkHeaders = SdkCommon . getSdkHeaders ( "discovery" , "v1" , "addTrainingData" ) ;
for ( Entry < String , String > header : sdkHeaders . entrySet ( ) ) {
builder . header ( header . getKey ( ) , header . getValue ( ) ) ;
}
builder . header ( "Accept" , "application/json" ) ;
// Build the JSON body, including only the optional fields the caller supplied.
final JsonObject contentJson = new JsonObject ( ) ;
if ( addTrainingDataOptions . naturalLanguageQuery ( ) != null ) {
contentJson . addProperty ( "natural_language_query" , addTrainingDataOptions . naturalLanguageQuery ( ) ) ;
}
if ( addTrainingDataOptions . filter ( ) != null ) {
contentJson . addProperty ( "filter" , addTrainingDataOptions . filter ( ) ) ;
}
if ( addTrainingDataOptions . examples ( ) != null ) {
contentJson . add ( "examples" , GsonSingleton . getGson ( ) . toJsonTree ( addTrainingDataOptions . examples ( ) ) ) ;
}
builder . bodyJson ( contentJson ) ;
// The JSON response is deserialized into a TrainingQuery.
return createServiceCall ( builder . build ( ) , ResponseConverterUtils . getObject ( TrainingQuery . class ) ) ;
public class DisconnectCustomKeyStoreRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( DisconnectCustomKeyStoreRequest disconnectCustomKeyStoreRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( disconnectCustomKeyStoreRequest == null ) {
throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ;
}
try {
// The request carries a single marshallable field: the custom key store ID.
protocolMarshaller . marshall ( disconnectCustomKeyStoreRequest . getCustomKeyStoreId ( ) , CUSTOMKEYSTOREID_BINDING ) ;
} catch ( Exception e ) {
// Wrap any marshalling failure in an SdkClientException, preserving the cause.
throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ;
}
public class RandomNumberUtils { /** * Returns a random int that is less than the given int . * @ param maxExclusive the value that returned int must be less than * @ return the random int * @ throws IllegalArgumentException if maxExclusive is less than or equal to { @ link * Integer # MIN _ VALUE } */ public static int randomIntLessThan ( int maxExclusive ) { } }
// Reject MIN_VALUE itself: no int can be strictly less than it.
checkArgument ( maxExclusive > Integer . MIN_VALUE , "Cannot produce int less than %s" , Integer . MIN_VALUE ) ;
// Delegate to the bounded overload over [MIN_VALUE, maxExclusive).
return randomInt ( Integer . MIN_VALUE , maxExclusive ) ;
public class nsacl6 { /** * Use this API to disable nsacl6. */ public static base_response disable ( nitro_service client , nsacl6 resource ) throws Exception { } }
// Build a minimal request object carrying only the ACL name, then issue
// the "disable" operation against the NITRO service.
final nsacl6 toDisable = new nsacl6 ( ) ;
toDisable . acl6name = resource . acl6name ;
return toDisable . perform_operation ( client , "disable" ) ;
public class AmazonRoute53ResolverClient { /** * For DNS queries that originate in your VPCs , specifies which resolver endpoint the queries pass through , one * domain name that you want to forward to your network , and the IP addresses of the DNS resolvers in your network . * @ param createResolverRuleRequest * @ return Result of the CreateResolverRule operation returned by the service . * @ throws InvalidParameterException * One or more parameters in this request are not valid . * @ throws InvalidRequestException * The request is invalid . * @ throws LimitExceededException * The request caused one or more limits to be exceeded . * @ throws ResourceNotFoundException * The specified resource doesn ' t exist . * @ throws ResourceExistsException * The resource that you tried to create already exists . * @ throws ResourceUnavailableException * The specified resource isn ' t available . * @ throws InternalServiceErrorException * We encountered an unknown error . Try again in a few minutes . * @ throws ThrottlingException * The request was throttled . Try again in a few minutes . * @ sample AmazonRoute53Resolver . CreateResolverRule * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / route53resolver - 2018-04-01 / CreateResolverRule " * target = " _ top " > AWS API Documentation < / a > */ @ Override public CreateResolverRuleResult createResolverRule ( CreateResolverRuleRequest request ) { } }
// Run client-side request handlers/interceptors before dispatching to the service.
request = beforeClientExecution ( request ) ;
return executeCreateResolverRule ( request ) ;
public class CmsXmlInheritGroupContainerHandler { /** * Returns the elements of the given inheritance group for the request context URI . < p > * @ param cms the current cms context * @ param resource the inheritance group resource * @ return the elements */ public static List < CmsContainerElementBean > loadInheritContainerElements ( CmsObject cms , CmsResource resource ) { } }
CmsInheritanceReferenceParser parser = new CmsInheritanceReferenceParser ( cms ) ;
try {
parser . parse ( resource ) ;
// Look up the inheritance reference matching the request locale.
CmsInheritanceReference ref = parser . getReference ( cms . getRequestContext ( ) . getLocale ( ) ) ;
if ( ref != null ) {
String name = ref . getName ( ) ;
CmsADEManager adeManager = OpenCms . getADEManager ( ) ;
// Resolve the inherited container state for the current root URI.
CmsInheritedContainerState result = adeManager . getInheritedContainerState ( cms , cms . getRequestContext ( ) . getRootUri ( ) , name ) ;
return result . getElements ( false ) ;
}
} catch ( CmsException e ) {
// Log and fall through to an empty result rather than failing the caller.
LOG . error ( e . getLocalizedMessage ( ) , e ) ;
}
// No reference found (or parse failed): nothing to inherit.
return Collections . emptyList ( ) ;
public class PBufferGraphics { /** * Initialise the PBuffer that will be used to render to * @ throws SlickException */ private void init ( ) throws SlickException { } }
try {
// Texture that will receive the rendered contents of the pbuffer.
Texture tex = InternalTextureLoader . get ( ) . createTexture ( image . getWidth ( ) , image . getHeight ( ) , image . getFilter ( ) ) ;
final RenderTexture rt = new RenderTexture ( false , true , false , false , RenderTexture . RENDER_TEXTURE_2D , 0 ) ;
pbuffer = new Pbuffer ( screenWidth , screenHeight , new PixelFormat ( 8 , 0 , 0 ) , rt , null ) ;
// Initialise state of the pbuffer context .
pbuffer . makeCurrent ( ) ;
initGL ( ) ;
GL . glBindTexture ( GL11 . GL_TEXTURE_2D , tex . getTextureID ( ) ) ;
pbuffer . releaseTexImage ( Pbuffer . FRONT_LEFT_BUFFER ) ;
// Seed the pbuffer with the image's current contents, then rebind the
// image to the new render-target texture.
image . draw ( 0 , 0 ) ;
image . setTexture ( tex ) ;
// Restore the main display context.
Display . makeCurrent ( ) ;
} catch ( Exception e ) {
// NOTE(review): the cause e is logged but not chained into the SlickException.
Log . error ( e ) ;
throw new SlickException ( "Failed to create PBuffer for dynamic image. OpenGL driver failure?" ) ;
}
public class SeaGlassTextFieldUI { /** * DOCUMENT ME ! * @ return DOCUMENT ME ! */ protected Rectangle getCancelButtonBounds ( ) { } }
JTextComponent c = getComponent ( ) ; final int x = c . getWidth ( ) - c . getHeight ( ) / 2 - 9 ; final int y = c . getHeight ( ) / 2 - 8 ; return new Rectangle ( x , y , 17 , 17 ) ;
public class CleanableHiveDataset { /** * Drops the partitions selected by { @ link # hiveSelectionPolicy } . Also deletes the data associated with it . * If an { @ link Exception } occurs while processing a { @ link Partition } , other { @ link Partition } s will still be deleted . * However , a { @ link RuntimeException } is thrown at the end if there was at least one { @ link Exception } . */ @ Override public void clean ( ) throws IOException { } }
// NOTE(review): raw List -- elements are expected to be HiveDatasetVersion.
List versions = Lists . newArrayList ( this . hiveDatasetVersionFinder . findDatasetVersions ( this ) ) ;
if ( versions . isEmpty ( ) ) {
log . warn ( String . format ( "No dataset version can be found. Ignoring %s" , this . getTable ( ) . getCompleteName ( ) ) ) ;
return ;
}
// Sort newest-first before applying the selection policy.
Collections . sort ( versions , Collections . reverseOrder ( ) ) ;
Collection < HiveDatasetVersion > deletableVersions = this . hiveSelectionPolicy . listSelectedVersions ( versions ) ;
log . info ( String . format ( "Cleaning dataset %s .Will drop %s out of %s partitions." , datasetURN ( ) , deletableVersions . size ( ) , versions . size ( ) ) ) ;
// Collect failures so one bad partition does not stop the remaining deletions.
List < Exception > exceptions = Lists . newArrayList ( ) ;
for ( HiveDatasetVersion hiveDatasetVersion : deletableVersions ) {
try {
// Initialize the version cleaner
HiveDatasetVersionCleaner hiveDatasetVersionCleaner = new HiveDatasetVersionCleaner ( hiveDatasetVersion , this ) ;
// Perform pre - clean actions
hiveDatasetVersionCleaner . preCleanAction ( ) ;
// Perform actual cleaning
hiveDatasetVersionCleaner . clean ( ) ;
// Perform post - clean actions eg . swap partitions
hiveDatasetVersionCleaner . postCleanAction ( ) ;
} catch ( IOException e ) {
exceptions . add ( e ) ;
}
}
if ( ! exceptions . isEmpty ( ) ) {
// Surface a single aggregate failure after attempting every partition.
throw new RuntimeException ( String . format ( "Deletion failed for %s partitions" , exceptions . size ( ) ) ) ;
}
public class APSPSolver { /** * Remove a constraint ( SimpleDistanceConstraint ) */ @ Override protected void removeConstraintsSub ( Constraint [ ] con ) { } }
logger . finest ( "Trying to remove constraints " + Arrays . toString ( con ) + "..." ) ;
if ( con != null && con . length != 0 ) {
// Snapshot each constraint's bounds and endpoint IDs before deletion.
Bounds [ ] tot = new Bounds [ con . length ] ;
int [ ] from = new int [ con . length ] ;
int [ ] to = new int [ con . length ] ;
for ( int i = 0 ; i < con . length ; i ++ ) {
if ( con [ i ] instanceof SimpleDistanceConstraint ) {
SimpleDistanceConstraint c = ( SimpleDistanceConstraint ) con [ i ] ;
tot [ i ] = new Bounds ( c . getMinimum ( ) , c . getMaximum ( ) ) ;
from [ i ] = ( ( TimePoint ) c . getFrom ( ) ) . getID ( ) ;
to [ i ] = ( ( TimePoint ) c . getTo ( ) ) . getID ( ) ;
}
}
// Prefer restoring the distance matrix from backup; otherwise delete with
// full recomputation (resetting stale backups if simple backup mode is on).
if ( canRestoreDMatrix ( con ) ) cDelete ( tot , from , to , true ) ;
else {
if ( backupDMatrixSimple ) resetDMatrixBackups ( ) ;
cDelete ( tot , from , to , false ) ;
}
}
public class ModelDef { /** * Checks constraints on this model . * @ param checkLevel The amount of checks to perform * @ throws ConstraintException If a constraint has been violated */ public void checkConstraints ( String checkLevel ) throws ConstraintException { } }
// check constraints now after all classes have been processed for ( Iterator it = getClasses ( ) ; it . hasNext ( ) ; ) { ( ( ClassDescriptorDef ) it . next ( ) ) . checkConstraints ( checkLevel ) ; } // additional model constraints that either deal with bigger parts of the model or // can only be checked after the individual classes have been checked ( e . g . specific // attributes have been ensured ) new ModelConstraints ( ) . check ( this , checkLevel ) ;
public class LoggingConfigUtils { /** * Find , create , and validate the log directory . * @ param newValue * New parameter value to parse / evaluate * @ param defaultValue * Starting / Previous log directory - - this value might * also * be null . * @ return defaultValue if the newValue is null or is was badly * formatted , or the converted new value */ static File getLogDirectory ( Object newValue , File defaultDirectory ) { } }
File newDirectory = defaultDirectory ;
// If a value was specified , try creating a file with it
if ( newValue != null && newValue instanceof String ) {
newDirectory = new File ( ( String ) newValue ) ;
}
// No new value and no previous directory: fall back to the working directory.
if ( newDirectory == null ) {
String value = "." ;
try {
// Read user.dir under doPrivileged so callers without the permission still work.
value = AccessController . doPrivileged ( new java . security . PrivilegedExceptionAction < String > ( ) { @ Override public String run ( ) throws Exception { return System . getProperty ( "user.dir" ) ; } } ) ;
} catch ( Exception ex ) {
// do nothing -- keep "." as a best-effort default
}
newDirectory = new File ( value ) ;
}
// Ensure the chosen directory exists and is usable before returning it.
return LoggingFileUtils . validateDirectory ( newDirectory ) ;
public class MethodUtils { /** * < p > Given an arguments array passed to a varargs method , return an array of arguments in the canonical form , * i . e . an array with the declared number of parameters , and whose last parameter is an array of the varargs type . * @ param args the array of arguments passed to the varags method * @ param methodParameterTypes the declared array of method parameter types * @ return an array of the variadic arguments passed to the method * @ since 3.5 */ static Object [ ] getVarArgs ( final Object [ ] args , final Class < ? > [ ] methodParameterTypes ) { } }
// NOTE(review): a null last argument would NPE here -- presumably callers
// guarantee non-null varargs in the canonical-form case; confirm.
if ( args . length == methodParameterTypes . length && args [ args . length - 1 ] . getClass ( ) . equals ( methodParameterTypes [ methodParameterTypes . length - 1 ] ) ) {
// The args array is already in the canonical form for the method .
return args ;
}
// Construct a new array matching the method ' s declared parameter types .
final Object [ ] newArgs = new Object [ methodParameterTypes . length ] ;
// Copy the normal ( non - varargs ) parameters
System . arraycopy ( args , 0 , newArgs , 0 , methodParameterTypes . length - 1 ) ;
// Construct a new array for the variadic parameters
final Class < ? > varArgComponentType = methodParameterTypes [ methodParameterTypes . length - 1 ] . getComponentType ( ) ;
final int varArgLength = args . length - methodParameterTypes . length + 1 ;
// Use the wrapper type so trailing boxed arguments can be stored directly.
Object varArgsArray = Array . newInstance ( ClassUtils . primitiveToWrapper ( varArgComponentType ) , varArgLength ) ;
// Copy the variadic arguments into the varargs array .
System . arraycopy ( args , methodParameterTypes . length - 1 , varArgsArray , 0 , varArgLength ) ;
if ( varArgComponentType . isPrimitive ( ) ) {
// unbox from wrapper type to primitive type
varArgsArray = ArrayUtils . toPrimitive ( varArgsArray ) ;
}
// Store the varargs array in the last position of the array to return
newArgs [ methodParameterTypes . length - 1 ] = varArgsArray ;
// Return the canonical varargs array .
return newArgs ;
public class LinearClassifier { /** * Print all features active for a particular datum and the weight that * the classifier assigns to each class for those features . */ public void justificationOf ( Datum < L , F > example , PrintWriter pw ) { } }
// Delegate to the three-argument overload with a null final argument
// (its default behavior).
justificationOf ( example , pw , null ) ;
public class InProcessVoltDBServer { /** * Create and connect a client to the in - process VoltDB server . * Note , client will be automatically closed when the in - process server * is shut down . * Must be called after { @ link # start ( ) } . * @ return Connected client . * @ throws Exception on failure to connect properly . */ public Client getClient ( ) throws Exception { } }
ClientConfig config = new ClientConfig ( ) ;
// turn off the timeout for debugging
config . setProcedureCallTimeout ( 0 ) ;
Client client = ClientFactory . createClient ( config ) ;
// track this client so it can be closed at shutdown
trackedClients . add ( client ) ;
// Connect to the in-process server on the local host.
client . createConnection ( "localhost" ) ;
return client ;
public class CPInstancePersistenceImpl { /** * Returns the cp instance where uuid = & # 63 ; and groupId = & # 63 ; or returns < code > null < / code > if it could not be found . Uses the finder cache . * @ param uuid the uuid * @ param groupId the group ID * @ return the matching cp instance , or < code > null < / code > if a matching cp instance could not be found */ @ Override public CPInstance fetchByUUID_G ( String uuid , long groupId ) { } }
// Delegate to the overload with the trailing flag enabled; per this method's
// contract that means the finder cache is consulted.
return fetchByUUID_G ( uuid , groupId , true ) ;
public class Groups { /** * Groups elements from an iterator in the key evaluated from the passed * function . E . g : * < code > groupBy ( [ 1 , 2 , 3 , 1 ] , id ) - > { 1 : [ 1,1 ] , 2 : [ 2 ] , 3 : [ 3 ] } < / code > * @ param < K > the grouped key type * @ param < V > the grouped value type * @ param < C > the collection type used as value of the map * @ param groupies the elements to be grouped * @ param grouper the function used to group elements * @ param collectionProvider the values collection supplier * @ return a map containing grouped values */ public static < K , V , C extends Collection < V > > Map < K , C > groupBy ( Iterator < V > groupies , Function < V , K > grouper , Supplier < C > collectionProvider ) { } }
// Delegate to GroupBy, backing the result map with a HashMap factory.
return new GroupBy < > ( grouper , collectionProvider , new HashMapFactory < > ( ) ) . apply ( groupies ) ;
public class FilePickerFragment { /** * Request permission to write to the SD - card . */ @ Override protected void handlePermission ( @ NonNull File path ) { } }
// Should we show an explanation ? // if ( shouldShowRequestPermissionRationale ( // Manifest . permission . WRITE _ EXTERNAL _ STORAGE ) ) { // Explain to the user why we need permission mRequestedPath = path ; requestPermissions ( new String [ ] { Manifest . permission . WRITE_EXTERNAL_STORAGE } , PERMISSIONS_REQUEST_WRITE_EXTERNAL_STORAGE ) ;
public class LocalisationManager { /** * Method getTRMFacade * @ return _ _ trmFacade */ public TRMFacade getTRMFacade ( ) { } }
// Simple accessor wrapped in entry/exit diagnostic tracing.
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "getTRMFacade" , this ) ;
// Instantiate DA manager to interface to WLM
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "getTRMFacade" , _trmFacade ) ;
return _trmFacade ;
public class Handler { /** * Compares the host components of two URLs . * @ param u1 the URL of the first host to compare * @ param u2 the URL of the second host to compare * @ return < tt > true < / tt > if and only if they * are equal , < tt > false < / tt > otherwise . */ protected boolean hostsEqual ( URL u1 , URL u2 ) { } }
// file: URLs are special-cased per RFC 1738: an absent/empty host is
// equivalent to "localhost", i.e. file:///path equals file://localhost/path.
final String host1 = u1 . getHost ( ) ;
final String host2 = u2 . getHost ( ) ;
final boolean firstIsLocalhost = "localhost" . equalsIgnoreCase ( host1 ) ;
final boolean secondIsLocalhost = "localhost" . equalsIgnoreCase ( host2 ) ;
final boolean firstAbsent = host1 == null || "" . equals ( host1 ) ;
final boolean secondAbsent = host2 == null || "" . equals ( host2 ) ;
if ( ( firstIsLocalhost && secondAbsent ) || ( secondIsLocalhost && firstAbsent ) ) {
    return true ;
}
// Otherwise fall back to the default host comparison.
return super . hostsEqual ( u1 , u2 ) ;
public class SessionManager { /** * Binds the specified Session object into this SessionManager ' s active * Session registry . This method is typically called internally as * the final step , when a successful connection has been made . * @ param db the database to which the new Session is initially connected * @ param user the Session User * @ param readonly the ReadOnly attribute for the new Session * @ param forLog true when session is for reading a log * @ param timeZoneSeconds the session time zone second interval * @ return Session */ public synchronized Session newSession ( Database db , User user , boolean readonly , boolean forLog , int timeZoneSeconds ) { } }
// NOTE(review): the two !forLog flags map to Session constructor booleans
// whose exact semantics are defined elsewhere -- confirm against Session.
Session s = new Session ( db , user , ! forLog , ! forLog , readonly , sessionIdCount , timeZoneSeconds ) ;
// Mark log-replay sessions so other components can treat them specially.
s . isProcessingLog = forLog ;
// Register under a unique, monotonically increasing session id.
sessionMap . put ( sessionIdCount , s ) ;
sessionIdCount ++ ;
return s ;
public class FragmentMvpViewStateDelegateImpl { /** * Creates the presenter instance if not able to reuse presenter from PresenterManager */ private P restorePresenterOrRecreateNewPresenterAfterProcessDeath ( ) { } }
P presenter ;
if ( keepPresenterInstanceDuringScreenOrientationChanges ) {
// Try the retained-presenter cache first, keyed by the Mosby view id.
if ( mosbyViewId != null && ( presenter = PresenterManager . getPresenter ( getActivity ( ) , mosbyViewId ) ) != null ) {
// Presenter restored from cache
if ( DEBUG ) {
Log . d ( DEBUG_TAG , "Reused presenter " + presenter + " for view " + delegateCallback . getMvpView ( ) ) ;
}
return presenter ;
} else {
// No presenter found in cache , most likely caused by process death
presenter = createViewIdAndPresenter ( ) ;
if ( DEBUG ) {
Log . d ( DEBUG_TAG , "No presenter found although view Id was here: " + mosbyViewId + ". Most likely this was caused by a process death. New Presenter created" + presenter + " for view " + delegateCallback . getMvpView ( ) ) ;
}
return presenter ;
}
} else {
// starting first time , so create a new presenter
presenter = createViewIdAndPresenter ( ) ;
if ( DEBUG ) {
Log . d ( DEBUG_TAG , "New presenter " + presenter + " for view " + delegateCallback . getMvpView ( ) ) ;
}
return presenter ;
}
public class Jwts { /** * Returns a new { @ link Claims } instance populated with the specified name / value pairs . * @ param claims the name / value pairs to populate the new Claims instance . * @ return a new { @ link Claims } instance populated with the specified name / value pairs . */ public static Claims claims ( Map < String , Object > claims ) { } }
// Reflectively instantiate the impl class so the API jar carries no hard
// compile-time dependency on jjwt-impl.
return Classes . newInstance ( "io.jsonwebtoken.impl.DefaultClaims" , MAP_ARG , claims ) ;
public class Completable { /** * Returns a Completable that first runs this Completable * and then the other completable . * < img width = " 640 " height = " 437 " src = " https : / / raw . github . com / wiki / ReactiveX / RxJava / images / rx - operators / Completable . andThen . c . png " alt = " " > * This is an alias for { @ link # concatWith ( CompletableSource ) } . * < dl > * < dt > < b > Scheduler : < / b > < / dt > * < dd > { @ code andThen } does not operate by default on a particular { @ link Scheduler } . < / dd > * < / dl > * @ param next the other Completable , not null * @ return the new Completable instance * @ throws NullPointerException if other is null */ @ CheckReturnValue @ SchedulerSupport ( SchedulerSupport . NONE ) public final Completable andThen ( CompletableSource next ) { } }
// Alias for concatWith: run this Completable to completion, then next.
return concatWith ( next ) ;