signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class ConsumerDispatcher { /** * Helper methods to create a ConsumerKey . Can be overridden by subclasses * @ param consumerPoint * @ param getCursor * @ param selector * @ param connectionUuid * @ return */ protected ConsumerKey createConsumerKey ( DispatchableConsumerPoint consumerPoint , SelectionCriteria criteria , SIBUuid12 connectionUuid , boolean readAhead , boolean forwardScanning , JSConsumerSet consumerSet ) throws SISelectorSyntaxException , SIDiscriminatorSyntaxException , SIResourceException { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "createConsumerKey" , new Object [ ] { consumerPoint , criteria , connectionUuid , Boolean . valueOf ( readAhead ) , Boolean . valueOf ( forwardScanning ) , consumerSet } ) ; ConsumerKey key = new LocalQPConsumerKey ( consumerPoint , this , criteria , connectionUuid , forwardScanning , consumerSet ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "createConsumerKey" , key ) ; return key ;
public class FacesMessage { /** * < p > Persist { @ link javax . faces . application . FacesMessage } artifacts , * including the non serializable < code > Severity < / code > . < / p > */ private void writeObject ( ObjectOutputStream out ) throws IOException { } }
out . defaultWriteObject ( ) ; out . writeInt ( severity . getOrdinal ( ) ) ; out . writeObject ( summary ) ; out . writeObject ( detail ) ; out . writeObject ( rendered ) ;
public class Project { /** * Get the table name . */ public String getTableNames ( boolean bAddQuotes ) { } }
return ( m_tableName == null ) ? Record . formatTableNames ( PROJECT_FILE , bAddQuotes ) : super . getTableNames ( bAddQuotes ) ;
public class Series { /** * The type of series * @ param type the type to set * @ return */ public Series setType ( SeriesType type ) { } }
if ( type != null ) { this . type = type . name ( ) . toLowerCase ( ) ; } else { this . type = null ; } return this ;
public class RoundedMoney { /** * ( non - Javadoc ) * @ see javax . money . MonetaryAmount # adjust ( javax . money . AmountAdjuster ) */ @ Override public RoundedMoney with ( MonetaryOperator operator ) { } }
Objects . requireNonNull ( operator ) ; try { return RoundedMoney . from ( operator . apply ( this ) ) ; } catch ( MonetaryException | ArithmeticException e ) { throw e ; } catch ( Exception e ) { throw new MonetaryException ( "Query failed: " + operator , e ) ; }
public class SSLConnectionLink { /** * @ see com . ibm . wsspi . channelfw . base . OutboundProtocolLink # close ( com . ibm . wsspi . channelfw . VirtualConnection , java . lang . Exception ) */ @ Override public void close ( VirtualConnection inVC , Exception e ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) { Tr . entry ( tc , "close, vc=" + getVCHash ( ) ) ; } // Set closed flag so that ready can ' t be called again in an error condition . // This is a protective measure . closed = true ; // Clean up the read and write interfaces as well as the SSL engine . // cleanup has logic to avoid writing if stop ( 0 ) has been called cleanup ( ) ; // If the channel has already processed the close signal , it is too late to try and clean up the individual connection links here . // This race condition should not happen if channels above us are well behaved , so not using synchronize logic here , so as not to // impact mainline performance . if ( this . sslChannel . getstop0Called ( ) != true ) { if ( getDeviceLink ( ) != null ) { getDeviceLink ( ) . close ( inVC , e ) ; } } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) { Tr . exit ( tc , "close" ) ; }
public class GatewayMessageCodec { /** * Encode given { @ code message } to given { @ code byteBuf } . * @ param message - input message to be encoded . * @ throws MessageCodecException in case of issues during encoding . */ public ByteBuf encode ( GatewayMessage message ) throws MessageCodecException { } }
ByteBuf byteBuf = ByteBufAllocator . DEFAULT . buffer ( ) ; try ( JsonGenerator generator = jsonFactory . createGenerator ( ( OutputStream ) new ByteBufOutputStream ( byteBuf ) , JsonEncoding . UTF8 ) ) { generator . writeStartObject ( ) ; // headers for ( Entry < String , String > header : message . headers ( ) . entrySet ( ) ) { String fieldName = header . getKey ( ) ; String value = header . getValue ( ) ; switch ( fieldName ) { case STREAM_ID_FIELD : case SIGNAL_FIELD : case INACTIVITY_FIELD : case RATE_LIMIT_FIELD : generator . writeNumberField ( fieldName , Long . parseLong ( value ) ) ; break ; default : generator . writeStringField ( fieldName , value ) ; } } // data Object data = message . data ( ) ; if ( data != null ) { if ( data instanceof ByteBuf ) { ByteBuf dataBin = ( ByteBuf ) data ; if ( dataBin . isReadable ( ) ) { try { generator . writeFieldName ( DATA_FIELD ) ; generator . writeRaw ( ":" ) ; generator . flush ( ) ; byteBuf . writeBytes ( dataBin ) ; } finally { if ( releaseDataOnEncode ) { ReferenceCountUtil . safestRelease ( dataBin ) ; } } } } else { generator . writeObjectField ( DATA_FIELD , data ) ; } } generator . writeEndObject ( ) ; } catch ( Throwable ex ) { ReferenceCountUtil . safestRelease ( byteBuf ) ; Optional . ofNullable ( message . data ( ) ) . ifPresent ( ReferenceCountUtil :: safestRelease ) ; LOGGER . error ( "Failed to encode message: {}" , message , ex ) ; throw new MessageCodecException ( "Failed to encode message" , ex ) ; } return byteBuf ;
public class MetadataRepositoryImpl { /** * { @ inheritDoc } */ public Metadata findByName ( String name ) { } }
for ( Metadata m : metadata ) { if ( m . getName ( ) . equals ( name ) ) return m ; } return null ;
public class DateUtils { /** * 添加分钟 * @ param date 日期 * @ param amount 数量 * @ return 添加后的日期 */ public static Date addMinute ( Date date , int amount ) { } }
return add ( date , Calendar . MINUTE , amount ) ;
public class PropertyUtil { /** * This method returns the property as an integer value . * @ param name The property name * @ param def The optional default value * @ return The property as an integer , or null if not found */ public static Integer getPropertyAsInteger ( String name , Integer def ) { } }
String value = getProperty ( name ) ; if ( value != null ) { try { return Integer . parseInt ( value ) ; } catch ( NumberFormatException e ) { LOG . log ( Level . WARNING , "Failed to convert property value '" + value + "' to integer" , e ) ; } } return def ;
public class ThreadLocalPrivilegedTenantContext { /** * { @ inheritDoc } */ @ Override public < V > V execute ( Tenant tenant , Supplier < V > supplier ) { } }
return TenantContextDataHolder . execute ( tenant , supplier ) ;
public class IndexFacesResult { /** * An array of faces detected and added to the collection . For more information , see Searching Faces in a Collection * in the Amazon Rekognition Developer Guide . * @ param faceRecords * An array of faces detected and added to the collection . For more information , see Searching Faces in a * Collection in the Amazon Rekognition Developer Guide . */ public void setFaceRecords ( java . util . Collection < FaceRecord > faceRecords ) { } }
if ( faceRecords == null ) { this . faceRecords = null ; return ; } this . faceRecords = new java . util . ArrayList < FaceRecord > ( faceRecords ) ;
public class MessageIntegrityAttribute { /** * Encodes < tt > message < / tt > using < tt > key < / tt > and the HMAC - SHA1 algorithm * as per RFC 2104 and returns the resulting byte array . This is a utility * method that generates content for the { @ link MessageIntegrityAttribute } * regardless of the credentials being used ( short or long term ) . * @ param message * the STUN message that the resulting content will need to * travel in . * @ param offset * the index where data starts in < tt > message < / tt > . * @ param length * the length of the data in < tt > message < / tt > that the method * should consider . * @ param key * the key that we should be using for the encoding ( which * depends on whether we are using short or long term * credentials ) . * @ return the HMAC that should be used in a * < tt > MessageIntegrityAttribute < / tt > transported by * < tt > message < / tt > . * @ throws IllegalArgumentException * if the encoding fails for some reason . */ public static byte [ ] calculateHmacSha1 ( byte [ ] message , int offset , int length , byte [ ] key ) throws IllegalArgumentException { } }
try { // get an HMAC - SHA1 key from the raw key bytes SecretKeySpec signingKey = new SecretKeySpec ( key , HMAC_SHA1_ALGORITHM ) ; // get an HMAC - SHA1 Mac instance and initialize it with the key Mac mac = Mac . getInstance ( HMAC_SHA1_ALGORITHM ) ; mac . init ( signingKey ) ; // compute the hmac on input data bytes byte [ ] macInput = new byte [ length ] ; System . arraycopy ( message , offset , macInput , 0 , length ) ; return mac . doFinal ( macInput ) ; } catch ( Exception exc ) { throw new IllegalArgumentException ( "Could not create HMAC-SHA1 request encoding" , exc ) ; }
public class AbstractWebPageSecurityObjectWithAttributes { /** * Validate custom data of the input field . * @ param aWPEC * Current web page execution context . Never < code > null < / code > . * @ param aSelectedObject * The selected object . May be < code > null < / code > . * @ param aFormErrors * The form errors to be filled . Never < code > null < / code > . * @ param eFormAction * The form action mode . Either create , copy or edit . * @ return The custom parameter to be added to the used upon success . If an * error occurred , this map may be < code > null < / code > . */ @ OverrideOnDemand @ Nullable protected ICommonsMap < String , String > validateCustomInputParameters ( @ Nonnull final WPECTYPE aWPEC , @ Nullable final DATATYPE aSelectedObject , @ Nonnull final FormErrorList aFormErrors , @ Nonnull final EWebPageFormAction eFormAction ) { } }
return null ;
public class JLanguageTool { /** * Use this method if you want to access LanguageTool ' s otherwise * internal analysis of the text . For actual text checking , use the { @ code check . . . } methods instead . * @ param text The text to be analyzed * @ since 2.5 */ public List < AnalyzedSentence > analyzeText ( String text ) throws IOException { } }
List < String > sentences = sentenceTokenize ( text ) ; return analyzeSentences ( sentences ) ;
public class BundleDelegatingPageMounter { /** * { @ inheritDoc } */ public void addBundle ( ExtendedBundle bundle ) { } }
String symbolicName = bundle . getBundle ( ) . getSymbolicName ( ) ; if ( bundle . isRelevantForMountPointAnnotations ( ) ) { LOGGER . trace ( "Scanning bundle {} for PaxWicketMountPoint annotations" , symbolicName ) ; ArrayList < DefaultPageMounter > pageMounter = new ArrayList < DefaultPageMounter > ( ) ; Collection < Class < ? > > allClasses = bundle . getAllClasses ( ) ; for ( Class < ? > clazz : allClasses ) { PaxWicketMountPoint mountPoint = clazz . getAnnotation ( PaxWicketMountPoint . class ) ; if ( mountPoint != null ) { if ( ! Page . class . isAssignableFrom ( clazz ) ) { LOGGER . warn ( "ignore PaxWicketMountPoint annotated class {} since it is no page class or has unresolved optional dependencies..." , clazz . getName ( ) ) ; continue ; } DefaultPageMounter mountPointRegistration = new DefaultPageMounter ( applicationName , paxWicketContext ) ; // We have checked this before . . . @ SuppressWarnings ( "unchecked" ) Class < ? extends Page > pageClass = ( Class < ? extends Page > ) clazz ; mountPointRegistration . addMountPoint ( mountPoint . mountPoint ( ) , pageClass ) ; mountPointRegistration . register ( ) ; pageMounter . add ( mountPointRegistration ) ; LOGGER . info ( "Mounting page {} at {}" , clazz . getName ( ) , mountPoint . mountPoint ( ) ) ; } } synchronized ( mountPointRegistrations ) { if ( mountPointRegistrations . containsKey ( symbolicName ) ) { removeBundle ( bundle ) ; } mountPointRegistrations . put ( bundle . getID ( ) , pageMounter ) ; } } else { LOGGER . debug ( "Ignore bundle " + symbolicName + " for PageMounting." ) ; }
public class SplittingBAMIndexer { /** * Process the given record for the index . * @ param rec the record from the file being indexed * @ throws IOException */ public void processAlignment ( final SAMRecord rec ) throws IOException { } }
// write an offset for the first record and for the g - th record thereafter ( where // g is the granularity ) , to be consistent with the index method if ( count == 0 || ( count + 1 ) % granularity == 0 ) { SAMFileSource fileSource = rec . getFileSource ( ) ; SAMFileSpan filePointer = fileSource . getFilePointer ( ) ; writeVirtualOffset ( getPos ( filePointer ) ) ; } count ++ ;
public class KeyStoreUtil { /** * Update a key store with the keys found in a server PEM and its key file . * @ param pKeyStore keystore to update * @ param pServerCert server certificate * @ param pServerKey server key * @ param pKeyAlgo algorithm used in the keystore ( e . g . " RSA " ) * @ param pPassword password to use for the key file . must not be null , use < code > char [ 0 ] < / code > * for an empty password . */ public static void updateWithServerPems ( KeyStore pKeyStore , File pServerCert , File pServerKey , String pKeyAlgo , char [ ] pPassword ) throws IOException , CertificateException , NoSuchAlgorithmException , InvalidKeySpecException , KeyStoreException { } }
InputStream is = new FileInputStream ( pServerCert ) ; try { CertificateFactory certFactory = CertificateFactory . getInstance ( "X509" ) ; X509Certificate cert = ( X509Certificate ) certFactory . generateCertificate ( is ) ; byte [ ] keyBytes = decodePem ( pServerKey ) ; PrivateKey privateKey ; KeyFactory keyFactory = KeyFactory . getInstance ( pKeyAlgo ) ; try { // First let ' s try PKCS8 privateKey = keyFactory . generatePrivate ( new PKCS8EncodedKeySpec ( keyBytes ) ) ; } catch ( InvalidKeySpecException e ) { // Otherwise try PKCS1 RSAPrivateCrtKeySpec keySpec = PKCS1Util . decodePKCS1 ( keyBytes ) ; privateKey = keyFactory . generatePrivate ( keySpec ) ; } String alias = cert . getSubjectX500Principal ( ) . getName ( ) ; pKeyStore . setKeyEntry ( alias , privateKey , pPassword , new Certificate [ ] { cert } ) ; } finally { is . close ( ) ; }
public class JSDocInfoBuilder { /** * Records that the { @ link JSDocInfo } being built should have its * { @ link JSDocInfo # isNoCompile ( ) } flag set to { @ code true } . * @ return { @ code true } if the no compile flag was recorded and { @ code false } * if it was already recorded */ public boolean recordNoCompile ( ) { } }
if ( ! currentInfo . isNoCompile ( ) ) { currentInfo . setNoCompile ( true ) ; populated = true ; return true ; } else { return false ; }
public class CostlessMeldPairingHeap { /** * Delete a node . * @ param n * the node */ private void delete ( Node < K , V > n ) { } }
if ( n != root && n . o_s == null && n . poolIndex == Node . NO_INDEX ) { // no root , no parent and no pool index throw new IllegalArgumentException ( "Invalid handle!" ) ; } // node has a parent if ( n . o_s != null ) { // cut oldest child Node < K , V > oldestChild = cutOldestChild ( n ) ; if ( oldestChild != null ) { linkInPlace ( oldestChild , n ) ; } else { cutFromParent ( n ) ; } } // node has no parent // cut children Node < K , V > childrenTree = combine ( cutChildren ( n ) ) ; boolean checkConsolidate = false ; if ( childrenTree != null ) { checkConsolidate = true ; addPool ( childrenTree , true ) ; } size -- ; if ( n == root ) { root = null ; consolidate ( ) ; checkConsolidate = false ; } else if ( n . poolIndex != Node . NO_INDEX ) { byte curIndex = n . poolIndex ; decreasePool [ curIndex ] = decreasePool [ decreasePoolSize - 1 ] ; decreasePool [ curIndex ] . poolIndex = curIndex ; decreasePool [ decreasePoolSize - 1 ] = null ; decreasePoolSize -- ; n . poolIndex = Node . NO_INDEX ; if ( curIndex == decreasePoolMinPos ) { // in decrease pool , and also the minimum consolidate ( ) ; checkConsolidate = false ; } else { // in decrease pool , and not the minimum if ( decreasePoolMinPos == decreasePoolSize ) { decreasePoolMinPos = curIndex ; } checkConsolidate = true ; } } // if decrease pool has > = ceil ( logn ) trees , consolidate if ( checkConsolidate ) { double sizeAsDouble = size ; if ( decreasePoolSize >= Math . getExponent ( sizeAsDouble ) + 1 ) { consolidate ( ) ; } }
public class GroupElement { /** * Returns true if name matches pattern */ protected boolean name_matches ( String pattern ) { } }
pattern = pattern . toLowerCase ( ) . replaceAll ( "[*]{1}" , ".*?" ) ; return name . toLowerCase ( ) . matches ( pattern ) || get_fully_qualified_name ( ) . toLowerCase ( ) . matches ( pattern ) ;
public class KerasConstraintUtils { /** * Map Keras to DL4J constraint . * @ param kerasConstraint String containing Keras constraint name * @ param conf Keras layer configuration * @ return DL4J LayerConstraint * @ see LayerConstraint */ public static LayerConstraint mapConstraint ( String kerasConstraint , KerasLayerConfiguration conf , Map < String , Object > constraintConfig ) throws UnsupportedKerasConfigurationException { } }
LayerConstraint constraint ; if ( kerasConstraint . equals ( conf . getLAYER_FIELD_MINMAX_NORM_CONSTRAINT ( ) ) || kerasConstraint . equals ( conf . getLAYER_FIELD_MINMAX_NORM_CONSTRAINT_ALIAS ( ) ) ) { double min = ( double ) constraintConfig . get ( conf . getLAYER_FIELD_MINMAX_MIN_CONSTRAINT ( ) ) ; double max = ( double ) constraintConfig . get ( conf . getLAYER_FIELD_MINMAX_MAX_CONSTRAINT ( ) ) ; double rate = ( double ) constraintConfig . get ( conf . getLAYER_FIELD_CONSTRAINT_RATE ( ) ) ; int dim = ( int ) constraintConfig . get ( conf . getLAYER_FIELD_CONSTRAINT_DIM ( ) ) ; constraint = new MinMaxNormConstraint ( min , max , rate , dim + 1 ) ; } else if ( kerasConstraint . equals ( conf . getLAYER_FIELD_MAX_NORM_CONSTRAINT ( ) ) || kerasConstraint . equals ( conf . getLAYER_FIELD_MAX_NORM_CONSTRAINT_ALIAS ( ) ) || kerasConstraint . equals ( conf . getLAYER_FIELD_MAX_NORM_CONSTRAINT_ALIAS_2 ( ) ) ) { double max = ( double ) constraintConfig . get ( conf . getLAYER_FIELD_MAX_CONSTRAINT ( ) ) ; int dim = ( int ) constraintConfig . get ( conf . getLAYER_FIELD_CONSTRAINT_DIM ( ) ) ; constraint = new MaxNormConstraint ( max , dim + 1 ) ; } else if ( kerasConstraint . equals ( conf . getLAYER_FIELD_UNIT_NORM_CONSTRAINT ( ) ) || kerasConstraint . equals ( conf . getLAYER_FIELD_UNIT_NORM_CONSTRAINT_ALIAS ( ) ) || kerasConstraint . equals ( conf . getLAYER_FIELD_UNIT_NORM_CONSTRAINT_ALIAS_2 ( ) ) ) { int dim = ( int ) constraintConfig . get ( conf . getLAYER_FIELD_CONSTRAINT_DIM ( ) ) ; constraint = new UnitNormConstraint ( dim + 1 ) ; } else if ( kerasConstraint . equals ( conf . getLAYER_FIELD_NON_NEG_CONSTRAINT ( ) ) || kerasConstraint . equals ( conf . getLAYER_FIELD_NON_NEG_CONSTRAINT_ALIAS ( ) ) || kerasConstraint . equals ( conf . getLAYER_FIELD_NON_NEG_CONSTRAINT_ALIAS_2 ( ) ) ) { constraint = new NonNegativeConstraint ( ) ; } else { throw new UnsupportedKerasConfigurationException ( "Unknown keras constraint " + kerasConstraint ) ; } return constraint ;
public class RouteHandler { /** * Gets the request URI . * @ param request the specified request * @ return requestURI */ private String getRequestURI ( final HttpServletRequest request ) { } }
String ret = ( String ) request . getAttribute ( Keys . HttpRequest . REQUEST_URI ) ; if ( StringUtils . isBlank ( ret ) ) { ret = request . getRequestURI ( ) ; } return ret ;
public class PicketBoxSecurityIntegration { /** * { @ inheritDoc } */ public org . ironjacamar . core . spi . security . SecurityContext createSecurityContext ( String sd ) throws Exception { } }
org . jboss . security . SecurityContext sc = SecurityContextFactory . createSecurityContext ( sd ) ; return new PicketBoxSecurityContext ( sc ) ;
public class Day { /** * Find the n ' th xxxxday of s specified month ( for instance find 1st sunday * of May 2006 ; findNthOfMonth ( 1 , Calendar . SUNDAY , Calendar . MAY , 2006 ) ; * Return null if the specified day doesn ' t exists . * @ param n Nth day to look for . * @ param dayOfWeek Day to look for ( Calendar . XXXDAY ) . * @ param month Month to check ( Calendar . XXX ) . * @ param year Year to check . * @ return Required Day ( or null if non - existent ) * @ throws IllegalArgumentException if dyaOfWeek parameter * doesn ' t represent a valid day . */ public static Day getNthOfMonth ( int n , int dayOfWeek , int month , int year ) { } }
// Validate the dayOfWeek argument if ( dayOfWeek < 0 || dayOfWeek > 6 ) throw new IllegalArgumentException ( "Invalid day of week: " + dayOfWeek ) ; LocalDateTime localDateTime = LocalDateTime . of ( year , month , 0 , 0 , 0 ) ; return new Day ( localDateTime . with ( TemporalAdjusters . next ( DayOfWeek . of ( dayOfWeek ) ) ) . toLocalDate ( ) ) ;
public class HysteresisEdgeTracePoints { /** * Checks to see if the given coordinate is above the lower threshold . If it is the point will be * added to the current segment or be the start of a new segment . * @ param parent The edge segment which is being checked * @ param match Has a match to the current segment already been found ? * @ return true if a match was found at this point */ private boolean check ( int x , int y , EdgeSegment parent , boolean match ) { } }
if ( intensity . isInBounds ( x , y ) ) { int index = intensity . getIndex ( x , y ) ; if ( intensity . data [ index ] >= lower ) { intensity . data [ index ] = MARK_TRAVERSED ; if ( ! match ) { Point2D_I32 p = queuePoints . grow ( ) ; p . set ( x , y ) ; parent . points . add ( p ) ; } else { // a match was found so it can ' t just be added to the current edge startNewSegment ( x , y , parent ) ; } return true ; } } return false ;
public class ConfusingAutoboxedOverloading { /** * fills out a set of method details for possibly confusing method signatures * @ param cls * the current class being parsed * @ param methodInfo * a collection to hold possibly confusing methods */ private void populateMethodInfo ( JavaClass cls , Map < String , Set < String > > methodInfo ) { } }
try { if ( Values . DOTTED_JAVA_LANG_OBJECT . equals ( cls . getClassName ( ) ) ) { return ; } Method [ ] methods = cls . getMethods ( ) ; for ( Method m : methods ) { String sig = m . getSignature ( ) ; if ( isPossiblyConfusingSignature ( sig ) ) { String name = m . getName ( ) ; Set < String > sigs = methodInfo . get ( name ) ; if ( sigs == null ) { sigs = new HashSet < > ( 3 ) ; methodInfo . put ( name , sigs ) ; } sigs . add ( m . getSignature ( ) ) ; } } populateMethodInfo ( cls . getSuperClass ( ) , methodInfo ) ; } catch ( ClassNotFoundException cnfe ) { bugReporter . reportMissingClass ( cnfe ) ; }
public class CategoryController { /** * Adds a view / presenter pair to the respective HashMaps . */ public void addView ( Category category , CategoryView view , CategoryPresenter presenter ) { } }
views . put ( category , view ) ; presenters . put ( category , presenter ) ;
public class LambdaResource { /** * The array of ARNs for < a > S3Resource < / a > objects to trigger the < a > LambdaResource < / a > objects associated with this * job . * @ param eventTriggers * The array of ARNs for < a > S3Resource < / a > objects to trigger the < a > LambdaResource < / a > objects associated * with this job . */ public void setEventTriggers ( java . util . Collection < EventTriggerDefinition > eventTriggers ) { } }
if ( eventTriggers == null ) { this . eventTriggers = null ; return ; } this . eventTriggers = new java . util . ArrayList < EventTriggerDefinition > ( eventTriggers ) ;
public class DubboClientWrapper { /** * 对外提供的接口 , 获取指定类型的dubbo客户端引用 * 如果之前创建过 , 则直接从缓存中获取 , 不必再次创建 * @ param clientType * @ param < T > * @ return */ public static < T extends Object > T getWrapper ( Class < T > clientType ) { } }
return getWrapper ( clientType , generateClientId ( clientType ) ) ;
public class PointWiseCombinor { /** * { @ inheritDoc } */ public SparseDoubleVector combineUnmodified ( SparseDoubleVector v1 , SparseDoubleVector v2 ) { } }
return VectorMath . multiplyUnmodified ( v1 , v2 ) ;
public class HostStorageSystem { /** * Set NFS username and password on the host . The specified password is stored encrypted at the host and overwrites * any previous password configuration . This information is only needed when the host has mounted NFS volumes with * security types that require user credentials for accessing data . The password is used to acquire credentials that * the NFS client needs to use in order to secure NFS traffic using RPCSECGSS . The client will access files on all * volumes mounted on this host ( that are mounted with the relevant security type ) on behalf of specified user . * At present , this API supports only file system NFSv4.1. * @ param user Username * @ param password Passowrd * @ throws HostConfigFault * @ throws RuntimeFault * @ throws RemoteException * @ since 6.0 */ public void setNFSUser ( String user , String password ) throws HostConfigFault , RuntimeFault , RemoteException { } }
getVimService ( ) . setNFSUser ( getMOR ( ) , user , password ) ;
public class ApiOvhMe { /** * remove this partition * REST : DELETE / me / installationTemplate / { templateName } / partitionScheme / { schemeName } / partition / { mountpoint } * @ param templateName [ required ] This template name * @ param schemeName [ required ] name of this partitioning scheme * @ param mountpoint [ required ] partition mount point */ public void installationTemplate_templateName_partitionScheme_schemeName_partition_mountpoint_DELETE ( String templateName , String schemeName , String mountpoint ) throws IOException { } }
String qPath = "/me/installationTemplate/{templateName}/partitionScheme/{schemeName}/partition/{mountpoint}" ; StringBuilder sb = path ( qPath , templateName , schemeName , mountpoint ) ; exec ( qPath , "DELETE" , sb . toString ( ) , null ) ;
public class AttributeValue { /** * An attribute of type Map . For example : * < code > " M " : { " Name " : { " S " : " Joe " } , " Age " : { " N " : " 35 " } } < / code > * @ param m * An attribute of type Map . For example : < / p > * < code > " M " : { " Name " : { " S " : " Joe " } , " Age " : { " N " : " 35 " } } < / code > * @ return Returns a reference to this object so that method calls can be chained together . */ public AttributeValue withM ( java . util . Map < String , AttributeValue > m ) { } }
setM ( m ) ; return this ;
public class FctBnSeSelEntityProcs { /** * < p > Get PrcSeSrvSpecEmbFlDel ( create and put into map ) . < / p > * @ param pAddParam additional param * @ return requested PrcSeSrvSpecEmbFlDel * @ throws Exception - an exception */ protected final PrcSeSrvSpecEmbFlDel < RS > lazyGetPrcSeSrvSpecEmbFlDel ( final Map < String , Object > pAddParam ) throws Exception { } }
String beanName = PrcSeSrvSpecEmbFlDel . class . getSimpleName ( ) ; @ SuppressWarnings ( "unchecked" ) PrcSeSrvSpecEmbFlDel < RS > proc = ( PrcSeSrvSpecEmbFlDel < RS > ) this . processorsMap . get ( beanName ) ; if ( proc == null ) { proc = new PrcSeSrvSpecEmbFlDel < RS > ( ) ; proc . setSrvOrm ( getSrvOrm ( ) ) ; proc . setFindSeSeller ( getFindSeSeller ( ) ) ; proc . setWebAppPath ( getWebAppPath ( ) ) ; proc . setUploadDirectory ( getUploadDirectory ( ) ) ; // assigning fully initialized object : this . processorsMap . put ( beanName , proc ) ; this . logger . info ( null , FctBnSeSelEntityProcs . class , beanName + " has been created." ) ; } return proc ;
public class CommerceTaxMethodLocalServiceBaseImpl { /** * Creates a new commerce tax method with the primary key . Does not add the commerce tax method to the database . * @ param commerceTaxMethodId the primary key for the new commerce tax method * @ return the new commerce tax method */ @ Override @ Transactional ( enabled = false ) public CommerceTaxMethod createCommerceTaxMethod ( long commerceTaxMethodId ) { } }
return commerceTaxMethodPersistence . create ( commerceTaxMethodId ) ;
public class FirewallClient { /** * Retrieves the list of firewall rules available to the specified project . * < p > Sample code : * < pre > < code > * try ( FirewallClient firewallClient = FirewallClient . create ( ) ) { * ProjectName project = ProjectName . of ( " [ PROJECT ] " ) ; * for ( Firewall element : firewallClient . listFirewalls ( project ) . iterateAll ( ) ) { * / / doThingsWith ( element ) ; * < / code > < / pre > * @ param project Project ID for this request . * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ @ BetaApi public final ListFirewallsPagedResponse listFirewalls ( ProjectName project ) { } }
ListFirewallsHttpRequest request = ListFirewallsHttpRequest . newBuilder ( ) . setProject ( project == null ? null : project . toString ( ) ) . build ( ) ; return listFirewalls ( request ) ;
public class CompositeConverter { /** * This method always returns the last object converted from the list */ @ SuppressWarnings ( "unchecked" ) @ Override public Object convert ( Object source ) { } }
Object value = source ; for ( Converter < Object , Object > converter : converters ) { if ( converter != null ) { value = converter . convert ( value ) ; } } return value ;
public class ESJPCompiler { /** * All stored ESJP programs in eFaps are compiled . The system Java compiler * defined from the { @ link ToolProvider tool provider } is used for the * compiler . All old not needed compiled Java classes are automatically * removed . The compiler error and warning are logged ( errors are using * error - level , warnings are using info - level ) . < br > * Debug : < br > * < ul > * < li > < code > null < / code > : By default , only line number and source file information is generated . < / li > * < li > < code > " none " < / code > : Do not generate any debugging information < / li > * < li > Generate only some kinds of debugging information , specified by a comma separated * list of keywords . Valid keywords are : * < ul > * < li > < code > " source " < / code > : Source file debugging information < / li > * < li > < code > " lines " < / code > : Line number debugging information < / li > * < li > < code > " vars " < / code > : Local variable debugging information < / li > * < / ul > * < / li > * < / ul > * @ param _ debug String for the debug option * @ param _ addRuntimeClassPath Must the classpath from the runtime added * to the compiler , default : < code > false < / code > * @ throws InstallationException if the compile failed */ public void compile ( final String _debug , final boolean _addRuntimeClassPath ) throws InstallationException { } }
readESJPPrograms ( ) ; readESJPClasses ( ) ; final JavaCompiler compiler = ToolProvider . getSystemJavaCompiler ( ) ; if ( compiler == null ) { ESJPCompiler . LOG . error ( "no compiler found for compiler !" ) ; } else { // output of used compiler ESJPCompiler . LOG . info ( " Using compiler {}" , compiler . getClass ( ) . getName ( ) ) ; // options for the compiler final List < String > optionList = new ArrayList < > ( ) ; // set classpath ! // ( the list of programs to compile is given to the javac as // argument array , so the class path could be set in front of the // programs to compile ) if ( this . classPathElements != null ) { // different class path separators depending on the OS final String sep = SystemUtils . IS_OS_WINDOWS ? ";" : ":" ; final StringBuilder classPath = new StringBuilder ( ) ; for ( final String classPathElement : this . classPathElements ) { classPath . append ( classPathElement ) . append ( sep ) ; } if ( _addRuntimeClassPath ) { classPath . append ( System . getProperty ( "java.class.path" ) ) ; } optionList . addAll ( Arrays . asList ( "-classpath" , classPath . toString ( ) ) ) ; } else { // set compiler ' s class path to be same as the runtime ' s optionList . addAll ( Arrays . asList ( "-classpath" , System . getProperty ( "java.class.path" ) ) ) ; } // Set the source file encoding name , such as EUCJIS / SJIS . If - encoding is not specified , // the platform default converter is used . optionList . addAll ( Arrays . asList ( "-encoding" , "UTF-8" ) ) ; if ( _debug != null ) { optionList . addAll ( Arrays . asList ( "-g" , _debug ) ) ; } // logging of compiling classes if ( ESJPCompiler . LOG . isInfoEnabled ( ) ) { final List < SourceObject > ls = new ArrayList < > ( this . name2Source . values ( ) ) ; Collections . sort ( ls , ( _arg0 , _arg1 ) -> _arg0 . getJavaName ( ) . compareTo ( _arg1 . getJavaName ( ) ) ) ; for ( final SourceObject obj : ls ) { ESJPCompiler . LOG . info ( " Compiling ESJP '{}'" , obj . 
getJavaName ( ) ) ; } } final FileManager fm = new FileManager ( compiler . getStandardFileManager ( null , null , null ) ) ; final boolean noErrors = compiler . getTask ( new ErrorWriter ( ) , fm , null , optionList , null , this . name2Source . values ( ) ) . call ( ) ; if ( ! noErrors ) { throw new InstallationException ( "error" ) ; } // store all compiled ESJP ' s for ( final ESJPCompiler . StoreObject obj : this . classFiles . values ( ) ) { obj . write ( ) ; } // delete not needed compiled ESJP classes for ( final Long id : this . class2id . values ( ) ) { try { new Delete ( this . classType , id ) . executeWithoutAccessCheck ( ) ; } catch ( final EFapsException e ) { throw new InstallationException ( "Could not delete ESJP class with id " + id , e ) ; } } }
public class WebACService { /** * Clean the identifier . * @ param identifier the identifier * @ return the cleaned identifier */ private static String cleanIdentifier ( final String identifier ) { } }
final String id = identifier . split ( "#" ) [ 0 ] . split ( "\\?" ) [ 0 ] ; if ( id . endsWith ( "/" ) ) { return id . substring ( 0 , id . length ( ) - 1 ) ; } return id ;
public class IterUtil { /** * Build a Map from a list of keys and a list of values.<br> * The keys drive the pairing: keys and values are matched by position. If there are more keys than values, the surplus keys map to null.<br> * If there are more values than keys, the extra values are ignored. * @ param < K > the key type * @ param < V > the value type * @ param keys the keys * @ param values the values * @ return the resulting Map * @ since 3.1.0 */ public static < K , V > Map < K , V > toMap ( Iterable < K > keys , Iterable < V > values ) { } }
// Delegate to the three-argument overload; the boolean selects the map flavour
// (false presumably means the default, non-ordered map — TODO confirm against the overload's javadoc).
return toMap ( keys , values , false ) ;
public class ClientProcessor { /** * / * ( non - Javadoc ) * @ see net . timewalker . ffmq4 . utils . watchdog . ActiveObject # onActivityTimeout ( ) */ @ Override public boolean onActivityTimeout ( ) throws Exception { } }
if ( ! transport . isClosed ( ) ) { log . warn ( "#" + id + " Timeout waiting for client activity (" + authTimeout + "s), dropping client." ) ; stop ( ) ; } return true ;
public class GeometryTools { /** * Normalizes a point . * @ param point The point to normalize */ public static void normalize ( Point3d point ) { } }
double sum = Math . sqrt ( point . x * point . x + point . y * point . y + point . z * point . z ) ; point . x = point . x / sum ; point . y = point . y / sum ; point . z = point . z / sum ;
public class SqlBasedRsIterator { /** * Returns a proxy or a fully materialized Object from the current row of the underlying ResultSet. Tries fast direct mapping first and falls back to slower Identity-based loading. */ protected Object getObjectFromResultSet ( ) throws PersistenceBrokerException { } }
try
{
    // if all primitive attributes of the object are contained in the ResultSet
    // the fast direct mapping can be used
    return super . getObjectFromResultSet ( ) ;
}
// if the full loading failed we assume that at least PK attributes are contained
// in the ResultSet and perform a slower Identity based loading . . .
// This may of course also fail and can throw another PersistenceBrokerException
catch ( PersistenceBrokerException e )
{
    // Build the object's Identity from the PK columns and materialize it through the broker.
    Identity oid = getIdentityFromResultSet ( ) ;
    return getBroker ( ) . getObjectByIdentity ( oid ) ;
}
public class QR { /** * Solve the least squares A * x = b . * @ param b right hand side of linear system . * @ param x the output solution vector that minimizes the L2 norm of A * x - b . * @ exception RuntimeException if matrix is rank deficient . */ public void solve ( double [ ] b , double [ ] x ) { } }
if ( b . length != qr . nrows ( ) ) { throw new IllegalArgumentException ( String . format ( "Row dimensions do not agree: A is %d x %d, but B is %d x 1" , qr . nrows ( ) , qr . nrows ( ) , b . length ) ) ; } if ( x . length != qr . ncols ( ) ) { throw new IllegalArgumentException ( "A and x dimensions don't match." ) ; } if ( singular ) { throw new RuntimeException ( "Matrix is rank deficient." ) ; } double [ ] B = b . clone ( ) ; solve ( Matrix . newInstance ( B ) ) ; System . arraycopy ( B , 0 , x , 0 , x . length ) ;
public class ISODateTimeFormat { /** * Creates a date using the ordinal date format . * Specification reference : 5.2.2. * @ param bld the builder * @ param fields the fields * @ param extended true to use extended format * @ param strictISO true to only allow ISO formats * @ since 1.1 */ private static boolean dateByOrdinal ( DateTimeFormatterBuilder bld , Collection < DateTimeFieldType > fields , boolean extended , boolean strictISO ) { } }
boolean reducedPrec = false ; if ( fields . remove ( DateTimeFieldType . year ( ) ) ) { bld . append ( Constants . ye ) ; if ( fields . remove ( DateTimeFieldType . dayOfYear ( ) ) ) { // YYYY - DDD / YYYYDDD appendSeparator ( bld , extended ) ; bld . appendDayOfYear ( 3 ) ; } else { // YYYY / YYYY reducedPrec = true ; } } else if ( fields . remove ( DateTimeFieldType . dayOfYear ( ) ) ) { // - DDD / - DDD bld . appendLiteral ( '-' ) ; bld . appendDayOfYear ( 3 ) ; } return reducedPrec ;
public class DynamoDBTableMapper { /** * Retrieves multiple items from the table using their primary keys . * @ param itemsToGet The items to get . * @ return The list of objects . * @ see com . amazonaws . services . dynamodbv2 . datamodeling . DynamoDBMapper # batchLoad */ public List < T > batchLoad ( Iterable < T > itemsToGet ) { } }
final Map < String , List < Object > > results = mapper . batchLoad ( itemsToGet ) ; if ( results . isEmpty ( ) ) { return Collections . < T > emptyList ( ) ; } return ( List < T > ) results . get ( mapper . getTableName ( model . targetType ( ) , config ) ) ;
public class AfplibPackageImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public EClass getIDEStructure ( ) { } }
if ( ideStructureEClass == null ) { ideStructureEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( AfplibPackage . eNS_URI ) . getEClassifiers ( ) . get ( 392 ) ; } return ideStructureEClass ;
public class JmsMessageImpl { /** * @ see javax . jms . Message # getJMSReplyTo ( ) * d246604 Review error logic . * This method uses 3 mechanisms to determine the type of the replyTo destination : * a ) The JMS specific replyURIBytes * b ) using coreConnection . getDestinationConfiguration ( ) * c ) Guessing based on the presence / absence of a discriminator * Prior to 246604 , errors in b ) are returned to the application . This will be changed so that * a failure in codepath b ) causes fallback to c ) . */ @ Override public Destination getJMSReplyTo ( ) throws JMSException { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( this , tc , "getJMSReplyTo" ) ; // If we have not cached the replyTo destination . if ( replyTo == null ) { List < SIDestinationAddress > rrp = msg . getReverseRoutingPath ( ) ; SICoreConnection siConn = null ; if ( theSession != null ) siConn = theSession . getCoreConnection ( ) ; // Use this utility method to obtain the full representation & store in the cache replyTo = JmsDestinationImpl . getJMSReplyToInternal ( msg , rrp , siConn ) ; } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( this , tc , "getJMSReplyTo" , replyTo ) ; return replyTo ;
public class ApplicationRouter { /** * 获取application定义 * @ param application application名称 * @ throws ApplicationUndefinedException application未定义 */ public NodeStatus newestDefinition ( String application ) throws ApplicationUndefinedException { } }
if ( Objects . equals ( application , EnvUtil . getApplication ( ) ) ) return LocalNodeManager . singleton . getFullStatus ( ) ; if ( ApplicationDiscovery . singleton != null ) { NodeStatus status = ApplicationDiscovery . singleton . newestDefinition ( application ) ; if ( status != null ) return status ; } throw new ApplicationUndefinedException ( application ) ;
public class ForkJoinTask { /** * Returns an estimate of how many more locally queued tasks are * held by the current worker thread than there are other worker * threads that might steal them . This value may be useful for * heuristic decisions about whether to fork other tasks . In many * usages of ForkJoinTasks , at steady state , each worker should * aim to maintain a small constant surplus ( for example , 3 ) of * tasks , and to process computations locally if this threshold is * exceeded . * < p > This method may be invoked only from within { @ code * ForkJoinPool } computations ( as may be determined using method * { @ link # inForkJoinPool } ) . Attempts to invoke in other contexts * result in exceptions or errors , possibly including { @ code * ClassCastException } . * @ return the surplus number of tasks , which may be negative */ public static int getSurplusQueuedTaskCount ( ) { } }
/* * The aim of this method is to return a cheap heuristic guide * for task partitioning when programmers , frameworks , tools , * or languages have little or no idea about task granularity . * In essence by offering this method , we ask users only about * tradeoffs in overhead vs expected throughput and its * variance , rather than how finely to partition tasks . * In a steady state strict ( tree - structured ) computation , * each thread makes available for stealing enough tasks for * other threads to remain active . Inductively , if all threads * play by the same rules , each thread should make available * only a constant number of tasks . * The minimum useful constant is just 1 . But using a value of * 1 would require immediate replenishment upon each steal to * maintain enough tasks , which is infeasible . Further , * partitionings / granularities of offered tasks should * minimize steal rates , which in general means that threads * nearer the top of computation tree should generate more * than those nearer the bottom . In perfect steady state , each * thread is at approximately the same level of computation * tree . However , producing extra tasks amortizes the * uncertainty of progress and diffusion assumptions . * So , users will want to use values larger , but not much * larger than 1 to both smooth over transient shortages and * hedge against uneven progress ; as traded off against the * cost of extra task overhead . We leave the user to pick a * threshold value to compare with the results of this call to * guide decisions , but recommend values such as 3. * When all threads are active , it is on average OK to * estimate surplus strictly locally . In steady - state , if one * thread is maintaining say 2 surplus tasks , then so are * others . So we can just use estimated queue length . * However , this strategy alone leads to serious mis - estimates * in some non - steady - state conditions ( ramp - up , ramp - down , * other stalls ) . 
We can detect many of these by further * considering the number of " idle " threads , that are known to * have zero queued tasks , so compensate by a factor of * ( # idle / # active ) threads . */ ForkJoinWorkerThread wt = ( ForkJoinWorkerThread ) Thread . currentThread ( ) ; return wt . workQueue . queueSize ( ) - wt . pool . idlePerActive ( ) ;
public class ClusterMarshaller { /** * Marshall the given parameter object . */ public void marshall ( Cluster cluster , ProtocolMarshaller protocolMarshaller ) { } }
if ( cluster == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( cluster . getName ( ) , NAME_BINDING ) ; protocolMarshaller . marshall ( cluster . getArn ( ) , ARN_BINDING ) ; protocolMarshaller . marshall ( cluster . getCreatedAt ( ) , CREATEDAT_BINDING ) ; protocolMarshaller . marshall ( cluster . getVersion ( ) , VERSION_BINDING ) ; protocolMarshaller . marshall ( cluster . getEndpoint ( ) , ENDPOINT_BINDING ) ; protocolMarshaller . marshall ( cluster . getRoleArn ( ) , ROLEARN_BINDING ) ; protocolMarshaller . marshall ( cluster . getResourcesVpcConfig ( ) , RESOURCESVPCCONFIG_BINDING ) ; protocolMarshaller . marshall ( cluster . getLogging ( ) , LOGGING_BINDING ) ; protocolMarshaller . marshall ( cluster . getStatus ( ) , STATUS_BINDING ) ; protocolMarshaller . marshall ( cluster . getCertificateAuthority ( ) , CERTIFICATEAUTHORITY_BINDING ) ; protocolMarshaller . marshall ( cluster . getClientRequestToken ( ) , CLIENTREQUESTTOKEN_BINDING ) ; protocolMarshaller . marshall ( cluster . getPlatformVersion ( ) , PLATFORMVERSION_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class QueryBuilder { /** * The remainder of two terms , as in { @ code WHERE k = left % right } . */ @ NonNull public static Term remainder ( @ NonNull Term left , @ NonNull Term right ) { } }
return new BinaryArithmeticTerm ( ArithmeticOperator . REMAINDER , left , right ) ;
public class ReflectionUtils { /** * 循环向上转型 , 获取对象的DeclaredField , 并强制设置为可访问 . * 如向上转型到Object仍无法找到 , 返回null . */ public static Field getAccessibleField ( final Object obj , final String fieldName ) { } }
if ( obj == null ) throw new IllegalArgumentException ( "object cannot be null." ) ; if ( fieldName == null || fieldName . trim ( ) . length ( ) == 0 ) throw new IllegalArgumentException ( "fieldName cannot be null." ) ; for ( Class < ? > superClass = obj . getClass ( ) ; superClass != Object . class ; superClass = superClass . getSuperclass ( ) ) { try { Field field = superClass . getDeclaredField ( fieldName ) ; field . setAccessible ( true ) ; return field ; } catch ( NoSuchFieldException e ) { // NOSONAR // Field不在当前类定义 , 继续向上转型 } } return null ;
public class CitrusEndpoints { /** * Creates new KubernetesClient builder . * @ return */ @ SuppressWarnings ( "unchecked" ) public static ClientServerEndpointBuilder < KubernetesClientBuilder , KubernetesClientBuilder > kubernetes ( ) { } }
return new ClientServerEndpointBuilder ( new KubernetesClientBuilder ( ) , new KubernetesClientBuilder ( ) ) { @ Override public EndpointBuilder < ? extends Endpoint > server ( ) { throw new UnsupportedOperationException ( "Citrus Kubernetes stack has no support for server implementation" ) ; } } ;
public class EphemeralKey { /** * Invalidates an ephemeral API key for a given resource . */ public EphemeralKey delete ( RequestOptions options ) throws StripeException { } }
return request ( RequestMethod . DELETE , instanceUrl ( EphemeralKey . class , this . id ) , ( Map < String , Object > ) null , EphemeralKey . class , options ) ;
public class BlockLocation { /** * Implement readFields of Writable */ public void readFields ( DataInput in ) throws IOException { } }
this . offset = in . readLong ( ) ; this . length = in . readLong ( ) ; this . corrupt = in . readBoolean ( ) ; int numNames = in . readInt ( ) ; this . names = new String [ numNames ] ; for ( int i = 0 ; i < numNames ; i ++ ) { Text name = new Text ( ) ; name . readFields ( in ) ; names [ i ] = name . toString ( ) ; } int numHosts = in . readInt ( ) ; for ( int i = 0 ; i < numHosts ; i ++ ) { Text host = new Text ( ) ; host . readFields ( in ) ; hosts [ i ] = host . toString ( ) ; } int numTops = in . readInt ( ) ; Text path = new Text ( ) ; for ( int i = 0 ; i < numTops ; i ++ ) { path . readFields ( in ) ; topologyPaths [ i ] = path . toString ( ) ; }
public class IntTuples { /** * Add an element with the given value at the given index to the given * tuple , creating a new tuple whose { @ link Tuple # getSize ( ) size } is * one larger than that of the given tuple . * @ param t The tuple * @ param index The index where the element should be added * @ param value The value of the new element * @ param result The result tuple * @ return The result tuple * @ throws IndexOutOfBoundsException If the given index is negative * or greater than the { @ link Tuple # getSize ( ) size } of the given * tuple * @ throws IllegalArgumentException If the given result tuple is not * < code > null < / code > and its { @ link Tuple # getSize ( ) size } is not * the size of the input tuple plus one . */ public static MutableIntTuple insertElementAt ( IntTuple t , int index , int value , MutableIntTuple result ) { } }
if ( index < 0 ) { throw new IndexOutOfBoundsException ( "Index " + index + " is negative" ) ; } if ( index > t . getSize ( ) ) // Note : index = = t . getSize ( ) is valid ! { throw new IndexOutOfBoundsException ( "Index " + index + ", size " + t . getSize ( ) ) ; } if ( result == null ) { result = IntTuples . create ( t . getSize ( ) + 1 ) ; } else if ( result . getSize ( ) != t . getSize ( ) + 1 ) { throw new IllegalArgumentException ( "Input size is " + t . getSize ( ) + ", result size must be " + ( t . getSize ( ) + 1 ) + " but is " + result . getSize ( ) ) ; } int counter = 0 ; for ( int i = 0 ; i < index ; i ++ ) { result . set ( counter , t . get ( i ) ) ; counter ++ ; } result . set ( counter , value ) ; counter ++ ; for ( int i = index ; i < t . getSize ( ) ; i ++ ) { result . set ( counter , t . get ( i ) ) ; counter ++ ; } return result ;
public class Socket { /** * Sends a binary frame on the socket . * @ param message the frame payload bytes * @ param bus the Vert . x event bus used to deliver the frame */ public void publish ( byte [ ] message , EventBus bus ) { } }
// Wrap the raw bytes in a Buffer and publish them to this socket's binary write handler address.
bus . publish ( getBinaryWriteHandlerId ( ) , Buffer . buffer ( message ) ) ;
public class Grammar { public void removeProductions ( int symbol ) { } }
int prod ; for ( prod = getProductionCount ( ) - 1 ; prod >= 0 ; prod -- ) { if ( getLeft ( prod ) == symbol ) { removeProduction ( prod ) ; } }
public class PolicyExecutorImpl { /** * Queue a task to the global executor . * Prereq : maxConcurrencyConstraint permit must already be acquired to reflect the task being queued to global . * If unsuccessful in queuing to global , this method releases the maxConcurrencyConstraint permit . * @ param globalTask task that can execute tasks that are queued to the policy executor . */ private void enqueueGlobal ( GlobalPoolTask globalTask ) { } }
// Tasks queued through this path always run non-expedited.
globalTask . expedite = false ;
boolean submitted = false ;
try {
    globalExecutor . executeWithoutInterceptors ( globalTask ) ;
    submitted = true ;
} finally {
    // If the submit failed (e.g. rejected execution), return the permit the
    // caller acquired so the maxConcurrency accounting stays balanced.
    if ( ! submitted ) {
        maxConcurrencyConstraint . release ( ) ;
        if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) )
            Tr . debug ( this , tc , "expedites/maxConcurrency available" , expeditesAvailable , maxConcurrencyConstraint . availablePermits ( ) ) ;
    }
}
public class TranscriptSequence { /** * Remove a CDS or coding sequence from the transcript sequence * @ param accession * @ return */ public CDSSequence removeCDS ( String accession ) { } }
for ( CDSSequence cdsSequence : cdsSequenceList ) { if ( cdsSequence . getAccession ( ) . getID ( ) . equals ( accession ) ) { cdsSequenceList . remove ( cdsSequence ) ; cdsSequenceHashMap . remove ( accession ) ; return cdsSequence ; } } return null ;
public class GatewayServlet { /** * Parses the query string into a map . * @ param queryString */ protected static final QueryMap parseApiRequestQueryParams ( String queryString ) { } }
QueryMap rval = new QueryMap ( ) ; if ( queryString != null ) { try { String [ ] pairSplit = queryString . split ( "&" ) ; // $ NON - NLS - 1 $ for ( String paramPair : pairSplit ) { int idx = paramPair . indexOf ( "=" ) ; // $ NON - NLS - 1 $ String key , value ; if ( idx != - 1 ) { key = URLDecoder . decode ( paramPair . substring ( 0 , idx ) , "UTF-8" ) ; // $ NON - NLS - 1 $ value = URLDecoder . decode ( paramPair . substring ( idx + 1 ) , "UTF-8" ) ; // $ NON - NLS - 1 $ } else { key = URLDecoder . decode ( paramPair , "UTF-8" ) ; // $ NON - NLS - 1 $ value = null ; } rval . add ( key , value ) ; } } catch ( UnsupportedEncodingException e ) { throw new RuntimeException ( e ) ; } } return rval ;
public class SARLRuntime { /** * Returns the XML representation of the given SRE . * @ param sre the SRE to serialize . * @ param xml the XML representation of the given SRE . * @ throws CoreException if trying to compute the XML for the SRE state encounters a problem . */ public static void setSREFromXML ( ISREInstall sre , String xml ) throws CoreException { } }
try { final Element root = parseXML ( xml , false ) ; sre . setFromXML ( root ) ; } catch ( Throwable e ) { throw new CoreException ( SARLEclipsePlugin . getDefault ( ) . createStatus ( IStatus . ERROR , e ) ) ; }
public class TransitionUtil { /** * Get the list of visible MenuItems * @ param toolbar * @ return the list of visible MenuItems */ public static List < MenuItem > getVisibleMenuItemList ( @ NonNull Toolbar toolbar ) { } }
List < MenuItem > list = new ArrayList < > ( ) ; for ( int i = 0 ; i < toolbar . getChildCount ( ) ; i ++ ) { final View v = toolbar . getChildAt ( i ) ; if ( v instanceof ActionMenuView ) { int childCount = ( ( ActionMenuView ) v ) . getChildCount ( ) ; for ( int j = 0 ; j < childCount ; j ++ ) { final View innerView = ( ( ActionMenuView ) v ) . getChildAt ( j ) ; if ( innerView instanceof ActionMenuItemView ) { list . add ( ( ( ActionMenuItemView ) innerView ) . getItemData ( ) ) ; } } } } return list ;
public class ExpressRouteCircuitPeeringsInner { /** * Creates or updates a peering in the specified express route circuits . * @ param resourceGroupName The name of the resource group . * @ param circuitName The name of the express route circuit . * @ param peeringName The name of the peering . * @ param peeringParameters Parameters supplied to the create or update express route circuit peering operation . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ throws CloudException thrown if the request is rejected by server * @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @ return the ExpressRouteCircuitPeeringInner object if successful . */ public ExpressRouteCircuitPeeringInner beginCreateOrUpdate ( String resourceGroupName , String circuitName , String peeringName , ExpressRouteCircuitPeeringInner peeringParameters ) { } }
return beginCreateOrUpdateWithServiceResponseAsync ( resourceGroupName , circuitName , peeringName , peeringParameters ) . toBlocking ( ) . single ( ) . body ( ) ;
public class XSLTElementProcessor { /** * Receive notification of ignorable whitespace in element content . * @ param handler non - null reference to current StylesheetHandler that is constructing the Templates . * @ param ch The whitespace characters . * @ param start The start position in the character array . * @ param length The number of characters to use from the character array . */ public void ignorableWhitespace ( StylesheetHandler handler , char ch [ ] , int start , int length ) throws org . xml . sax . SAXException { } }
// Intentionally a no-op: ignorable whitespace carries no meaning while building the stylesheet, so the event is discarded.
public class PipelineService { /** * / * DIAMOND BEGIN */ public MaterialRevisions getRevisionsBasedOnDependencies ( MaterialRevisions actualRevisions , CruiseConfig cruiseConfig , CaseInsensitiveString pipelineName ) { } }
FanInGraph fanInGraph = new FanInGraph ( cruiseConfig , pipelineName , materialRepository , pipelineDao , systemEnvironment , materialConfigConverter ) ; final MaterialRevisions computedRevisions = fanInGraph . computeRevisions ( actualRevisions , pipelineTimeline ) ; fillUpNonOverridableRevisions ( actualRevisions , computedRevisions ) ; return restoreOriginalMaterialConfigAndMaterialOrderUsingFingerprint ( actualRevisions , computedRevisions ) ;
public class BasicHttpClient { /** * This is how framework makes the KeyValue pair when " application / x - www - form - urlencoded " headers * is passed in the request . In case you want to build or prepare the requests differently , * you can override this method via @ UseHttpClient ( YourCustomHttpClient . class ) . * @ param httpUrl * @ param methodName * @ param reqBodyAsString * @ return * @ throws IOException */ public RequestBuilder createFormUrlEncodedRequestBuilder ( String httpUrl , String methodName , String reqBodyAsString ) throws IOException { } }
RequestBuilder requestBuilder = RequestBuilder . create ( methodName ) . setUri ( httpUrl ) ; if ( reqBodyAsString != null ) { Map < String , Object > reqBodyMap = HelperJsonUtils . readObjectAsMap ( reqBodyAsString ) ; List < NameValuePair > reqBody = new ArrayList < > ( ) ; for ( String key : reqBodyMap . keySet ( ) ) { reqBody . add ( new BasicNameValuePair ( key , reqBodyMap . get ( key ) . toString ( ) ) ) ; } HttpEntity httpEntity = new UrlEncodedFormEntity ( reqBody ) ; requestBuilder . setEntity ( httpEntity ) ; requestBuilder . setHeader ( CONTENT_TYPE , APPLICATION_FORM_URL_ENCODED ) ; } return requestBuilder ;
public class GuessDialectUtils { /** * Guess dialect based on given JDBC connection instance , Note : this method does * not close connection * @ param jdbcConnection * The connection * @ return dialect or null if can not guess out which dialect */ public static Dialect guessDialect ( Connection jdbcConnection ) { } }
String databaseName ; String driverName ; int majorVersion ; int minorVersion ; try { DatabaseMetaData meta = jdbcConnection . getMetaData ( ) ; driverName = meta . getDriverName ( ) ; databaseName = meta . getDatabaseProductName ( ) ; majorVersion = meta . getDatabaseMajorVersion ( ) ; minorVersion = meta . getDatabaseMinorVersion ( ) ; } catch ( SQLException e ) { return ( Dialect ) DialectException . throwEX ( e ) ; } return guessDialect ( driverName , databaseName , majorVersion , minorVersion ) ;
public class Resolve { /** * Return the most specific of the two methods for a call , * given that both are accessible and applicable . * @ param argtypes the static types of the actual arguments . * @ param m1 A new candidate for most specific . * @ param m2 The previous most specific candidate . * @ param env The current environment . * @ param site The original type from where the selection takes place . * @ param useVarargs Box trailing arguments into an array for varargs . */ Symbol mostSpecific ( List < Type > argtypes , Symbol m1 , Symbol m2 , Env < AttrContext > env , final Type site , boolean useVarargs ) { } }
switch ( m2 . kind ) {
case MTH :
    if ( m1 == m2 ) return m1 ;
    // Check signature-specificity in both directions.
    boolean m1SignatureMoreSpecific = signatureMoreSpecific ( argtypes , env , site , m1 , m2 , useVarargs ) ;
    boolean m2SignatureMoreSpecific = signatureMoreSpecific ( argtypes , env , site , m2 , m1 , useVarargs ) ;
    if ( m1SignatureMoreSpecific && m2SignatureMoreSpecific ) {
        Type mt1 = types . memberType ( site , m1 ) ;
        Type mt2 = types . memberType ( site , m2 ) ;
        if ( ! types . overrideEquivalent ( mt1 , mt2 ) )
            return ambiguityError ( m1 , m2 ) ;
        // same signature ; select ( a ) the non - bridge method , or
        // ( b ) the one that overrides the other , or ( c ) the concrete
        // one , or ( d ) merge both abstract signatures
        if ( ( m1 . flags ( ) & BRIDGE ) != ( m2 . flags ( ) & BRIDGE ) )
            return ( ( m1 . flags ( ) & BRIDGE ) != 0 ) ? m2 : m1 ;
        // if one overrides or hides the other , use it
        TypeSymbol m1Owner = ( TypeSymbol ) m1 . owner ;
        TypeSymbol m2Owner = ( TypeSymbol ) m2 . owner ;
        if ( types . asSuper ( m1Owner . type , m2Owner ) != null && ( ( m1 . owner . flags_field & INTERFACE ) == 0 || ( m2 . owner . flags_field & INTERFACE ) != 0 ) && m1 . overrides ( m2 , m1Owner , types , false ) )
            return m1 ;
        if ( types . asSuper ( m2Owner . type , m1Owner ) != null && ( ( m2 . owner . flags_field & INTERFACE ) == 0 || ( m1 . owner . flags_field & INTERFACE ) != 0 ) && m2 . overrides ( m1 , m2Owner , types , false ) )
            return m2 ;
        boolean m1Abstract = ( m1 . flags ( ) & ABSTRACT ) != 0 ;
        boolean m2Abstract = ( m2 . flags ( ) & ABSTRACT ) != 0 ;
        // Prefer a concrete method over an abstract one.
        if ( m1Abstract && ! m2Abstract ) return m2 ;
        if ( m2Abstract && ! m1Abstract ) return m1 ;
        // both abstract or both concrete
        return ambiguityError ( m1 , m2 ) ;
    }
    if ( m1SignatureMoreSpecific ) return m1 ;
    if ( m2SignatureMoreSpecific ) return m2 ;
    return ambiguityError ( m1 , m2 ) ;
case AMBIGUOUS :
    // compare m1 to ambiguous methods in m2
    AmbiguityError e = ( AmbiguityError ) m2 . baseSymbol ( ) ;
    boolean m1MoreSpecificThanAnyAmbiguous = true ;
    boolean allAmbiguousMoreSpecificThanM1 = true ;
    for ( Symbol s : e . ambiguousSyms ) {
        Symbol moreSpecific = mostSpecific ( argtypes , m1 , s , env , site , useVarargs ) ;
        m1MoreSpecificThanAnyAmbiguous &= moreSpecific == m1 ;
        allAmbiguousMoreSpecificThanM1 &= moreSpecific == s ;
    }
    if ( m1MoreSpecificThanAnyAmbiguous )
        return m1 ;
    // if m1 is more specific than some ambiguous methods , but other ambiguous methods are
    // more specific than m1 , add it as a new ambiguous method :
    if ( ! allAmbiguousMoreSpecificThanM1 )
        e . addAmbiguousSymbol ( m1 ) ;
    return e ;
default :
    throw new AssertionError ( ) ;
}
public class CallbackValidator { /** * { @ inheritDoc } * Validates each URL template of the callback map : the template must be non - empty , its substitution variables must be runtime expressions , the URL built from sample data must be a valid URI , and each mapped value must be a PathItem . */ @ Override public void validate ( ValidationHelper helper , Context context , String key , Callback t ) { } }
String message ;
for ( String urlTemplate : t . keySet ( ) ) {
    // validate urlTemplate is valid
    if ( urlTemplate . isEmpty ( ) ) {
        message = Tr . formatMessage ( tc , "callbackURLTemplateEmpty" ) ;
        helper . addValidationEvent ( new ValidationEvent ( ValidationEvent . Severity . ERROR , context . getLocation ( ) , message ) ) ;
        continue ;
    }
    // Extract the substitution variables; null signals an unbalanced / invalid template.
    List < String > vars = RuntimeExpressionUtils . extractURLVars ( urlTemplate ) ;
    if ( vars == null ) {
        message = Tr . formatMessage ( tc , "callbackInvalidSubstitutionVariables" , urlTemplate ) ;
        helper . addValidationEvent ( new ValidationEvent ( ValidationEvent . Severity . ERROR , context . getLocation ( ) , message ) ) ;
    } else {
        // validate replacement items
        for ( String v : vars ) {
            if ( ! RuntimeExpressionUtils . isRuntimeExpression ( v ) ) {
                message = Tr . formatMessage ( tc , "callbackMustBeRuntimeExpression" , v ) ;
                helper . addValidationEvent ( new ValidationEvent ( ValidationEvent . Severity . ERROR , context . getLocation ( ) , message ) ) ;
            }
        }
        // replace template fields with sample data
        String buildURL = urlTemplate ;
        for ( String v : vars ) {
            String templateVar = "{" + v + "}" ;
            buildURL = buildURL . replace ( templateVar , "e" ) ; // buildURL . replace ( templateVar , " e " ) ; / / Sample data
        }
        if ( urlTemplate . contains ( "{$" ) ) {
            // Path within a Callback can contain variables ( e . g . { $ request . query . callbackUrl } / data ) which shouldn ' t be validated since they are not path params
            if ( OpenAPIUtils . isDebugEnabled ( tc ) ) {
                Tr . debug ( tc , "Path contains variables. Skip validation of url: " + key ) ;
            }
        } else {
            // validate remaining url
            // validating buildURL as URI to account for relative paths
            if ( ! ValidatorUtils . isValidURI ( buildURL ) ) {
                message = Tr . formatMessage ( tc , "callbackInvalidURL" , urlTemplate ) ;
                helper . addValidationEvent ( new ValidationEvent ( ValidationEvent . Severity . ERROR , context . getLocation ( ) , message ) ) ;
            }
        }
    }
    // validate Path item : every callback value must be a PathItem
    Object pathItem = t . get ( urlTemplate ) ;
    if ( ! ( pathItem instanceof PathItem ) ) {
        message = Tr . formatMessage ( tc , "callbackInvalidPathItem" , urlTemplate ) ;
        helper . addValidationEvent ( new ValidationEvent ( ValidationEvent . Severity . ERROR , context . getLocation ( ) , message ) ) ;
    }
}
public class PasswordPropertiesField { /** * Add this to the list of properties that must be encrypted . * @ param strProperty */ public void addPasswordProperty ( String strProperty ) { } }
if ( m_setPropertiesDescriptions == null ) m_setPropertiesDescriptions = new HashSet < String > ( ) ; if ( strProperty != null ) m_setPropertiesDescriptions . add ( strProperty ) ; else m_setPropertiesDescriptions . remove ( strProperty ) ;
public class ClassDelegate { /** * needed */ protected ActivityBehavior determineBehaviour ( ActivityBehavior delegateInstance ) { } }
if ( hasMultiInstanceCharacteristics ( ) ) { multiInstanceActivityBehavior . setInnerActivityBehavior ( ( AbstractBpmnActivityBehavior ) delegateInstance ) ; return multiInstanceActivityBehavior ; } return delegateInstance ;
public class DistributedAvatarFileSystem { /** * This ensures that if we have done a client configuration lookup , the * logicalName might have changed and we still need to allow URIs that specify * the old logical name stored in fsName . It also allows paths that don ' t * specify ports . For example : * We would like to support connecting to a HDFS cluster like this * bin / hadoop dfs - ls hdfs : / / < clustername > . < servicename > / * For this purpose we need to add some logic to checkPath ( ) . The * reason is that DAFS replaces the logical name with the one retrieved * from the client configuration lookup and hence without this check , doing * something like : * bin / hadoop dfs - ls hdfs : / / < clustername > . < servicename > / * would give a Wrong FS error , since < clustername > . < servicename > does not match * with the actual URI after lookup which would be something like < host : port > * Therefore in checkPath ( ) we should also allow paths that have a URI with * the old authority ( < clustername > . < servicename > ) . */ @ Override protected void checkPath ( Path path ) { } }
if ( conf . getBoolean ( "client.configuration.lookup.done" , false ) ) { URI uri = path . toUri ( ) ; String thisScheme = this . getUri ( ) . getScheme ( ) ; String thatScheme = uri . getScheme ( ) ; String thisHost = fsName . getHost ( ) ; String thatHost = uri . getHost ( ) ; if ( thatScheme != null && thisScheme . equalsIgnoreCase ( thatScheme ) ) { if ( thisHost != null && thisHost . equalsIgnoreCase ( thatHost ) ) { return ; } } } super . checkPath ( path ) ;
public class PyroProxy {
    /**
     * (re)connect the proxy to the remote Pyro daemon.
     * <p>
     * No-op when a socket is already open. On a fresh connection the message
     * sequence number is reset, the protocol handshake is performed, and -
     * when the metadata feature is enabled - remote metadata is fetched unless
     * it is already known.
     *
     * @throws UnknownHostException if the daemon host cannot be resolved
     * @throws IOException          on socket or handshake failure
     */
    protected void connect() throws UnknownHostException, IOException {
        if (sock == null) {
            sock = new Socket(hostname, port);
            sock.setKeepAlive(true);
            sock.setTcpNoDelay(true); // request/response traffic: disable Nagle for latency
            sock_out = sock.getOutputStream();
            sock_in = sock.getInputStream();
            sequenceNr = 0; // message sequence numbers are per-connection
            _handshake();
            if (Config.METADATA) {
                // obtain metadata if this feature is enabled, and the metadata is not known yet
                if (!pyroMethods.isEmpty() || !pyroAttrs.isEmpty()) {
                    // not checking _pyroOneway because that feature already existed and it is not yet deprecated
                    // log.debug("reusing existing metadata")
                } else {
                    getMetadata(this.objectid);
                }
            }
        }
    }
}
public class ObjectFactory { /** * Create an instance of { @ link JAXBElement } { @ code < } { @ link String } { @ code > } } */ @ XmlElementDecl ( namespace = "http://www.ibm.com/websphere/wim" , name = "countryName" ) public JAXBElement < String > createCountryName ( String value ) { } }
return new JAXBElement < String > ( _CountryName_QNAME , String . class , null , value ) ;
public class BeanDescImpl {
    /**
     * {@inheritDoc}
     * <p>
     * Delegates directly to {@link Class#getAnnotation}; returns {@code null}
     * when the described class does not carry an annotation of the given type.
     */
    public <T extends Annotation> T getAnnotation(Class<T> type) {
        return clazz.getAnnotation(type);
    }
}
public class PlattSMO { /** * Returns the local decision function for regression training purposes * without the bias term * @ param v the index of the point to select * @ return the decision function output sans bias */ protected double decisionFunctionR ( int v ) { } }
double sum = 0 ; for ( int i = 0 ; i < vecs . size ( ) ; i ++ ) if ( alphas [ i ] != alpha_s [ i ] ) // multipler would be zero sum += ( alphas [ i ] - alpha_s [ i ] ) * kEval ( v , i ) ; return sum ;
public class ProxyList { /** * / / / / / Methods from List interface / / / / / */ @ Override public void add ( final int arg0 , final Object arg1 ) { } }
eagerlyLoadDataCollection ( ) ; List dataList = ( List ) dataCollection ; if ( dataList == null ) { dataList = new ArrayList ( ) ; } if ( arg1 != null && ! dataList . contains ( arg1 ) ) { dataList . add ( arg0 , arg1 ) ; }
public class PropertyLoader { /** * Resolve the config for given element . */ private < T extends AnnotatedElement > Map < T , PropertyInfo > resolveConfig ( String keyPrefix , T element , Set < Class > resolvedConfigs ) { } }
Map < T , PropertyInfo > result = new HashMap < > ( ) ; if ( ! element . isAnnotationPresent ( Config . class ) ) { return result ; } String prefix = concat ( keyPrefix , element . getAnnotation ( Config . class ) . prefix ( ) ) ; Class < ? > returnType = getValueType ( element ) ; checkRecursiveConfigs ( resolvedConfigs , returnType ) ; resolvedConfigs . add ( returnType ) ; Object proxy = populate ( prefix , returnType , resolvedConfigs ) ; result . put ( element , new PropertyInfo ( proxy ) ) ; return result ;
public class DOMUtils { /** * Parse the given XML stream and return the root Element */ public static Element parse ( InputStream xmlStream , DocumentBuilder builder ) throws IOException { } }
try { Document doc ; synchronized ( builder ) // synchronize to prevent concurrent parsing on the same DocumentBuilder { doc = builder . parse ( xmlStream ) ; } return doc . getDocumentElement ( ) ; } catch ( SAXException se ) { throw new IOException ( se . toString ( ) ) ; } finally { xmlStream . close ( ) ; }
public class UserAttrs { /** * Returns user - defined - attribute * @ param path * @ param attribute user : attribute name . user : can be omitted . * @ param def Default value if attribute doesn ' t exist * @ param options * @ return * @ throws IOException */ public static final long getLongAttribute ( Path path , String attribute , long def , LinkOption ... options ) throws IOException { } }
attribute = attribute . startsWith ( "user:" ) ? attribute : "user:" + attribute ; byte [ ] attr = ( byte [ ] ) Files . getAttribute ( path , attribute , options ) ; if ( attr == null ) { return def ; } if ( attr . length != 8 ) { throw new IllegalArgumentException ( attribute + " not correct type" ) ; } return Primitives . readLong ( attr ) ;
public class AuthenticationUtils { /** * Converts an instance of User to JSON object , fit for a cookie * @ param user Instance of User to convert * @ return JSON string containing the user state * @ throws Exception */ static public String userToCookieString ( boolean loggedIn , User user ) throws Exception { } }
JSONObject cookieObj = new JSONObject ( ) ; cookieObj . put ( "logged" , loggedIn ) ; JSONObject userObj = user . toJSON ( ) ; cookieObj . put ( "user" , userObj ) ; StringWriter sw = new StringWriter ( ) ; cookieObj . write ( sw ) ; String cookieRaw = sw . toString ( ) ; String cookieStr = URLEncoder . encode ( cookieRaw , "UTF-8" ) ; cookieStr = cookieStr . replace ( "+" , "%20" ) ; return cookieStr ;
public class MatchAllScorer {
    /**
     * {@inheritDoc}
     * <p>
     * Hands this scorer to the collector first (so it can query the score),
     * then advances through and collects every matching document.
     */
    @Override
    public void score(Collector collector) throws IOException {
        collector.setScorer(this);
        while (nextDoc() != NO_MORE_DOCS) {
            collector.collect(docID());
        }
    }
}
public class ButtonSerializer { /** * ( non - Javadoc ) * @ see com . google . gson . JsonSerializer # serialize ( java . lang . Object , * java . lang . reflect . Type , com . google . gson . JsonSerializationContext ) */ public JsonElement serialize ( Button src , Type typeOfSrc , JsonSerializationContext context ) { } }
ButtonType buttonType = src . getType ( ) ; Class < ? > buttonClass = getButtonClass ( buttonType ) ; return context . serialize ( src , buttonClass ) ;
public class QuerySplitterImpl {
    /**
     * Verifies that the given query can be properly scattered.
     * <p>
     * A scatterable query must target exactly one kind, carry no sort orders,
     * and - when filtered - use only filters accepted by
     * {@code validateFilter}.
     *
     * @param query the query to verify
     * @throws IllegalArgumentException if the query is invalid.
     */
    private void validateQuery(Query query) throws IllegalArgumentException {
        if (query.getKindCount() != 1) {
            throw new IllegalArgumentException("Query must have exactly one kind.");
        }
        // sort orders cannot be preserved across scattered sub-queries
        if (query.getOrderCount() != 0) {
            throw new IllegalArgumentException("Query cannot have any sort orders.");
        }
        if (query.hasFilter()) {
            validateFilter(query.getFilter());
        }
    }
}
public class DetachVolumeRequest { /** * This method is intended for internal use only . Returns the marshaled request configured with additional * parameters to enable operation dry - run . */ @ Override public Request < DetachVolumeRequest > getDryRunRequest ( ) { } }
Request < DetachVolumeRequest > request = new DetachVolumeRequestMarshaller ( ) . marshall ( this ) ; request . addParameter ( "DryRun" , Boolean . toString ( true ) ) ; return request ;
public class Entity {
    /**
     * Adds a mention of this entity given as a list of terms; the list is
     * converted to a term span before being stored.
     *
     * @param span the terms making up the reference
     * @deprecated
     */
    public void addReference(List<Term> span) {
        this.references.add(KAFDocument.<Term>list2Span(span));
    }
}
public class GVRPose {
    /**
     * Sets the world position of a root bone and propagates to all children.
     * This has the effect of moving the overall skeleton to a new position
     * without affecting the orientation of its bones.
     * <p>
     * NOTE(review): despite the documented {@code false} case, this
     * implementation always returns {@code true} - confirm whether a
     * root-bone check is missing.
     *
     * @param x , y, z new world position of root bone.
     * @return true if world position set, false if bone is not a root bone.
     * @see #setWorldPositions
     * @see #getWorldPosition
     */
    public boolean setPosition(float x, float y, float z) {
        Bone bone = mBones[0];
        // delta between the requested position and the root's current world position
        float dx = x - bone.WorldMatrix.m30();
        float dy = y - bone.WorldMatrix.m31();
        float dz = z - bone.WorldMatrix.m32();
        sync();
        // NOTE(review): the root's LOCAL translation is set to the absolute world
        // (x,y,z) while children are shifted by the delta - verify this is
        // intended when the root itself has a parent transform.
        bone.LocalMatrix.setTranslation(x, y, z);
        for (int i = 0; i < mBones.length; ++i) {
            bone = mBones[i];
            bone.WorldMatrix.m30(bone.WorldMatrix.m30() + dx);
            bone.WorldMatrix.m31(bone.WorldMatrix.m31() + dy);
            bone.WorldMatrix.m32(bone.WorldMatrix.m32() + dz);
        }
        if (sDebug) {
            // NOTE(review): 'bone' here is the LAST bone from the loop while the
            // logged name is that of bone 0 - possibly unintended.
            Log.d("BONE", "setWorldPosition: %s ", mSkeleton.getBoneName(0), bone.toString());
        }
        return true;
    }
}
public class CacheControl { /** * Returns a new { @ link StringBuilder } with the common directives appended . * Note that the first two characters ( { @ code " , " } must be stripped . */ protected final StringBuilder newHeaderValueBuffer ( ) { } }
final StringBuilder buf = new StringBuilder ( 40 ) ; if ( noCache ) { buf . append ( ", no-cache" ) ; } if ( noStore ) { buf . append ( ", no-store" ) ; } if ( noTransform ) { buf . append ( ", no-transform" ) ; } if ( maxAgeSeconds >= 0 ) { buf . append ( ", max-age=" ) . append ( maxAgeSeconds ) ; } return buf ;
public class AmazonElasticLoadBalancingClient { /** * Adds the specified Availability Zones to the set of Availability Zones for the specified load balancer in * EC2 - Classic or a default VPC . * For load balancers in a non - default VPC , use < a > AttachLoadBalancerToSubnets < / a > . * The load balancer evenly distributes requests across all its registered Availability Zones that contain * instances . For more information , see < a * href = " http : / / docs . aws . amazon . com / elasticloadbalancing / latest / classic / enable - disable - az . html " > Add or Remove * Availability Zones < / a > in the < i > Classic Load Balancers Guide < / i > . * @ param enableAvailabilityZonesForLoadBalancerRequest * Contains the parameters for EnableAvailabilityZonesForLoadBalancer . * @ return Result of the EnableAvailabilityZonesForLoadBalancer operation returned by the service . * @ throws LoadBalancerNotFoundException * The specified load balancer does not exist . * @ sample AmazonElasticLoadBalancing . EnableAvailabilityZonesForLoadBalancer * @ see < a * href = " http : / / docs . aws . amazon . com / goto / WebAPI / elasticloadbalancing - 2012-06-01 / EnableAvailabilityZonesForLoadBalancer " * target = " _ top " > AWS API Documentation < / a > */ @ Override public EnableAvailabilityZonesForLoadBalancerResult enableAvailabilityZonesForLoadBalancer ( EnableAvailabilityZonesForLoadBalancerRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeEnableAvailabilityZonesForLoadBalancer ( request ) ;
public class CQLTranslator { /** * Build where clause with given clause . * @ param builder * the builder * @ param fieldClazz * the field clazz * @ param field * the field * @ param value * the value * @ param clause * the clause * @ param useToken * the use token */ public void buildWhereClause ( StringBuilder builder , Class fieldClazz , String field , Object value , String clause , boolean useToken ) { } }
builder = onWhereClause ( builder , fieldClazz , field , value , clause , useToken ) ; builder . append ( AND_CLAUSE ) ;
public class PartyRole {
    /**
     * Sets the value of the primaryRolePlayer property.
     *
     * @param value allowed object is
     *              {@link com.ibm.wsspi.security.wim.model.RolePlayer}
     */
    public void setPrimaryRolePlayer(com.ibm.wsspi.security.wim.model.RolePlayer value) {
        this.primaryRolePlayer = value;
    }
}
public class A_CmsListDialog { /** * Stores the given object as " list object " for the given list dialog in the current users session . < p > * @ param listDialog the list dialog class * @ param listObject the list to store */ public void setListObject ( Class < ? > listDialog , CmsHtmlList listObject ) { } }
if ( listObject == null ) { // null object : remove the entry from the map getListObjectMap ( getSettings ( ) ) . remove ( listDialog . getName ( ) ) ; } else { if ( ( listObject . getMetadata ( ) != null ) && listObject . getMetadata ( ) . isVolatile ( ) ) { listObject . setMetadata ( null ) ; } getListObjectMap ( getSettings ( ) ) . put ( listDialog . getName ( ) , listObject ) ; }
public class MariaDbConnection {
    /**
     * <p>Undoes all changes made after the given <code>Savepoint</code> object was set.</p>
     * <p>This method should be used only when auto-commit has been disabled.</p>
     *
     * @param savepoint the <code>Savepoint</code> object to roll back to
     * @throws SQLException if a database access error occurs, this method is
     *                      called while participating in a distributed
     *                      transaction, this method is called on a closed
     *                      connection, the <code>Savepoint</code> object is no
     *                      longer valid, or this <code>Connection</code>
     *                      object is currently in auto-commit mode
     * @see Savepoint
     * @see #rollback
     */
    public void rollback(final Savepoint savepoint) throws SQLException {
        // NOTE(review): relies on savepoint.toString() yielding a valid, properly
        // quoted identifier - confirm only savepoints created by this driver are
        // ever passed here.
        try (Statement st = createStatement()) {
            st.execute("ROLLBACK TO SAVEPOINT " + savepoint.toString());
        }
    }
}
public class Main {
    /**
     * Read file or url specified by <tt>path</tt>.
     * <p>
     * Delegates to {@code SourceReader}, using the shell context factory's
     * configured character encoding.
     *
     * @return file or url content as <tt>byte[]</tt> or as <tt>String</tt> if
     *         <tt>convertToString</tt> is true.
     */
    private static Object readFileOrUrl(String path, boolean convertToString) throws IOException {
        return SourceReader.readFileOrUrl(path, convertToString, shellContextFactory.getCharacterEncoding());
    }
}
public class ObjectFactory { /** * Create an instance of { @ link JAXBElement } { @ code < } { @ link TopoPointPropertyType } { @ code > } * @ param value * Java instance representing xml element ' s value . * @ return * the new instance of { @ link JAXBElement } { @ code < } { @ link TopoPointPropertyType } { @ code > } */ @ XmlElementDecl ( namespace = "http://www.opengis.net/gml" , name = "topoPointProperty" ) public JAXBElement < TopoPointPropertyType > createTopoPointProperty ( TopoPointPropertyType value ) { } }
return new JAXBElement < TopoPointPropertyType > ( _TopoPointProperty_QNAME , TopoPointPropertyType . class , null , value ) ;
public class HINReader { /** * Private method that actually parses the input to read a ChemFile * object . In its current state it is able to read all the molecules * ( if more than one is present ) in the specified HIN file . These are * placed in a MoleculeSet object which in turn is placed in a ChemModel * which in turn is placed in a ChemSequence object and which is finally * placed in a ChemFile object and returned to the user . * @ return A ChemFile containing the data parsed from input . */ private IChemFile readChemFile ( IChemFile file ) { } }
IChemSequence chemSequence = file . getBuilder ( ) . newInstance ( IChemSequence . class ) ; IChemModel chemModel = file . getBuilder ( ) . newInstance ( IChemModel . class ) ; IAtomContainerSet setOfMolecules = file . getBuilder ( ) . newInstance ( IAtomContainerSet . class ) ; String info ; StringTokenizer tokenizer ; List < String > aroringText = new ArrayList < String > ( ) ; List < IAtomContainer > mols = new ArrayList < IAtomContainer > ( ) ; try { String line ; // read in header info while ( true ) { line = input . readLine ( ) ; if ( line . startsWith ( "mol" ) ) { info = getMolName ( line ) ; break ; } } // start the actual molecule data - may be multiple molecule line = input . readLine ( ) ; while ( true ) { if ( line == null ) break ; // end of file if ( line . startsWith ( ";" ) ) continue ; // comment line if ( line . startsWith ( "mol" ) ) { info = getMolName ( line ) ; line = input . readLine ( ) ; } IAtomContainer m = file . getBuilder ( ) . newInstance ( IAtomContainer . class ) ; m . setTitle ( info ) ; // Each element of cons is an ArrayList of length 3 which stores // the start and end indices and bond order of each bond // found in the HIN file . Before adding bonds we need to reduce // the number of bonds so as not to count the same bond twice List < List < Object > > cons = new ArrayList < List < Object > > ( ) ; // read data for current molecule int atomSerial = 0 ; while ( true ) { if ( line == null || line . contains ( "endmol" ) ) { break ; } if ( line . startsWith ( ";" ) ) continue ; // comment line tokenizer = new StringTokenizer ( line , " " ) ; int ntoken = tokenizer . countTokens ( ) ; String [ ] toks = new String [ ntoken ] ; for ( int i = 0 ; i < ntoken ; i ++ ) toks [ i ] = tokenizer . nextToken ( ) ; String sym = toks [ 3 ] ; double charge = Double . parseDouble ( toks [ 6 ] ) ; double x = Double . parseDouble ( toks [ 7 ] ) ; double y = Double . parseDouble ( toks [ 8 ] ) ; double z = Double . 
parseDouble ( toks [ 9 ] ) ; int nbond = Integer . parseInt ( toks [ 10 ] ) ; IAtom atom = file . getBuilder ( ) . newInstance ( IAtom . class , sym , new Point3d ( x , y , z ) ) ; atom . setCharge ( charge ) ; IBond . Order bo = IBond . Order . SINGLE ; for ( int j = 11 ; j < ( 11 + nbond * 2 ) ; j += 2 ) { int s = Integer . parseInt ( toks [ j ] ) - 1 ; // since atoms start from 1 in the file char bt = toks [ j + 1 ] . charAt ( 0 ) ; switch ( bt ) { case 's' : bo = IBond . Order . SINGLE ; break ; case 'd' : bo = IBond . Order . DOUBLE ; break ; case 't' : bo = IBond . Order . TRIPLE ; break ; case 'a' : bo = IBond . Order . QUADRUPLE ; break ; } List < Object > ar = new ArrayList < Object > ( 3 ) ; ar . add ( atomSerial ) ; ar . add ( s ) ; ar . add ( bo ) ; cons . add ( ar ) ; } m . addAtom ( atom ) ; atomSerial ++ ; line = input . readLine ( ) ; } // now just store all the bonds we have for ( List < Object > ar : cons ) { IAtom s = m . getAtom ( ( Integer ) ar . get ( 0 ) ) ; IAtom e = m . getAtom ( ( Integer ) ar . get ( 1 ) ) ; IBond . Order bo = ( IBond . Order ) ar . get ( 2 ) ; if ( ! isConnected ( m , s , e ) ) m . addBond ( file . getBuilder ( ) . newInstance ( IBond . class , s , e , bo ) ) ; } mols . add ( m ) ; // we may not get a ' mol N ' immediately since // the aromaticring keyword might be present // and doesn ' t seem to be located within the molecule // block . However , if we do see this keyword we save this // since it can contain aromatic specs for any molecule // listed in the file // The docs do not explicitly state the the keyword comes // after * all * molecules . So we save and then reprocess // all the molecules in a second pass while ( true ) { line = input . readLine ( ) ; if ( line == null || line . startsWith ( "mol" ) ) break ; if ( line . startsWith ( "aromaticring" ) ) aroringText . add ( line . trim ( ) ) ; } } } catch ( IOException e ) { // FIXME : should make some noise now file = null ; } if ( aroringText . 
size ( ) > 0 ) { // process aromaticring annotations for ( String line : aroringText ) { String [ ] toks = line . split ( " " ) ; int natom = Integer . parseInt ( toks [ 1 ] ) ; int n = 0 ; for ( int i = 2 ; i < toks . length ; i += 2 ) { int molnum = Integer . parseInt ( toks [ i ] ) ; // starts from 1 int atnum = Integer . parseInt ( toks [ i + 1 ] ) ; // starts from 1 mols . get ( molnum - 1 ) . getAtom ( atnum - 1 ) . setFlag ( CDKConstants . ISAROMATIC , true ) ; n ++ ; } assert n == natom ; } } for ( IAtomContainer mol : mols ) setOfMolecules . addAtomContainer ( mol ) ; chemModel . setMoleculeSet ( setOfMolecules ) ; chemSequence . addChemModel ( chemModel ) ; file . addChemSequence ( chemSequence ) ; return file ;
public class PDFBoxTree { /** * Updates the font table by adding new fonts used at the current page . */ protected void updateFontTable ( ) { } }
PDResources resources = pdpage . getResources ( ) ; if ( resources != null ) { try { processFontResources ( resources , fontTable ) ; } catch ( IOException e ) { log . error ( "Error processing font resources: " + "Exception: {} {}" , e . getMessage ( ) , e . getClass ( ) ) ; } }