signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class ComputerVisionImpl { /** * This operation generates a description of an image in human readable language with complete sentences . The description is based on a collection of content tags , which are also returned by the operation . More than one description can be generated for each image . Descriptions are ordered by their confidence score . All descriptions are in English . Two input methods are supported - - ( 1 ) Uploading an image or ( 2 ) specifying an image URL . A successful response will be returned in JSON . If the request failed , the response will contain an error code and a message to help understand what went wrong . * @ param url Publicly reachable URL of an image * @ param describeImageOptionalParameter the object representing the optional parameters to be set before calling this API * @ throws IllegalArgumentException thrown if parameters fail the validation * @ throws ComputerVisionErrorException thrown if the request is rejected by server * @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @ return the ImageDescription object if successful . */ public ImageDescription describeImage ( String url , DescribeImageOptionalParameter describeImageOptionalParameter ) { } }
// Synchronous wrapper: delegates to the async service-response variant, blocks
// the calling thread until the single HTTP response arrives, then unwraps and
// returns its body.
return describeImageWithServiceResponseAsync ( url , describeImageOptionalParameter ) . toBlocking ( ) . single ( ) . body ( ) ;
public class RocksDbQueue { /** * { @ inheritDoc } */ @ Override public boolean queue ( IQueueMessage < ID , DATA > _msg ) { } }
IQueueMessage < ID , DATA > msg = _msg . clone ( ) ; Date now = new Date ( ) ; msg . setNumRequeues ( 0 ) . setQueueTimestamp ( now ) . setTimestamp ( now ) ; try { return putToQueue ( msg , false ) ; } catch ( RocksDBException e ) { throw new QueueException ( e ) ; }
public class SqlParamUtils { /** * 1つのパラメータの設定 * パラメータ値は以下の表記が可能 * < dl > * < dh > [ NULL ] < / dh > * < dd > < code > null < / code > を設定する < / dd > * < dh > [ EMPTY ] < / dh > * < dd > " " ( 空文字 ) を設定する < / dd > * < dh > ' 値 ' < / dh > * < dd > 文字列として設定する . 空白を含めることもできる < / dd > * < dh > [ 値1 , 値2 , . . . ] < / dh > * < dd > 配列として設定する < / dd > * < dh > その他 < / dh > * < dd > 文字列として設定する < / dd > * < / dl > * @ param ctx SqlContext * @ param key パラメータキー * @ param val パラメータ値 */ private static void setParam ( final SqlContext ctx , final String key , final String val ) { } }
if ( val . startsWith ( "[" ) && val . endsWith ( "]" ) && ! ( val . equals ( "[NULL]" ) || val . equals ( "[EMPTY]" ) ) ) { // [ ] で囲まれた値は配列に変換する 。 ex ) [ 1 , 2 ] = > { " 1 " , " 2 " } String [ ] parts = val . substring ( 1 , val . length ( ) - 1 ) . split ( "\\s*,\\s*" ) ; Object [ ] vals = new Object [ parts . length ] ; for ( int i = 0 ; i < parts . length ; i ++ ) { vals [ i ] = convertSingleValue ( parts [ i ] ) ; } ctx . paramList ( key , vals ) ; } else { ctx . param ( key , convertSingleValue ( val ) ) ; }
public class SparseLongArray { /** * Puts a key / value pair into the array , optimizing for the case where * the key is greater than all existing keys in the array . */ public void append ( int key , long value ) { } }
if ( mSize != 0 && key <= mKeys [ mSize - 1 ] ) { put ( key , value ) ; return ; } int pos = mSize ; if ( pos >= mKeys . length ) { growKeyAndValueArrays ( pos + 1 ) ; } mKeys [ pos ] = key ; mValues [ pos ] = value ; mSize = pos + 1 ;
public class MQLinkHandler { /** * / * ( non - Javadoc ) * @ see com . ibm . ws . sib . processor . MQLinkLocalization # delete ( ) */ public void delete ( ) { } }
// Marks this MQ link destination for deletion inside its own local
// transaction, removes it from the destination manager's link index,
// commits, logs an informational message, and finally kicks off the
// asynchronous deletion thread to tidy up.
// NOTE(review): MessageStore/SI exceptions are traced and deliberately NOT
// rethrown (see the commented-out throws below), so deletion always proceeds
// to the asynch-deletion step — confirm this best-effort behavior is intended.
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "delete" ) ;
// Mark the destination for deletion
try {
// Set the deletion flag in the DH persistently . A transaction per DH ? ?
LocalTransaction siTran = txManager . createLocalTransaction ( true ) ;
setToBeDeleted ( true ) ;
// Adjust the destination lookups in Destination Manager
destinationManager . getLinkIndex ( ) . delete ( this ) ;
requestUpdate ( ( Transaction ) siTran ) ;
// commit the transaction
siTran . commit ( ) ;
// Prefer the configured MQ link name; fall back to the handler's own name.
String name = _mqLinkName ;
if ( name == null ) name = getName ( ) ;
SibTr . info ( tc , "MQLINK_DEST_DELETE_INFO_CWSIP0064" , new Object [ ] { name , _mqLinkUuid } ) ;
} catch ( MessageStoreException e ) {
// No FFDC code needed
SibTr . exception ( tc , e ) ;
// throw e ;
} catch ( SIException e ) {
// No FFDC code needed
SibTr . exception ( tc , e ) ;
// handleRollback ( siTran ) ;
// throw e ;
}
// Now start the asynch deletion thread to tidy up
destinationManager . startAsynchDeletion ( ) ;
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "delete" ) ;
public class AuthRequest { /** * Return the full IDP redirect url with encoded SAML request * @ return String SAML request url * @ throws XMLStreamException * @ throws IOException */ public String getRedirectUrl ( ) throws XMLStreamException , IOException { } }
String url = this . settings . getIdpSsoTargetUrl ( ) ; url += "?SAMLRequest=" ; url += URLEncoder . encode ( this . getXmlBase64Request ( ) , "UTF-8" ) ; if ( this . parameters != null ) { for ( Map . Entry < String , String > param : this . parameters . entrySet ( ) ) { String key = URLEncoder . encode ( param . getKey ( ) , "UTF-8" ) ; String val = URLEncoder . encode ( param . getValue ( ) , "UTF-8" ) ; url += "&" + key + "=" + val ; } } return url ;
public class SibDiagnosticModule { /** * Generates a string representation of an object for FFDC . * @ param obj * Object to generate a string representation of * @ return * The string representation of the object */ protected String toFFDCStringSingleObject ( Object obj ) { } }
if ( obj == null ) { return "<null>" ; } else if ( obj instanceof Traceable ) { return ( ( Traceable ) obj ) . toTraceString ( ) ; } else if ( obj instanceof String ) { return ( ( String ) obj ) ; } else if ( obj instanceof byte [ ] ) { return toFFDCString ( ( byte [ ] ) obj ) ; } else { return obj . toString ( ) ; }
public class KeyVaultCredentials { /** * Builds request with authenticated header . Protects request body if supported . * @ param originalRequest * unprotected request without auth token . * @ param challengeMap * the challenge map . * @ return Pair of protected request and HttpMessageSecurity used for * encryption . */ private Pair < Request , HttpMessageSecurity > buildAuthenticatedRequest ( Request originalRequest , Map < String , String > challengeMap ) throws IOException { } }
// Flow: (1) decide whether the service supports message protection (PoP);
// (2) lazily create the client RSA encryption key the first time a protected
// request is built; (3) obtain auth credentials — a null result means no
// request can be built and null is returned; (4) assemble HttpMessageSecurity
// from the token plus, when PoP is supported, the PoP key and the
// server-supplied encryption/signing keys from the challenge map;
// (5) protect the outgoing request and return both pieces as a Pair.
Boolean supportsPop = supportsMessageProtection ( originalRequest . url ( ) . toString ( ) , challengeMap ) ;
// if the service supports pop and a clientEncryptionKey has not been generated yet , generate
// the key that will be used for encryption on this and all subsequent protected requests
if ( supportsPop && this . clientEncryptionKey == null ) { try { final KeyPairGenerator generator = KeyPairGenerator . getInstance ( CLIENT_ENCRYPTION_KEY_TYPE ) ; generator . initialize ( CLIENT_ENCRYPTION_KEY_SIZE ) ; this . clientEncryptionKey = JsonWebKey . fromRSA ( generator . generateKeyPair ( ) ) . withKid ( UUID . randomUUID ( ) . toString ( ) ) ; } catch ( NoSuchAlgorithmException e ) { throw new RuntimeException ( e ) ; } }
AuthenticationResult authResult = getAuthenticationCredentials ( supportsPop , challengeMap ) ;
if ( authResult == null ) { return null ; }
HttpMessageSecurity httpMessageSecurity = new HttpMessageSecurity ( authResult . getAuthToken ( ) , supportsPop ? authResult . getPopKey ( ) : "" , supportsPop ? challengeMap . get ( "x-ms-message-encryption-key" ) : "" , supportsPop ? challengeMap . get ( "x-ms-message-signing-key" ) : "" , this . clientEncryptionKey ) ;
Request request = httpMessageSecurity . protectRequest ( originalRequest ) ;
return Pair . of ( request , httpMessageSecurity ) ;
public class RegisterInstanceRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( RegisterInstanceRequest registerInstanceRequest , ProtocolMarshaller protocolMarshaller ) { } }
// Null-checks the request, then marshalls each field through its static
// protocol binding in declaration order. Any failure is wrapped in a single
// SdkClientException so callers always see one exception type.
if ( registerInstanceRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( registerInstanceRequest . getStackId ( ) , STACKID_BINDING ) ; protocolMarshaller . marshall ( registerInstanceRequest . getHostname ( ) , HOSTNAME_BINDING ) ; protocolMarshaller . marshall ( registerInstanceRequest . getPublicIp ( ) , PUBLICIP_BINDING ) ; protocolMarshaller . marshall ( registerInstanceRequest . getPrivateIp ( ) , PRIVATEIP_BINDING ) ; protocolMarshaller . marshall ( registerInstanceRequest . getRsaPublicKey ( ) , RSAPUBLICKEY_BINDING ) ; protocolMarshaller . marshall ( registerInstanceRequest . getRsaPublicKeyFingerprint ( ) , RSAPUBLICKEYFINGERPRINT_BINDING ) ; protocolMarshaller . marshall ( registerInstanceRequest . getInstanceIdentity ( ) , INSTANCEIDENTITY_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class VoltTrace { /** * Creates a begin duration trace event . This method does not queue the * event . Call { @ link TraceEventBatch # add ( Supplier ) } to queue the event . */ public static TraceEvent beginDuration ( String name , Object ... args ) { } }
// Pure factory: builds a DURATION_BEGIN event with the given name and args.
// The third constructor argument is null here — presumably an id field not
// used by duration-begin events; NOTE(review): confirm against TraceEvent.
return new TraceEvent ( TraceEventType . DURATION_BEGIN , name , null , args ) ;
public class FileBasedJobLockFactory { /** * Try locking the lock . * @ return < em > true < / em > if the lock is successfully locked , * < em > false < / em > if otherwise . * @ throws JobLockException thrown if the { @ link JobLock } fails to be acquired */ boolean tryLock ( Path lockFile ) throws JobLockException { } }
log . debug ( "Attempting lock: {}" , lockFile ) ; try { return this . fs . createNewFile ( lockFile ) ; } catch ( IOException e ) { throw new JobLockException ( e ) ; }
public class SelfCalibrationLinearRotationMulti { /** * Extracts calibration for the reference frame */ void extractReferenceW ( DMatrixRMaj nv ) { } }
W0 . a11 = nv . data [ 0 ] ; W0 . a12 = W0 . a21 = nv . data [ 1 ] ; W0 . a13 = W0 . a31 = nv . data [ 2 ] ; W0 . a22 = nv . data [ 3 ] ; W0 . a23 = W0 . a32 = nv . data [ 4 ] ; W0 . a33 = nv . data [ 5 ] ;
public class Hex { /** * Gets escaped hex string corresponding to the given bytes . * @ param bytes bytes . * @ return escaped hex string */ public static String getEscaped ( byte ... bytes ) { } }
final StringBuilder bld = new StringBuilder ( ) ; for ( byte b : bytes ) { bld . append ( "\\" ) . append ( Hex . get ( b ) ) ; } return bld . toString ( ) ;
public class SymbolUtils { /** * Creates a map of { @ link Symbol } s from a map of { @ link String } s . No keys or values may be * null . */ public static ImmutableMap < Symbol , Symbol > mapFrom ( Map < String , String > stringMap ) { } }
final ImmutableMap . Builder < Symbol , Symbol > ret = ImmutableMap . builder ( ) ; for ( Map . Entry < String , String > stringEntry : stringMap . entrySet ( ) ) { ret . put ( Symbol . from ( stringEntry . getKey ( ) ) , Symbol . from ( stringEntry . getValue ( ) ) ) ; } return ret . build ( ) ;
public class Category { /** * Log a localized message . The user supplied parameter { @ code key } is replaced by its localized version from the * resource bundle . * @ param priority * Priority for log entry * @ param key * Resource key for translation * @ param t * Exception to log * @ see # setResourceBundle * @ since 0.8.4 */ public void l7dlog ( final Priority priority , final String key , final Throwable t ) { } }
ResourceBundle bundle = this . bundle ; String message = bundle == null ? key : bundle . getString ( key ) ; provider . log ( STACKTRACE_DEPTH , null , translatePriority ( priority ) , t , message , ( Object [ ] ) null ) ;
public class BindUploader { /** * Compute the number of array bind values in the given bind map * @ param bindValues the bind map * @ return 0 if bindValues is null , has no binds , or is not an array bind * n otherwise , where n is the number of binds in the array bind */ public static int arrayBindValueCount ( Map < String , ParameterBindingDTO > bindValues ) { } }
if ( ! isArrayBind ( bindValues ) ) { return 0 ; } else { ParameterBindingDTO bindSample = bindValues . values ( ) . iterator ( ) . next ( ) ; List < String > bindSampleValues = ( List < String > ) bindSample . getValue ( ) ; return bindValues . size ( ) * bindSampleValues . size ( ) ; }
public class StrUtils {
    /**
     * Returns the first position of {@code searchStr} within {@code str},
     * ignoring case; -1 if not found. An empty search string matches at
     * index 0 (delegating to {@code String.indexOf} for the empty cases).
     */
    public static int indexOfIgnoreCase(final String str, final String searchStr) { // NOSONAR
        if (searchStr.isEmpty() || str.isEmpty()) {
            // indexOf handles both empty cases with the expected results.
            return str.indexOf(searchStr);
        }
        // Last start index at which a full match could still fit.
        final int limit = str.length() - searchStr.length();
        for (int start = 0; start <= limit; start++) {
            int matched = 0;
            while (matched < searchStr.length()
                    && Character.toLowerCase(str.charAt(start + matched))
                        == Character.toLowerCase(searchStr.charAt(matched))) {
                matched++;
            }
            if (matched == searchStr.length()) {
                return start;
            }
        }
        return -1;
    }
}
public class ObjectUtil { /** * Checks that the given argument is neither null nor empty . * If it is , throws { @ link NullPointerException } or { @ link IllegalArgumentException } . * Otherwise , returns the argument . */ public static < T > T [ ] checkNonEmpty ( T [ ] array , String name ) { } }
// Delegates both checks: checkNotNull rejects a null array, checkPositive
// rejects length 0 (reported against "<name>.length"); the validated array
// is returned unchanged so the call can be inlined into assignments.
checkNotNull ( array , name ) ; checkPositive ( array . length , name + ".length" ) ; return array ;
public class Resource { /** * Set an enterprise custom field value . * @ param index field index * @ param value field value */ public void setEnterpriseCustomField ( int index , String value ) { } }
// Resolves the concrete field for this index within the enterprise custom
// field list, then stores the value under that field.
set ( selectField ( ResourceFieldLists . ENTERPRISE_CUSTOM_FIELD , index ) , value ) ;
public class CommerceDiscountLocalServiceBaseImpl { /** * Returns all the commerce discounts matching the UUID and company . * @ param uuid the UUID of the commerce discounts * @ param companyId the primary key of the company * @ return the matching commerce discounts , or an empty list if no matches were found */ @ Override public List < CommerceDiscount > getCommerceDiscountsByUuidAndCompanyId ( String uuid , long companyId ) { } }
// Straight pass-through to the persistence layer's UUID + companyId finder.
return commerceDiscountPersistence . findByUuid_C ( uuid , companyId ) ;
public class CmsPermissionView { /** * Checks if a certain permission of a permission set is denied . < p > * @ param p the current CmsPermissionSet * @ param value the int value of the permission to check * @ return true if the permission is denied , otherwise false */ protected Boolean isDenied ( CmsPermissionSet p , int value ) { } }
if ( ( p . getDeniedPermissions ( ) & value ) > 0 ) { return Boolean . TRUE ; } return Boolean . FALSE ;
public class FullSegmentation { /** * 从树叶开始反向遍历生成全切分结果 * @ param node 树叶节点 * @ return 全切分结果 */ private List < Word > toWords ( Node node ) { } }
Stack < String > stack = new Stack < > ( ) ; while ( node != null ) { stack . push ( node . getText ( ) ) ; node = node . getParent ( ) ; } int len = stack . size ( ) ; List < Word > list = new ArrayList < > ( len ) ; for ( int i = 0 ; i < len ; i ++ ) { list . add ( new Word ( stack . pop ( ) ) ) ; } return list ;
public class ModelsImpl { /** * Get one entity role for a given entity . * @ param appId The application ID . * @ param versionId The version ID . * @ param hEntityId The hierarchical entity extractor ID . * @ param roleId entity role ID . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the EntityRole object */ public Observable < ServiceResponse < EntityRole > > getHierarchicalEntityRoleWithServiceResponseAsync ( UUID appId , String versionId , UUID hEntityId , UUID roleId ) { } }
// Validates every required parameter up front (endpoint, appId, versionId,
// hEntityId, roleId -> IllegalArgumentException when null), substitutes the
// {Endpoint} placeholder to build the parameterized host, performs the
// service call, and maps the raw Retrofit response through the delegate;
// any delegate failure is converted into an error Observable.
if ( this . client . endpoint ( ) == null ) { throw new IllegalArgumentException ( "Parameter this.client.endpoint() is required and cannot be null." ) ; } if ( appId == null ) { throw new IllegalArgumentException ( "Parameter appId is required and cannot be null." ) ; } if ( versionId == null ) { throw new IllegalArgumentException ( "Parameter versionId is required and cannot be null." ) ; } if ( hEntityId == null ) { throw new IllegalArgumentException ( "Parameter hEntityId is required and cannot be null." ) ; } if ( roleId == null ) { throw new IllegalArgumentException ( "Parameter roleId is required and cannot be null." ) ; } String parameterizedHost = Joiner . on ( ", " ) . join ( "{Endpoint}" , this . client . endpoint ( ) ) ; return service . getHierarchicalEntityRole ( appId , versionId , hEntityId , roleId , this . client . acceptLanguage ( ) , parameterizedHost , this . client . userAgent ( ) ) . flatMap ( new Func1 < Response < ResponseBody > , Observable < ServiceResponse < EntityRole > > > ( ) { @ Override public Observable < ServiceResponse < EntityRole > > call ( Response < ResponseBody > response ) { try { ServiceResponse < EntityRole > clientResponse = getHierarchicalEntityRoleDelegate ( response ) ; return Observable . just ( clientResponse ) ; } catch ( Throwable t ) { return Observable . error ( t ) ; } } } ) ;
public class TypeToken { /** * Returns the { @ link Invokable } for { @ code method } , which must be a member of { @ code T } . * @ since 14.0 */ public final Invokable < T , Object > method ( Method method ) { } }
// First verifies that the method's declaring class is a supertype of T
// (otherwise IllegalArgumentException). Then wraps the method in an
// Invokable whose generic return, parameter and exception types are resolved
// against this TypeToken's type context, and whose toString renders as
// "Owner.method".
checkArgument ( of ( method . getDeclaringClass ( ) ) . isAssignableFrom ( this ) , "%s not declared by %s" , method , this ) ; return new Invokable . MethodInvokable < T > ( method ) { @ Override Type getGenericReturnType ( ) { return resolveType ( super . getGenericReturnType ( ) ) . getType ( ) ; } @ Override Type [ ] getGenericParameterTypes ( ) { return resolveInPlace ( super . getGenericParameterTypes ( ) ) ; } @ Override Type [ ] getGenericExceptionTypes ( ) { return resolveInPlace ( super . getGenericExceptionTypes ( ) ) ; } @ Override public TypeToken < T > getOwnerType ( ) { return TypeToken . this ; } @ Override public String toString ( ) { return getOwnerType ( ) + "." + super . toString ( ) ; } } ;
public class IonLobLite { /** * Calculate LOB hash code as XOR of seed with CRC - 32 of the LOB data . * This distinguishes BLOBs from CLOBs * @ param seed Seed value * @ return hash code */ protected int lobHashCode ( int seed , SymbolTableProvider symbolTableProvider ) { } }
int result = seed ; if ( ! isNullValue ( ) ) { CRC32 crc = new CRC32 ( ) ; crc . update ( getBytes ( ) ) ; result ^= ( int ) crc . getValue ( ) ; } return hashTypeAnnotations ( result , symbolTableProvider ) ;
public class InMemoryEventLoggingListener { /** * Convenience method for getting only the names of the events . * @ return list of event names , by order of selection . */ public List < String > eventNames ( ) { } }
return events . stream ( ) . map ( BEvent :: getName ) . collect ( toList ( ) ) ;
public class CorrelationIdPlugin { /** * Postprocessor method that assigns the Correlation - Id from the * request to a header on the Response . * @ param request the incoming Request . * @ param response the outgoing Response . */ @ Override public void process ( Request request , Response response ) { } }
if ( ! response . hasHeader ( CORRELATION_ID ) ) { response . addHeader ( CORRELATION_ID , request . getHeader ( CORRELATION_ID ) ) ; }
public class MagickUtil { /** * Converts a bi - level { @ code MagickImage } to a { @ code BufferedImage } , of * type { @ code TYPE _ BYTE _ BINARY } . * @ param pImage the original { @ code MagickImage } * @ return a new { @ code BufferedImage } * @ throws MagickException if an exception occurs during conversion * @ see BufferedImage */ private static BufferedImage bilevelToBuffered ( MagickImage pImage ) throws MagickException { } }
// As there is no way to get the binary representation of the image,
// convert to gray first, then create a binary image from it.
// CM_MONOCHROME is a project-defined IndexColorModel for the 1-bit target —
// NOTE(review): confirm it matches TYPE_BYTE_BINARY's expectations.
BufferedImage temp = grayToBuffered ( pImage , false ) ;
BufferedImage image = new BufferedImage ( temp . getWidth ( ) , temp . getHeight ( ) , BufferedImage . TYPE_BYTE_BINARY , CM_MONOCHROME ) ;
// Render the gray version onto the binary target to perform the conversion.
ImageUtil . drawOnto ( image , temp ) ;
return image ;
public class Codecs { /** * Return a scala { @ code Codec } with the given allele { @ link Supplier } , * allele { @ code validator } and { @ code Chromosome } length . The * { @ code supplier } is responsible for creating new random alleles , and the * { @ code validator } can verify it . * @ param < A > the allele type * @ param supplier the allele - supplier which is used for creating new , * random alleles * @ param validator the validator used for validating the created gene . This * predicate is used in the { @ link AnyGene # isValid ( ) } method . * @ param length the vector length * @ return a new { @ code Codec } with the given parameters * @ throws NullPointerException if one of the parameters is { @ code null } * @ throws IllegalArgumentException if the length of the vector is smaller * than one . */ public static < A > Codec < ISeq < A > , AnyGene < A > > ofVector ( final Supplier < ? extends A > supplier , final Predicate < ? super A > validator , final int length ) { } }
// Delegates to the four-argument overload, supplying an always-true
// sequence-level validator so only the per-allele validator applies.
return ofVector ( supplier , validator , Predicates . < ISeq < A > > True ( ) , length ) ;
public class Connector { /** * Create a name from the given namespace URI and local name . This is equivalent to calling " * < code > factories ( ) . getNameFactory ( ) . create ( namespaceUri , localName ) < / code > " , and is simply provided for convenience . * @ param namespaceUri the namespace URI * @ param localName the local name * @ return the new name * @ throws IllegalArgumentException if the local name is < code > null < / code > or empty * @ see NameFactory # create ( String , TextDecoder ) * @ see NameFactory # create ( String , String , TextDecoder ) * @ see NameFactory # create ( String , String ) * @ see # nameFrom ( String ) * @ see # nameFrom ( String , String , TextDecoder ) */ protected final Name nameFrom ( String namespaceUri , String localName ) { } }
// Pure convenience delegation to the connector's NameFactory.
return factories ( ) . getNameFactory ( ) . create ( namespaceUri , localName ) ;
public class JMatrix { /** * Finds all eigenvalues of an upper Hessenberg matrix A [ 0 . . n - 1 ] [ 0 . . n - 1 ] . * On input A can be exactly as output from elmhes and eltran . On output , d and e * contain the eigenvalues of A , while V is a matrix whose columns contain * the corresponding eigenvectors . The eigenvalues are not sorted , except * that complex conjugate pairs appear consecutively with the eigenvalue * having the positive imaginary part . For a complex eigenvalue , only the * eigenvector corresponding to the eigenvalue with positive imaginary part * is stored , with real part in V [ 0 . . n - 1 ] [ i ] and imaginary part in V [ 0 . . n - 1 ] [ i + 1 ] . * The eigenvectors are not normalized . */ private static void hqr2 ( DenseMatrix A , DenseMatrix V , double [ ] d , double [ ] e ) { } }
// QR algorithm with shifts for a real upper Hessenberg matrix (the classic
// "hqr2" routine). Phase 1: iteratively deflate 1x1 (real) and 2x2
// (complex-conjugate) blocks from the bottom, applying exceptional shifts at
// iterations 10 and 20 and failing after 30 iterations per eigenvalue.
// Phase 2 (after "if ( anorm != 0.0 )"): back-substitute to build the
// eigenvectors, then transform them back via the accumulated similarity in V.
// NOTE(review): Math.EPSILON / Math.sqr are project helpers (smile-style
// Math), not java.lang.Math. Statement order is numerically critical — code
// left byte-identical.
int n = A . nrows ( ) ; int nn , m , l , k , j , its , i , mmin , na ; double z = 0.0 , y , x , w , v , u , t , s = 0.0 , r = 0.0 , q = 0.0 , p = 0.0 , anorm = 0.0 , ra , sa , vr , vi ; for ( i = 0 ; i < n ; i ++ ) { for ( j = Math . max ( i - 1 , 0 ) ; j < n ; j ++ ) { anorm += Math . abs ( A . get ( i , j ) ) ; } } nn = n - 1 ; t = 0.0 ; while ( nn >= 0 ) { its = 0 ; do { for ( l = nn ; l > 0 ; l -- ) { s = Math . abs ( A . get ( l - 1 , l - 1 ) ) + Math . abs ( A . get ( l , l ) ) ; if ( s == 0.0 ) { s = anorm ; } if ( Math . abs ( A . get ( l , l - 1 ) ) <= Math . EPSILON * s ) { A . set ( l , l - 1 , 0.0 ) ; break ; } } x = A . get ( nn , nn ) ; if ( l == nn ) { d [ nn ] = x + t ; A . set ( nn , nn , x + t ) ; nn -- ; } else { y = A . get ( nn - 1 , nn - 1 ) ; w = A . get ( nn , nn - 1 ) * A . get ( nn - 1 , nn ) ; if ( l == nn - 1 ) { p = 0.5 * ( y - x ) ; q = p * p + w ; z = Math . sqrt ( Math . abs ( q ) ) ; x += t ; A . set ( nn , nn , x ) ; A . set ( nn - 1 , nn - 1 , y + t ) ; if ( q >= 0.0 ) { z = p + Math . copySign ( z , p ) ; d [ nn - 1 ] = d [ nn ] = x + z ; if ( z != 0.0 ) { d [ nn ] = x - w / z ; } x = A . get ( nn , nn - 1 ) ; s = Math . abs ( x ) + Math . abs ( z ) ; p = x / s ; q = z / s ; r = Math . sqrt ( p * p + q * q ) ; p /= r ; q /= r ; for ( j = nn - 1 ; j < n ; j ++ ) { z = A . get ( nn - 1 , j ) ; A . set ( nn - 1 , j , q * z + p * A . get ( nn , j ) ) ; A . set ( nn , j , q * A . get ( nn , j ) - p * z ) ; } for ( i = 0 ; i <= nn ; i ++ ) { z = A . get ( i , nn - 1 ) ; A . set ( i , nn - 1 , q * z + p * A . get ( i , nn ) ) ; A . set ( i , nn , q * A . get ( i , nn ) - p * z ) ; } for ( i = 0 ; i < n ; i ++ ) { z = V . get ( i , nn - 1 ) ; V . set ( i , nn - 1 , q * z + p * V . get ( i , nn ) ) ; V . set ( i , nn , q * V . 
get ( i , nn ) - p * z ) ; } } else { d [ nn ] = x + p ; e [ nn ] = - z ; d [ nn - 1 ] = d [ nn ] ; e [ nn - 1 ] = - e [ nn ] ; } nn -= 2 ; } else { if ( its == 30 ) { throw new IllegalArgumentException ( "Too many iterations in hqr" ) ; } if ( its == 10 || its == 20 ) { t += x ; for ( i = 0 ; i < nn + 1 ; i ++ ) { A . sub ( i , i , x ) ; } s = Math . abs ( A . get ( nn , nn - 1 ) ) + Math . abs ( A . get ( nn - 1 , nn - 2 ) ) ; y = x = 0.75 * s ; w = - 0.4375 * s * s ; } ++ its ; for ( m = nn - 2 ; m >= l ; m -- ) { z = A . get ( m , m ) ; r = x - z ; s = y - z ; p = ( r * s - w ) / A . get ( m + 1 , m ) + A . get ( m , m + 1 ) ; q = A . get ( m + 1 , m + 1 ) - z - r - s ; r = A . get ( m + 2 , m + 1 ) ; s = Math . abs ( p ) + Math . abs ( q ) + Math . abs ( r ) ; p /= s ; q /= s ; r /= s ; if ( m == l ) { break ; } u = Math . abs ( A . get ( m , m - 1 ) ) * ( Math . abs ( q ) + Math . abs ( r ) ) ; v = Math . abs ( p ) * ( Math . abs ( A . get ( m - 1 , m - 1 ) ) + Math . abs ( z ) + Math . abs ( A . get ( m + 1 , m + 1 ) ) ) ; if ( u <= Math . EPSILON * v ) { break ; } } for ( i = m ; i < nn - 1 ; i ++ ) { A . set ( i + 2 , i , 0.0 ) ; if ( i != m ) { A . set ( i + 2 , i - 1 , 0.0 ) ; } } for ( k = m ; k < nn ; k ++ ) { if ( k != m ) { p = A . get ( k , k - 1 ) ; q = A . get ( k + 1 , k - 1 ) ; r = 0.0 ; if ( k + 1 != nn ) { r = A . get ( k + 2 , k - 1 ) ; } if ( ( x = Math . abs ( p ) + Math . abs ( q ) + Math . abs ( r ) ) != 0.0 ) { p /= x ; q /= x ; r /= x ; } } if ( ( s = Math . copySign ( Math . sqrt ( p * p + q * q + r * r ) , p ) ) != 0.0 ) { if ( k == m ) { if ( l != m ) { A . set ( k , k - 1 , - A . get ( k , k - 1 ) ) ; } } else { A . set ( k , k - 1 , - s * x ) ; } p += s ; x = p / s ; y = q / s ; z = r / s ; q /= p ; r /= p ; for ( j = k ; j < n ; j ++ ) { p = A . get ( k , j ) + q * A . get ( k + 1 , j ) ; if ( k + 1 != nn ) { p += r * A . get ( k + 2 , j ) ; A . sub ( k + 2 , j , p * z ) ; } A . sub ( k + 1 , j , p * y ) ; A . 
sub ( k , j , p * x ) ; } mmin = nn < k + 3 ? nn : k + 3 ; for ( i = 0 ; i < mmin + 1 ; i ++ ) { p = x * A . get ( i , k ) + y * A . get ( i , k + 1 ) ; if ( k + 1 != nn ) { p += z * A . get ( i , k + 2 ) ; A . sub ( i , k + 2 , p * r ) ; } A . sub ( i , k + 1 , p * q ) ; A . sub ( i , k , p ) ; } for ( i = 0 ; i < n ; i ++ ) { p = x * V . get ( i , k ) + y * V . get ( i , k + 1 ) ; if ( k + 1 != nn ) { p += z * V . get ( i , k + 2 ) ; V . sub ( i , k + 2 , p * r ) ; } V . sub ( i , k + 1 , p * q ) ; V . sub ( i , k , p ) ; } } } } } } while ( l + 1 < nn ) ; } if ( anorm != 0.0 ) { for ( nn = n - 1 ; nn >= 0 ; nn -- ) { p = d [ nn ] ; q = e [ nn ] ; na = nn - 1 ; if ( q == 0.0 ) { m = nn ; A . set ( nn , nn , 1.0 ) ; for ( i = nn - 1 ; i >= 0 ; i -- ) { w = A . get ( i , i ) - p ; r = 0.0 ; for ( j = m ; j <= nn ; j ++ ) { r += A . get ( i , j ) * A . get ( j , nn ) ; } if ( e [ i ] < 0.0 ) { z = w ; s = r ; } else { m = i ; if ( e [ i ] == 0.0 ) { t = w ; if ( t == 0.0 ) { t = Math . EPSILON * anorm ; } A . set ( i , nn , - r / t ) ; } else { x = A . get ( i , i + 1 ) ; y = A . get ( i + 1 , i ) ; q = Math . sqr ( d [ i ] - p ) + Math . sqr ( e [ i ] ) ; t = ( x * s - z * r ) / q ; A . set ( i , nn , t ) ; if ( Math . abs ( x ) > Math . abs ( z ) ) { A . set ( i + 1 , nn , ( - r - w * t ) / x ) ; } else { A . set ( i + 1 , nn , ( - s - y * t ) / z ) ; } } t = Math . abs ( A . get ( i , nn ) ) ; if ( Math . EPSILON * t * t > 1 ) { for ( j = i ; j <= nn ; j ++ ) { A . div ( j , nn , t ) ; } } } } } else if ( q < 0.0 ) { m = na ; if ( Math . abs ( A . get ( nn , na ) ) > Math . abs ( A . get ( na , nn ) ) ) { A . set ( na , na , q / A . get ( nn , na ) ) ; A . set ( na , nn , - ( A . get ( nn , nn ) - p ) / A . get ( nn , na ) ) ; } else { Complex temp = cdiv ( 0.0 , - A . get ( na , nn ) , A . get ( na , na ) - p , q ) ; A . set ( na , na , temp . re ( ) ) ; A . set ( na , nn , temp . im ( ) ) ; } A . set ( nn , na , 0.0 ) ; A . 
set ( nn , nn , 1.0 ) ; for ( i = nn - 2 ; i >= 0 ; i -- ) { w = A . get ( i , i ) - p ; ra = sa = 0.0 ; for ( j = m ; j <= nn ; j ++ ) { ra += A . get ( i , j ) * A . get ( j , na ) ; sa += A . get ( i , j ) * A . get ( j , nn ) ; } if ( e [ i ] < 0.0 ) { z = w ; r = ra ; s = sa ; } else { m = i ; if ( e [ i ] == 0.0 ) { Complex temp = cdiv ( - ra , - sa , w , q ) ; A . set ( i , na , temp . re ( ) ) ; A . set ( i , nn , temp . im ( ) ) ; } else { x = A . get ( i , i + 1 ) ; y = A . get ( i + 1 , i ) ; vr = Math . sqr ( d [ i ] - p ) + Math . sqr ( e [ i ] ) - q * q ; vi = 2.0 * q * ( d [ i ] - p ) ; if ( vr == 0.0 && vi == 0.0 ) { vr = Math . EPSILON * anorm * ( Math . abs ( w ) + Math . abs ( q ) + Math . abs ( x ) + Math . abs ( y ) + Math . abs ( z ) ) ; } Complex temp = cdiv ( x * r - z * ra + q * sa , x * s - z * sa - q * ra , vr , vi ) ; A . set ( i , na , temp . re ( ) ) ; A . set ( i , nn , temp . im ( ) ) ; if ( Math . abs ( x ) > Math . abs ( z ) + Math . abs ( q ) ) { A . set ( i + 1 , na , ( - ra - w * A . get ( i , na ) + q * A . get ( i , nn ) ) / x ) ; A . set ( i + 1 , nn , ( - sa - w * A . get ( i , nn ) - q * A . get ( i , na ) ) / x ) ; } else { temp = cdiv ( - r - y * A . get ( i , na ) , - s - y * A . get ( i , nn ) , z , q ) ; A . set ( i + 1 , na , temp . re ( ) ) ; A . set ( i + 1 , nn , temp . im ( ) ) ; } } } t = Math . max ( Math . abs ( A . get ( i , na ) ) , Math . abs ( A . get ( i , nn ) ) ) ; if ( Math . EPSILON * t * t > 1 ) { for ( j = i ; j <= nn ; j ++ ) { A . div ( j , na , t ) ; A . div ( j , nn , t ) ; } } } } } for ( j = n - 1 ; j >= 0 ; j -- ) { for ( i = 0 ; i < n ; i ++ ) { z = 0.0 ; for ( k = 0 ; k <= j ; k ++ ) { z += V . get ( i , k ) * A . get ( k , j ) ; } V . set ( i , j , z ) ; } } }
public class Disjunction { /** * Adds all assertions for the given IDisjunct to this disjunction . */ public Disjunction add ( IDisjunct disjunct ) { } }
for ( Iterator < IAssertion > assertions = disjunct . getAssertions ( ) ; assertions . hasNext ( ) ; ) { add ( assertions . next ( ) ) ; } return this ;
public class EventHubConnectionsInner { /** * Checks that the Event Hub data connection parameters are valid . * @ param resourceGroupName The name of the resource group containing the Kusto cluster . * @ param clusterName The name of the Kusto cluster . * @ param databaseName The name of the database in the Kusto cluster . * @ param parameters The Event Hub connection parameters supplied to the CreateOrUpdate operation . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ throws CloudException thrown if the request is rejected by server * @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @ return the EventHubConnectionValidationListResultInner object if successful . */ public EventHubConnectionValidationListResultInner eventhubConnectionValidation ( String resourceGroupName , String clusterName , String databaseName , EventHubConnectionValidation parameters ) { } }
// Blocking wrapper: delegates to the async service-response variant and
// returns the unwrapped body of the single response.
return eventhubConnectionValidationWithServiceResponseAsync ( resourceGroupName , clusterName , databaseName , parameters ) . toBlocking ( ) . single ( ) . body ( ) ;
public class Histogram3D {
    /**
     * Generate the frequency table.
     * Bins the (x, y) samples into an xbins-by-ybins grid, then precomputes the
     * eight corner coordinates of the cuboid drawn for each bin.
     * @param data n-by-2 array of (x, y) samples; must be non-empty
     * @param xbins number of bins along the x axis
     * @param ybins number of bins along the y axis
     * @param prob if true, counts are normalized to relative frequencies
     */
    private void init(double[][] data, int xbins, int ybins, boolean prob) {
        // Generate the histogram.
        if (data.length == 0) {
            throw new IllegalArgumentException("array is empty.");
        }
        if (data[0].length != 2) {
            throw new IllegalArgumentException("dimension is not 2.");
        }
        // Scan once for the bounding box of the samples.
        double xmin = data[0][0];
        double xmax = data[0][0];
        double ymin = data[0][1];
        double ymax = data[0][1];
        for (int i = 1; i < data.length; i++) {
            if (xmin > data[i][0]) {
                xmin = data[i][0];
            }
            if (xmax < data[i][0]) {
                xmax = data[i][0];
            }
            if (ymin > data[i][1]) {
                ymin = data[i][1];
            }
            if (ymax < data[i][1]) {
                ymax = data[i][1];
            }
        }
        double xspan = xmax - xmin;
        double xwidth = xspan / xbins;
        double yspan = ymax - ymin;
        double ywidth = yspan / ybins;
        // freq[k] = {bin center x, bin center y, count}; bins laid out row-major as j * xbins + i.
        freq = new double[xbins * ybins][3];
        freq[0][0] = xmin + xwidth / 2;
        freq[0][1] = ymin + ywidth / 2;
        for (int i = 0; i < xbins; i++) {
            for (int j = 0; j < ybins; j++) {
                freq[j * xbins + i][0] = freq[0][0] + xwidth * i;
                freq[j * xbins + i][1] = freq[0][1] + ywidth * j;
            }
        }
        // Bin each sample; samples landing on the max edge are clamped into the last bin.
        for (int k = 0; k < data.length; k++) {
            int i = (int) ((data[k][0] - xmin) / xwidth);
            if (i >= xbins) {
                i = xbins - 1;
            }
            int j = (int) ((data[k][1] - ymin) / ywidth);
            if (j >= ybins) {
                j = ybins - 1;
            }
            freq[j * xbins + i][2]++;
        }
        if (prob) {
            // Convert raw counts into relative frequencies.
            for (int i = 0; i < freq.length; i++) {
                freq[i][2] /= data.length;
            }
        }
        // Track the tallest bin (used below for palette band width).
        max = Double.NEGATIVE_INFINITY;
        for (int i = 0; i < freq.length; i++) {
            if (freq[i][2] > max) {
                max = freq[i][2];
            }
        }
        if (palette != null) {
            width = max / palette.length;
        }
        // calculate cube coordinates.
        topNW = new double[freq.length][3];
        topNE = new double[freq.length][3];
        topSW = new double[freq.length][3];
        topSE = new double[freq.length][3];
        bottomNW = new double[freq.length][3];
        bottomNE = new double[freq.length][3];
        bottomSW = new double[freq.length][3];
        bottomSE = new double[freq.length][3];
        // Each bin becomes a cuboid: four top corners at the bin's height, four bottom corners at z = 0.
        for (int i = 0; i < freq.length; i++) {
            topNW[i][0] = freq[i][0] - xwidth / 2;
            topNW[i][1] = freq[i][1] + ywidth / 2;
            topNW[i][2] = freq[i][2];
            topNE[i][0] = freq[i][0] + xwidth / 2;
            topNE[i][1] = freq[i][1] + ywidth / 2;
            topNE[i][2] = freq[i][2];
            topSW[i][0] = freq[i][0] - xwidth / 2;
            topSW[i][1] = freq[i][1] - ywidth / 2;
            topSW[i][2] = freq[i][2];
            topSE[i][0] = freq[i][0] + xwidth / 2;
            topSE[i][1] = freq[i][1] - ywidth / 2;
            topSE[i][2] = freq[i][2];
            bottomNW[i][0] = freq[i][0] - xwidth / 2;
            bottomNW[i][1] = freq[i][1] + ywidth / 2;
            bottomNW[i][2] = 0;
            bottomNE[i][0] = freq[i][0] + xwidth / 2;
            bottomNE[i][1] = freq[i][1] + ywidth / 2;
            bottomNE[i][2] = 0;
            bottomSW[i][0] = freq[i][0] - xwidth / 2;
            bottomSW[i][1] = freq[i][1] - ywidth / 2;
            bottomSW[i][2] = 0;
            bottomSE[i][0] = freq[i][0] + xwidth / 2;
            bottomSE[i][1] = freq[i][1] - ywidth / 2;
            bottomSE[i][2] = 0;
        }
        // Scratch buffers sized 6 faces per bin — presumably for depth sorting at paint
        // time (z values + paint order); TODO confirm against the rendering code.
        z = new double[6 * freq.length];
        order = new int[6 * freq.length];
        zTopNW = new double[freq.length];
        zTopNE = new double[freq.length];
        zTopSW = new double[freq.length];
        zTopSE = new double[freq.length];
        zBottomNW = new double[freq.length];
        zBottomNE = new double[freq.length];
        zBottomSW = new double[freq.length];
        zBottomSE = new double[freq.length];
    }
}
public class ApiOvhEmailmxplan {
    /**
     * Accounts associated to this mxplan service.
     * REST: GET /email/mxplan/{service}/account
     * @param service [required] The internal name of your mxplan organization
     * @param id [required] Filter the value of id property (like)
     * @param primaryEmailAddress [required] Filter the value of primaryEmailAddress property (like)
     * @return the list of matching account identifiers
     * @throws IOException if the HTTP request fails
     * API beta
     */
    public ArrayList<String> service_account_GET(String service, Long id, String primaryEmailAddress) throws IOException {
        // Substitute {service} into the path template, then append the optional query filters.
        String qPath = "/email/mxplan/{service}/account";
        StringBuilder sb = path(qPath, service);
        query(sb, "id", id);
        query(sb, "primaryEmailAddress", primaryEmailAddress);
        // Execute the GET and deserialize the JSON body
        // (t1 is presumably a type token for ArrayList<String> — TODO confirm its declaration).
        String resp = exec(qPath, "GET", sb.toString(), null);
        return convertTo(resp, t1);
    }
}
public class JQMListItem {
    /**
     * Sets the transition to be used by this list item when loading the URL.
     * If the item has no anchor yet and a transition is requested, a placeholder
     * URL of "#" is set first — presumably so that setUrl creates the anchor
     * element the transition attaches to (TODO confirm setUrl's side effect).
     */
    @Override
    public void setTransition(Transition transition) {
        if (anchor == null) {
            if (transition != null) {
                setUrl("#");
            } else {
                // No anchor and nothing to apply — nothing to do.
                return;
            }
        }
        // Re-check: anchor may (or may not) have been created by setUrl above.
        if (anchor != null)
            JQMCommon.setTransition(anchor, transition);
    }
}
public class MapTransitionExtractor { /** * Get the tile transition with one group only . * @ param groups The groups ( must contain one group ) . * @ return The tile transition . */ private static Transition getTransitionSingleGroup ( Collection < String > groups ) { } }
final Iterator < String > iterator = groups . iterator ( ) ; final String group = iterator . next ( ) ; return new Transition ( TransitionType . CENTER , group , group ) ;
public class FileAuditHandler {
    /**
     * Probes whether the current security context grants read and write access
     * to the given path.
     * @param path the file system path to check
     * @return true if "read,write" permission is granted, false if the access
     *         controller denies it
     */
    static boolean hasDiskAccess(final String path) {
        final FilePermission readWrite = new FilePermission(path, "read,write");
        try {
            AccessController.checkPermission(readWrite);
        } catch (AccessControlException denied) {
            return false;
        }
        return true;
    }
}
public class RandomAccessFile { /** * Reads { @ code byteCount } bytes from this stream and stores them in the byte * array { @ code dst } starting at { @ code offset } . If { @ code byteCount } is zero , then this * method returns without reading any bytes . Otherwise , this method blocks until * { @ code byteCount } bytes have been read . If insufficient bytes are available , * { @ code EOFException } is thrown . If an I / O error occurs , { @ code IOException } is * thrown . When an exception is thrown , some bytes may have been consumed from the stream * and written into the array . * @ param dst * the byte array into which the data is read . * @ param offset * the offset in { @ code dst } at which to store the bytes . * @ param byteCount * the number of bytes to read . * @ throws EOFException * if the end of the source stream is reached before enough * bytes have been read . * @ throws IndexOutOfBoundsException * if { @ code offset < 0 } or { @ code byteCount < 0 } , or * { @ code offset + byteCount > dst . length } . * @ throws IOException * if a problem occurs while reading from this stream . * @ throws NullPointerException * if { @ code dst } is null . */ public final void readFully ( byte [ ] dst , int offset , int byteCount ) throws IOException { } }
Arrays . checkOffsetAndCount ( dst . length , offset , byteCount ) ; while ( byteCount > 0 ) { int result = read ( dst , offset , byteCount ) ; if ( result < 0 ) { throw new EOFException ( ) ; } offset += result ; byteCount -= result ; }
public class BaseDfuImpl {
    /**
     * Requests given MTU. This method is only supported on Android Lollipop or newer versions.
     * Only DFU from SDK 14.1 or newer supports MTU &gt; 23.
     * Blocks until the MTU exchange completes, the device disconnects, or an error occurs.
     * @param mtu new MTU to be requested.
     * @throws UploadAbortedException if the upload was aborted before the request
     * @throws DeviceDisconnectedException if the device disconnected while waiting
     */
    @RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
    void requestMtu(@IntRange(from = 0, to = 517) final int mtu) throws DeviceDisconnectedException, UploadAbortedException {
        if (mAborted)
            throw new UploadAbortedException();
        mRequestCompleted = false;
        mService.sendLogBroadcast(DfuBaseService.LOG_LEVEL_VERBOSE, "Requesting new MTU...");
        mService.sendLogBroadcast(DfuBaseService.LOG_LEVEL_DEBUG, "gatt.requestMtu(" + mtu + ")");
        // If the request could not even be initiated, give up silently (no MTU change).
        if (!mGatt.requestMtu(mtu))
            return;
        // We have to wait until the MTU exchange finishes
        try {
            synchronized (mLock) {
                // Wait while the request is pending and the link is healthy, or while paused.
                // The GATT callback is expected to set mRequestCompleted/mError and notify mLock.
                while ((!mRequestCompleted && mConnected && mError == 0) || mPaused)
                    mLock.wait();
            }
        } catch (final InterruptedException e) {
            loge("Sleeping interrupted", e);
        }
        // NOTE(review): the message below mentions "Service Changed CCCD" — it looks
        // copy-pasted from another wait loop; confirm whether it should mention the MTU request.
        if (!mConnected)
            throw new DeviceDisconnectedException("Unable to read Service Changed CCCD: device disconnected");
    }
}
public class CFIRGImpl {
    /**
     * <!-- begin-user-doc -->
     * Resets the given structural feature to its default value.
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void eUnset(int featureID) {
        // EMF-generated dispatch: reset the matching feature to its EDEFAULT constant,
        // otherwise defer to the superclass. Do not hand-edit; regenerate from the model.
        switch (featureID) {
            case AfplibPackage.CFIRG__FCS_NAME:
                setFCSName(FCS_NAME_EDEFAULT);
                return;
            case AfplibPackage.CFIRG__CP_NAME:
                setCPName(CP_NAME_EDEFAULT);
                return;
            case AfplibPackage.CFIRG__SV_SIZE:
                setSVSize(SV_SIZE_EDEFAULT);
                return;
            case AfplibPackage.CFIRG__SH_SCALE:
                setSHScale(SH_SCALE_EDEFAULT);
                return;
            case AfplibPackage.CFIRG__RESERVED:
                setReserved(RESERVED_EDEFAULT);
                return;
            case AfplibPackage.CFIRG__SECTION:
                setSection(SECTION_EDEFAULT);
                return;
        }
        super.eUnset(featureID);
    }
}
public class DescribeAssessmentTemplatesRequestMarshaller {
    /**
     * Marshall the given parameter object.
     * (AWS SDK generated marshaller — regenerate rather than hand-edit.)
     * @param describeAssessmentTemplatesRequest the request to marshall; must not be null
     * @param protocolMarshaller the protocol-level marshaller the fields are written into
     * @throws SdkClientException if the request is null or marshalling fails
     */
    public void marshall(DescribeAssessmentTemplatesRequest describeAssessmentTemplatesRequest, ProtocolMarshaller protocolMarshaller) {
        if (describeAssessmentTemplatesRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Only one field to marshall: the list of assessment template ARNs.
            protocolMarshaller.marshall(describeAssessmentTemplatesRequest.getAssessmentTemplateArns(), ASSESSMENTTEMPLATEARNS_BINDING);
        } catch (Exception e) {
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class AbstractRegexParser { /** * Regex Patterns */ private Pattern findingSteps ( ) { } }
String initialStartingWords = concatenateInitialStartingWords ( ) ; String followingStartingWords = concatenateFollowingStartingWords ( ) ; return compile ( "((" + initialStartingWords + ")\\s(.)*?)\\s*(\\Z|" + followingStartingWords + "|\\n" + keywords ( ) . examplesTable ( ) + ")" , DOTALL ) ;
public class TfIdf {
    /**
     * Computes the inverse document frequency (idf) over a series of documents.
     * <p>
     * Fix: document frequency now counts each term at most once per document.
     * Previously every occurrence incremented df, so a term repeated within a
     * single document inflated its df (possibly past the document count,
     * yielding a negative idf).
     *
     * @param documentVocabularies the documents, each an iterable of its terms
     *                             (duplicates within a document are ignored)
     * @param smooth smoothing flag: pretend one extra document exists that
     *               contains every term once
     * @param addOne add-one (idf + 1) smoothing of the final value
     * @param <TERM> the term type
     * @return a map from term to its idf value
     */
    public static <TERM> Map<TERM, Double> idf(Iterable<Iterable<TERM>> documentVocabularies, boolean smooth, boolean addOne) {
        Map<TERM, Integer> df = new HashMap<TERM, Integer>();
        int d = smooth ? 1 : 0;   // smoothing pseudo-count added to every df
        int a = addOne ? 1 : 0;   // added to the final log value
        int n = d;                // document count (starts at the pseudo-document)
        for (Iterable<TERM> documentVocabulary : documentVocabularies) {
            n += 1;
            // Count each term at most once per document: document frequency is the
            // number of documents containing the term, not the number of occurrences.
            Set<TERM> seenInDocument = new HashSet<TERM>();
            for (TERM term : documentVocabulary) {
                if (!seenInDocument.add(term)) {
                    continue;
                }
                Integer t = df.get(term);
                if (t == null) {
                    t = d;
                }
                df.put(term, t + 1);
            }
        }
        Map<TERM, Double> idf = new HashMap<TERM, Double>();
        for (Map.Entry<TERM, Integer> e : df.entrySet()) {
            TERM term = e.getKey();
            double f = e.getValue();
            // n / f is a double division because f is double.
            idf.put(term, Math.log(n / f) + a);
        }
        return idf;
    }
}
public class ObjectInputStream { /** * Reads the persistent fields of the object that is currently being read * from the source stream . The values read are stored in a GetField object * that provides access to the persistent fields . This GetField object is * then returned . * @ return the GetField object from which persistent fields can be accessed * by name . * @ throws ClassNotFoundException * if the class of an object being deserialized can not be * found . * @ throws IOException * if an error occurs while reading from this stream . * @ throws NotActiveException * if this stream is currently not reading an object . */ public GetField readFields ( ) throws IOException , ClassNotFoundException , NotActiveException { } }
if ( currentObject == null ) { throw new NotActiveException ( ) ; } EmulatedFieldsForLoading result = new EmulatedFieldsForLoading ( currentClass ) ; readFieldValues ( result ) ; return result ;
public class ModuleImpl {
    /**
     * For any extra modules specified by {@code cacheEntry}, obtain a build
     * future from the module cache manager and add it to the {@link ModuleBuildReader}
     * specified by {@code reader}.
     * @param reader the {@link ModuleBuildReader} to add the extra modules to
     * @param request The http request
     * @param cacheEntry The cache entry object for the current module
     * @throws IOException if locating or scheduling a module build fails
     */
    public void processExtraModules(ModuleBuildReader reader, HttpServletRequest request, CacheEntry cacheEntry) throws IOException {
        List<String> extraModules = cacheEntry.getExtraModules();
        if (!extraModules.isEmpty()) {
            // The aggregator instance is stashed on the request by the framework.
            IAggregator aggr = (IAggregator) request.getAttribute(IAggregator.AGGREGATOR_REQATTRNAME);
            IConfig config = aggr.getConfig();
            for (String mid : cacheEntry.getExtraModules()) {
                ModuleIdentifier ident = new ModuleIdentifier(mid);
                // A module counts as JavaScript when it has no loader plugin, or its
                // plugin is configured as a JS plugin delegator.
                String pluginName = ident.getPluginName();
                boolean isJavaScript = pluginName == null || config.getJsPluginDelegators().contains(pluginName);
                URI uri = config.locateModuleResource(ident.getModuleName(), isJavaScript);
                IModule module = aggr.newModule(mid, uri);
                // Obtain (or schedule) the build for the extra module from the module cache.
                Future<ModuleBuildReader> future = aggr.getCacheManager().getCache().getModules().getBuild(request, module);
                ModuleBuildFuture mbf = new ModuleBuildFuture(module, future, ModuleSpecifier.BUILD_ADDED);
                reader.addExtraBuild(mbf);
            }
        }
    }
}
public class FileBytes {
    /**
     * Allocates a randomAccessFile buffer.
     * If the underlying randomAccessFile is empty, the randomAccessFile count will expand dynamically as bytes are written to the randomAccessFile.
     * @param file The randomAccessFile to allocate.
     * @param mode The mode in which to open the underlying {@link java.io.RandomAccessFile}.
     * @param size The count of the bytes to allocate. Note: the size actually used is
     *             {@code Memory.Util.toPow2(size)} — presumably the next power of two
     *             at or above the request; TODO confirm toPow2's rounding direction.
     * @return The allocated buffer.
     */
    public static FileBytes allocate(File file, String mode, long size) {
        // The requested size is normalized to a power of two before allocation.
        return new FileBytes(file, mode, Memory.Util.toPow2(size));
    }
}
public class MiniSatStyleSolver { /** * Compares two variables by their activity . * @ param x the first variable * @ param y the second variable * @ return { @ code true } if the first variable ' s activity is larger then the second one ' s */ public boolean lt ( int x , int y ) { } }
return this . vars . get ( x ) . activity ( ) > this . vars . get ( y ) . activity ( ) ;
public class MonthView {
    /**
     * Sets all the parameters for displaying this week. The only required
     * parameter is the week number. Other parameters have a default value and
     * will only update if a new value is included, except for focus month,
     * which will always default to no focus month if no value is passed in.
     * @param selectedDay the day to show as selected
     * @param year the year to display
     * @param month the month to display ({@link Calendar#MONTH} based)
     * @param weekStart first day of the week, or -1 to use the calendar's default
     */
    public void setMonthParams(int selectedDay, int year, int month, int weekStart) {
        // NOTE(review): this only rejects the case where BOTH month and year are -1;
        // a single missing value slips through, although the message says both are
        // required. Possibly should be `||` — confirm intent before changing.
        if (month == -1 && year == -1) {
            throw new InvalidParameterException("You must specify month and year for this view");
        }
        mSelectedDay = selectedDay;
        // Allocate space for caching the day numbers and focus values
        mMonth = month;
        mYear = year;
        // Figure out what day today is
        // final Time today = new Time(Time.getCurrentTimezone());
        // today.setToNow();
        final Calendar today = Calendar.getInstance(mController.getTimeZone(), mController.getLocale());
        mHasToday = false;
        mToday = -1;
        // Point the reusable calendar at the first day of the displayed month.
        mCalendar.set(Calendar.MONTH, mMonth);
        mCalendar.set(Calendar.YEAR, mYear);
        mCalendar.set(Calendar.DAY_OF_MONTH, 1);
        mDayOfWeekStart = mCalendar.get(Calendar.DAY_OF_WEEK);
        if (weekStart != -1) {
            mWeekStart = weekStart;
        } else {
            mWeekStart = mCalendar.getFirstDayOfWeek();
        }
        mNumCells = mCalendar.getActualMaximum(Calendar.DAY_OF_MONTH);
        // Mark "today" if it falls inside the displayed month.
        for (int i = 0; i < mNumCells; i++) {
            final int day = i + 1;
            if (sameDay(day, today)) {
                mHasToday = true;
                mToday = day;
            }
        }
        mNumRows = calculateNumRows();
        // Invalidate cached accessibility information.
        mTouchHelper.invalidateRoot();
    }
}
public class ServerConnectionPoliciesInner {
    /**
     * Creates or updates the server's connection policy.
     * (Auto-generated Azure SDK client code — regenerate rather than hand-edit.)
     * @param resourceGroupName The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal.
     * @param serverName The name of the server.
     * @param connectionType The server connection type. Possible values include: 'Default', 'Proxy', 'Redirect'
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the ServerConnectionPolicyInner object
     */
    public Observable<ServiceResponse<ServerConnectionPolicyInner>> createOrUpdateWithServiceResponseAsync(String resourceGroupName, String serverName, ServerConnectionType connectionType) {
        // Validate everything the service call depends on before issuing the request.
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        if (resourceGroupName == null) {
            throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
        }
        if (serverName == null) {
            throw new IllegalArgumentException("Parameter serverName is required and cannot be null.");
        }
        if (this.client.apiVersion() == null) {
            throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
        }
        if (connectionType == null) {
            throw new IllegalArgumentException("Parameter connectionType is required and cannot be null.");
        }
        // The service exposes exactly one policy per server, addressed by the fixed name "default".
        final String connectionPolicyName = "default";
        ServerConnectionPolicyInner parameters = new ServerConnectionPolicyInner();
        parameters.withConnectionType(connectionType);
        return service.createOrUpdate(this.client.subscriptionId(), resourceGroupName, serverName, connectionPolicyName, this.client.apiVersion(), this.client.acceptLanguage(), parameters, this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<ServerConnectionPolicyInner>>>() {
                @Override
                public Observable<ServiceResponse<ServerConnectionPolicyInner>> call(Response<ResponseBody> response) {
                    try {
                        // Map the raw HTTP response onto the typed ServiceResponse, or emit as an error.
                        ServiceResponse<ServerConnectionPolicyInner> clientResponse = createOrUpdateDelegate(response);
                        return Observable.just(clientResponse);
                    } catch (Throwable t) {
                        return Observable.error(t);
                    }
                }
            });
    }
}
public class ServerKeysInner {
    /**
     * Gets a list of server keys.
     * (Auto-generated Azure SDK client code — regenerate rather than hand-edit.)
     * @param nextPageLink The NextLink from the previous successful call to List operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the PagedList&lt;ServerKeyInner&gt; object
     */
    public Observable<Page<ServerKeyInner>> listByServerNextAsync(final String nextPageLink) {
        // Unwrap the ServiceResponse envelope, emitting just the page body.
        return listByServerNextWithServiceResponseAsync(nextPageLink)
            .map(new Func1<ServiceResponse<Page<ServerKeyInner>>, Page<ServerKeyInner>>() {
                @Override
                public Page<ServerKeyInner> call(ServiceResponse<Page<ServerKeyInner>> response) {
                    return response.body();
                }
            });
    }
}
public class ShardingTransactionManagerEngine { /** * Initialize sharding transaction managers . * @ param databaseType database type * @ param dataSourceMap data source map */ public void init ( final DatabaseType databaseType , final Map < String , DataSource > dataSourceMap ) { } }
for ( Entry < TransactionType , ShardingTransactionManager > entry : transactionManagerMap . entrySet ( ) ) { entry . getValue ( ) . init ( databaseType , getResourceDataSources ( dataSourceMap ) ) ; }
public class AjaxInterceptor {
    /**
     * Check if process this trigger only.
     * @param triggerWithContext the trigger with its context
     * @param operation current ajax operation
     * @return true if process this trigger only
     */
    private boolean isProcessTriggerOnly(final ComponentWithContext triggerWithContext, final AjaxOperation operation) {
        // Target container implies only process the trigger or is Internal Ajax
        if (operation.getTargetContainerId() != null || operation.isInternalAjaxRequest()) {
            return true;
        }
        WComponent trigger = triggerWithContext.getComponent();
        // Check if trigger is a polling AJAX control
        if (trigger instanceof WAjaxControl) {
            // Get user context
            UIContext uic = triggerWithContext.getContext();
            // Evaluate the control under its own user context; the pop is in a
            // finally block so the context stack stays balanced on any exit path.
            UIContextHolder.pushContext(uic);
            try {
                WAjaxControl ajax = (WAjaxControl) trigger;
                // Is a polling region so only process trigger
                if (ajax.getDelay() > 0) {
                    return true;
                }
            } finally {
                UIContextHolder.popContext();
            }
        }
        return false;
    }
}
public class Points {
    /**
     * Returns the squared Euclidean distance between (x1, y1) and (x2, y2).
     * Computed in plain int arithmetic, so very large coordinate differences
     * (|delta| &gt; ~46340) overflow silently.
     */
    public static int distanceSq(int x1, int y1, int x2, int y2) {
        final int dx = x2 - x1;
        final int dy = y2 - y1;
        return dx * dx + dy * dy;
    }
}
public class XESLogParser {
    /**
     * Parses the specified log file and returns a collection of processes.
     * @param inputStream {@link InputStream} to parse
     * @param parsingMode whether to keep all traces or only distinct activity sequences
     * @return Collection of processes, which consist of a collection of instances, which again consist of a collection of {@link LogTrace} objects.
     * @throws ParameterException Gets thrown if there's a discrepancy in how the file should be interpreted.
     * @throws ParserException Gets thrown if the given file can't be read, is a directory, or doesn't exist.
     */
    @Override
    public List<List<LogTrace<LogEntry>>> parse(InputStream inputStream, ParsingMode parsingMode) throws ParameterException, ParserException {
        // Probe the stream: available() is used only to detect an unreadable/closed stream early.
        try {
            inputStream.available();
        } catch (IOException e) {
            throw new ParameterException("Unable to read input file: " + e.getMessage());
        }
        Collection<XLog> logs = null;
        XParser parser = ParserFileFormat.XES.getParser();
        try {
            logs = parser.parse(inputStream);
        } catch (Exception e) {
            throw new ParserException("Exception while parsing with OpenXES: " + e.getMessage());
        }
        if (logs == null)
            throw new ParserException("No suitable parser could have been found!");
        parsedLogFiles = new ArrayList<>(logs.size());
        // Dedup sets, reused (cleared) per log.
        Set<List<String>> activitySequencesSet = new HashSet<>();
        Set<LogTrace<LogEntry>> traceSet = new HashSet<>();
        for (XLog log : logs) {
            activitySequencesSet.clear();
            traceSet.clear();
            Class<?> logEntryClass = null;
            List<LogTrace<LogEntry>> logTraces = new ArrayList<>();
            // Pick the entry type: data-usage-aware entries when the extension is present.
            if (containsDataUsageExtension(log)) {
                logEntryClass = DULogEntry.class;
            } else {
                logEntryClass = LogEntry.class;
            }
            for (XTrace trace : log) {
                Integer traceID = null;
                // Extract trace ID
                for (Map.Entry<String, XAttribute> attribute : trace.getAttributes().entrySet()) {
                    String key = attribute.getKey();
                    String value = attribute.getValue().toString();
                    if (key.equals("concept:name")) {
                        try {
                            traceID = Integer.parseInt(value);
                        } catch (NumberFormatException e) {
                            // if NAN, take the hash
                            traceID = value.hashCode();
                        }
                        // Force non-negative IDs. NOTE(review): Integer.MIN_VALUE stays
                        // negative here (MIN_VALUE * -1 overflows back to itself) — confirm
                        // whether that edge case matters.
                        if (traceID < 0) {
                            traceID *= Integer.signum(traceID);
                        }
                    }
                }
                if (traceID == null)
                    throw new ParserException("Cannot extract case-id");
                // Build new log trace
                LogTrace<LogEntry> logTrace = new LogTrace<>(traceID);
                // Check for similar instances
                Collection<Long> similarInstances = getSimilarInstances(trace);
                if (similarInstances != null) {
                    logTrace.setSimilarInstances(similarInstances);
                }
                for (XEvent event : trace) {
                    // Add events to log trace
                    logTrace.addEntry(buildLogEntry(event, logEntryClass));
                }
                // Deliberate fall-through: DISTINCT_ACTIVITY_SEQUENCES reduces the trace
                // and then falls into COMPLETE so the (first-seen) trace is still added.
                switch (parsingMode) {
                    case DISTINCT_ACTIVITY_SEQUENCES:
                        if (!activitySequencesSet.add(logTrace.getActivities()))
                            break;
                        logTrace.reduceToActivities();
                        // case DISTINCT_TRACES:
                        // if (!traceSet.add(logTrace))
                        // break;
                    case COMPLETE:
                        logTraces.add(logTrace);
                }
            }
            parsedLogFiles.add(logTraces);
            summaries.add(new LogSummary<>(logTraces));
        }
        return parsedLogFiles;
    }
}
public class Tensor {
    /**
     * Divides the value at the idx'th entry in place.
     * @param idx flat index of the entry to update
     * @param val the divisor
     */
    public void divideValue(int idx, double val) {
        // Division semantics are delegated to the algebra object `s`
        // (presumably a semiring/real-algebra abstraction — TODO confirm its contract).
        values[idx] = s.divide(values[idx], val);
    }
}
public class GVRPlaneEmitter { /** * Generate random time stamps from the current time upto the next one second . * Passed as texture coordinates to the vertex shader , an unused field is present * with every pair passed . * @ param totalTime * @ return */ private float [ ] generateParticleTimeStamps ( float totalTime ) { } }
float timeStamps [ ] = new float [ mEmitRate * 2 ] ; for ( int i = 0 ; i < mEmitRate * 2 ; i += 2 ) { timeStamps [ i ] = totalTime + mRandom . nextFloat ( ) ; timeStamps [ i + 1 ] = 0 ; } return timeStamps ;
public class ExcelSerde {
    /**
     * Initializes the SerDe.
     * You can define in the table properties (additionally to the standard Hive properties) the following options:
     * office.hive.write.defaultSheetName: The sheetname to which data should be written (note: as an input any sheets can be read or selected sheets according to HadoopOffice configuration values).
     * Any of the HadoopOffice options (hadoopoffice.*), such as encryption, signing, low footprint mode, linked workbooks, can be defined in the table properties
     * @see <a href="https://github.com/ZuInnoTe/hadoopoffice/wiki/Hadoop-File-Format">HadoopOffice configuration</a>
     * @param conf Hadoop Configuration
     * @param prop table properties.
     * @param partitionProperties ignored. Partitions are not supported.
     */
    @Override
    public void initialize(Configuration conf, Properties prop, Properties partitionProperties) throws SerDeException {
        LOG.debug("Initializing Excel Hive Serde");
        LOG.debug("Configuring Hive-only options");
        // configure hadoopoffice specific hive options
        String defaultSheetNameStr = prop.getProperty(ExcelSerde.CONF_DEFAULTSHEETNAME);
        if (defaultSheetNameStr != null) {
            this.defaultSheetName = defaultSheetNameStr;
        }
        // copy hadoopoffice options
        LOG.debug("Configuring HadoopOffice Format");
        Set<Entry<Object, Object>> entries = prop.entrySet();
        for (Entry<Object, Object> entry : entries) {
            // Forward every "hadoopoffice."-prefixed table property into the Hadoop conf,
            // coercing TRUE/FALSE strings to booleans.
            if ((entry.getKey() instanceof String) && ((String) entry.getKey()).startsWith(ExcelSerde.HOSUFFIX)) {
                if (("TRUE".equalsIgnoreCase((String) entry.getValue())) || ("FALSE".equalsIgnoreCase(((String) entry.getValue())))) {
                    conf.setBoolean((String) entry.getKey(), Boolean.valueOf((String) entry.getValue()));
                } else {
                    conf.set((String) entry.getKey(), (String) entry.getValue());
                }
            }
        }
        // create object inspector (always a struct = row)
        LOG.debug("Creating object inspector");
        this.columnNames = Arrays.asList(prop.getProperty(serdeConstants.LIST_COLUMNS).split(","));
        this.columnTypes = TypeInfoUtils.getTypeInfosFromTypeString(prop.getProperty(serdeConstants.LIST_COLUMN_TYPES));
        final List<ObjectInspector> columnOIs = new ArrayList<ObjectInspector>(columnNames.size());
        for (TypeInfo currentColumnType : columnTypes) {
            columnOIs.add(TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(currentColumnType));
        }
        this.oi = ObjectInspectorFactory.getStandardStructObjectInspector(columnNames, columnOIs);
        // create converter
        LOG.debug("Creating converter");
        HadoopOfficeReadConfiguration hocr = new HadoopOfficeReadConfiguration(conf);
        this.readConverter = new ExcelConverterSimpleSpreadSheetCellDAO(hocr.getSimpleDateFormat(), hocr.getSimpleDecimalFormat(), hocr.getSimpleDateTimeFormat());
        HadoopOfficeWriteConfiguration howc = new HadoopOfficeWriteConfiguration(conf, "");
        this.writeConverter = new ExcelConverterSimpleSpreadSheetCellDAO(howc.getSimpleDateFormat(), howc.getSimpleDecimalFormat(), howc.getSimpleDateTimeFormat());
        // configure writing of header
        this.writeHeader = howc.getWriteHeader();
        // Map each column's object inspector onto the HadoopOffice generic data type,
        // falling back to String for anything unrecognized.
        GenericDataType[] columnsGD = new GenericDataType[columnNames.size()];
        for (int i = 0; i < columnOIs.size(); i++) {
            ObjectInspector currentOI = columnOIs.get(i);
            if (currentOI instanceof BooleanObjectInspector) {
                columnsGD[i] = new GenericBooleanDataType();
            } else if (currentOI instanceof DateObjectInspector) {
                columnsGD[i] = new GenericDateDataType();
            } else if (currentOI instanceof TimestampObjectInspector) {
                columnsGD[i] = new GenericTimestampDataType();
            } else if (currentOI instanceof ByteObjectInspector) {
                columnsGD[i] = new GenericByteDataType();
            } else if (currentOI instanceof ShortObjectInspector) {
                columnsGD[i] = new GenericShortDataType();
            } else if (currentOI instanceof IntObjectInspector) {
                columnsGD[i] = new GenericIntegerDataType();
            } else if (currentOI instanceof LongObjectInspector) {
                columnsGD[i] = new GenericLongDataType();
            } else if (currentOI instanceof DoubleObjectInspector) {
                columnsGD[i] = new GenericDoubleDataType();
            } else if (currentOI instanceof FloatObjectInspector) {
                columnsGD[i] = new GenericFloatDataType();
            } else if (currentOI instanceof HiveDecimalObjectInspector) {
                HiveDecimalObjectInspector currentOIHiveDecimalOI = (HiveDecimalObjectInspector) currentOI;
                columnsGD[i] = new GenericBigDecimalDataType(currentOIHiveDecimalOI.precision(), currentOIHiveDecimalOI.scale());
            } else if (currentOI instanceof StringObjectInspector) {
                columnsGD[i] = new GenericStringDataType();
            } else {
                LOG.warn("Could not detect desired datatype for column " + i + ". Type " + currentOI.getTypeName() + ". Using String");
                columnsGD[i] = new GenericStringDataType();
            }
        }
        this.readConverter.setSchemaRow(columnsGD);
        this.writeConverter.setSchemaRow(columnsGD);
        // create nullrow
        this.nullRow = new Object[this.columnNames.size()];
        // set writerow
        this.currentWriteRow = 0;
        // set outputrow
        this.outputRow = new Object[this.columnNames.size()];
        LOG.debug("Finished Initialization");
    }
}
public class base {
    /**
     * Computes {@code b} raised to {@code e} using binary (square-and-multiply)
     * exponentiation. Arithmetic is plain {@code long} arithmetic, so results
     * exceeding 64 bits wrap silently; a negative exponent is consumed as its
     * unsigned 64-bit bit pattern (matching the original behavior).
     * @param b the base number.
     * @param e the exponent.
     * @return {@code b ^ e}.
     */
    public static long pow(final long b, final long e) {
        long result = 1;
        // Scan the exponent bit by bit, squaring the running base each step.
        for (long base = b, exp = e; exp != 0; exp >>>= 1, base *= base) {
            if ((exp & 1) == 1) {
                result *= base;
            }
        }
        return result;
    }
}
public class Static { /** * Converts the given iterable to a { @ link List } . Note : traversing the iterable may destroy the original . * @ param iterable The iterable * @ param < T > The base type of the iterable * @ return A list that contains the elements of the given iterable */ public static < T > List < T > toList ( TypedIterable < T > iterable ) { } }
List < T > result = new ArrayList < > ( ) ; for ( T item : iterable ) { result . add ( item ) ; } return result ;
public class InternalFedoraBinary {
    /**
     * Retrieve the JCR Binary object backing this node's jcr:data property.
     * @return a JCR-wrapped Binary object
     * @throws PathNotFoundRuntimeException if the jcr:data property does not exist
     * @throws RepositoryRuntimeException on any other repository error
     */
    private javax.jcr.Binary getBinaryContent() {
        try {
            return getProperty(JCR_DATA).getBinary();
        } catch (final PathNotFoundException e) {
            // No jcr:data property on this node — surface as an unchecked path error.
            throw new PathNotFoundRuntimeException(e);
        } catch (final RepositoryException e) {
            // Wrap checked JCR exceptions in their runtime equivalent.
            throw new RepositoryRuntimeException(e);
        }
    }
}
public class Util { /** * decode bytes from an input stream into Mikrotik protocol sentences */ private static void decode ( InputStream in , StringBuilder result ) throws ApiDataException , ApiConnectionException { } }
try { int len = readLen ( in ) ; if ( len > 0 ) { byte buf [ ] = new byte [ len ] ; for ( int i = 0 ; i < len ; ++ i ) { int c = in . read ( ) ; if ( c < 0 ) { throw new ApiDataException ( "Truncated data. Expected to read more bytes" ) ; } buf [ i ] = ( byte ) ( c & 0xFF ) ; } String res = new String ( buf , Charset . forName ( "UTF-8" ) ) ; if ( result . length ( ) > 0 ) { result . append ( "\n" ) ; } result . append ( res ) ; decode ( in , result ) ; } } catch ( IOException ex ) { throw new ApiConnectionException ( ex . getMessage ( ) , ex ) ; }
public class LeaderRetrievalUtils { /** * Retrieves the leader akka url and the current leader session ID . The values are stored in a * { @ link LeaderConnectionInfo } instance . * @ param leaderRetrievalService Leader retrieval service to retrieve the leader connection * information * @ param timeout Timeout when to give up looking for the leader * @ return LeaderConnectionInfo containing the leader ' s akka URL and the current leader session * ID * @ throws LeaderRetrievalException */ public static LeaderConnectionInfo retrieveLeaderConnectionInfo ( LeaderRetrievalService leaderRetrievalService , FiniteDuration timeout ) throws LeaderRetrievalException { } }
LeaderConnectionInfoListener listener = new LeaderConnectionInfoListener ( ) ; try { leaderRetrievalService . start ( listener ) ; Future < LeaderConnectionInfo > connectionInfoFuture = listener . getLeaderConnectionInfoFuture ( ) ; return Await . result ( connectionInfoFuture , timeout ) ; } catch ( Exception e ) { throw new LeaderRetrievalException ( "Could not retrieve the leader address and leader " + "session ID." , e ) ; } finally { try { leaderRetrievalService . stop ( ) ; } catch ( Exception fe ) { LOG . warn ( "Could not stop the leader retrieval service." , fe ) ; } }
public class PactDslJsonArray { /** * Attribute that must be equal to the provided value . * @ param value Value that will be used for comparisons */ public PactDslJsonArray equalsTo ( Object value ) { } }
body . put ( value ) ; matchers . addRule ( rootPath + appendArrayIndex ( 0 ) , EqualsMatcher . INSTANCE ) ; return this ;
public class CmsCategoryTree { /** * Updates the content of the categories list . < p > * @ param treeItemsToShow the updates list of categories tree item beans */ public void updateContentList ( List < CmsTreeItem > treeItemsToShow ) { } }
m_scrollList . clearList ( ) ; if ( ( treeItemsToShow != null ) && ! treeItemsToShow . isEmpty ( ) ) { for ( CmsTreeItem dataValue : treeItemsToShow ) { dataValue . removeOpener ( ) ; m_scrollList . add ( dataValue ) ; CmsScrollPanel scrollparent = ( CmsScrollPanel ) m_scrollList . getParent ( ) ; scrollparent . onResizeDescendant ( ) ; } } else { showIsEmptyLabel ( ) ; } scheduleResize ( ) ;
public class MavenJDOMWriter { /** * Method updateModelBase . * @ param value * @ param element * @ param counter * @ param xmlTag */ protected void updateModelBase ( ModelBase value , String xmlTag , Counter counter , Element element ) { } }
boolean shouldExist = value != null ; Element root = updateElement ( counter , element , xmlTag , shouldExist ) ; if ( shouldExist ) { Counter innerCount = new Counter ( counter . getDepth ( ) + 1 ) ; findAndReplaceSimpleLists ( innerCount , root , value . getModules ( ) , "modules" , "module" ) ; iterateRepository ( innerCount , root , value . getRepositories ( ) , "repositories" , "repository" ) ; iterateRepository ( innerCount , root , value . getPluginRepositories ( ) , "pluginRepositories" , "pluginRepository" ) ; iterateDependency ( innerCount , root , value . getDependencies ( ) , "dependencies" , "dependency" ) ; findAndReplaceXpp3DOM ( innerCount , root , "reports" , ( Xpp3Dom ) value . getReports ( ) ) ; updateReporting ( value . getReporting ( ) , "reporting" , innerCount , root ) ; updateDependencyManagement ( value . getDependencyManagement ( ) , "dependencyManagement" , innerCount , root ) ; updateDistributionManagement ( value . getDistributionManagement ( ) , "distributionManagement" , innerCount , root ) ; findAndReplaceProperties ( innerCount , root , "properties" , value . getProperties ( ) ) ; }
public class OjbTagsHandler { /** * Returns the value of a property of the current object on the specified level . * @ param attributes The attributes of the tag * @ return The property value * @ exception XDocletException If an error occurs * @ doc . tag type = " content " * @ doc . param name = " level " optional = " false " description = " The level for the current object " * values = " class , field , reference , collection " * @ doc . param name = " name " optional = " false " description = " The name of the property " * @ doc . param name = " default " optional = " true " description = " A default value to use if the property * is not defined " */ public String propertyValue ( Properties attributes ) throws XDocletException { } }
String value = getPropertyValue ( attributes . getProperty ( ATTRIBUTE_LEVEL ) , attributes . getProperty ( ATTRIBUTE_NAME ) ) ; if ( value == null ) { value = attributes . getProperty ( ATTRIBUTE_DEFAULT ) ; } return value ;
public class authenticationvserver_stats { /** * Use this API to fetch statistics of authenticationvserver _ stats resource of given name . */ public static authenticationvserver_stats get ( nitro_service service , String name ) throws Exception { } }
authenticationvserver_stats obj = new authenticationvserver_stats ( ) ; obj . set_name ( name ) ; authenticationvserver_stats response = ( authenticationvserver_stats ) obj . stat_resource ( service ) ; return response ;
public class Heritrix3Wrapper { /** * TODO * @ param tries * @ param interval * @ return engine state and a list of registered jobs */ public EngineResult waitForEngineReady ( int tries , int interval ) { } }
EngineResult engineResult = null ; if ( tries <= 0 ) { tries = 1 ; } if ( interval <= 99 ) { interval = 1000 ; } boolean bLoop = true ; while ( bLoop && tries > 0 ) { engineResult = rescanJobDirectory ( ) ; // debug // System . out . println ( engineResult . status + " - " + ResultStatus . OK ) ; if ( engineResult . status == ResultStatus . OK ) { bLoop = false ; } -- tries ; if ( bLoop && tries > 0 ) { try { Thread . sleep ( interval ) ; } catch ( InterruptedException e ) { } } } return engineResult ;
public class Application { /** * If no calls have been made to < code > addELContextListener ( javax . el . ELContextListener ) < / code > , this method must * return an empty array * Otherwise , return an array representing the list of listeners added by calls to * < code > addELContextListener ( javax . el . ELContextListener ) < / code > . * An < code > implementation < / code > is provided that throws UnsupportedOperationException so that users that decorate * the < code > Application < / code > continue to work . * @ since 1.2 */ public ELContextListener [ ] getELContextListeners ( ) { } }
Application application = getMyfacesApplicationInstance ( ) ; if ( application != null ) { return application . getELContextListeners ( ) ; } throw new UnsupportedOperationException ( ) ;
public class ValueMapImpl { /** * Add a MapItemValue to the map . * @ param miv map value item . */ @ Override public void add ( MapItemValue miv ) { } }
if ( len >= items . length ) { items = Arry . grow ( items ) ; } items [ len ] = miv ; len ++ ;
public class ElasticsearchEmbeddedNode {
    /**
     * This method closes the elasticsearch node: the client is closed first,
     * then the node is stopped. Both references are nulled afterwards, so a
     * second call is a no-op (the method is idempotent).
     */
    public void close() {
        if (log.isLoggable(Level.FINEST)) {
            log.finest("Close Elasticsearch node=" + node + " client=" + client);
        }
        // Close the client before stopping the node it talks to.
        if (client != null) {
            client.close();
            client = null;
        }
        if (node != null) {
            node.stop();
            node = null;
        }
    }
}
public class BoxCollaborationWhitelist { /** * Creates a new Collaboration Whitelist for a domain . * @ param api the API connection to be used by the resource . * @ param domain the domain to be added to a collaboration whitelist for a Box Enterprise . * @ param direction an enum representing the direction of the collaboration whitelist . Can be set to * inbound , outbound , or both . * @ return information about the collaboration whitelist created . */ public static BoxCollaborationWhitelist . Info create ( final BoxAPIConnection api , String domain , WhitelistDirection direction ) { } }
URL url = COLLABORATION_WHITELIST_ENTRIES_URL_TEMPLATE . build ( api . getBaseURL ( ) ) ; BoxJSONRequest request = new BoxJSONRequest ( api , url , HttpMethod . POST ) ; JsonObject requestJSON = new JsonObject ( ) . add ( "domain" , domain ) . add ( "direction" , direction . toString ( ) ) ; request . setBody ( requestJSON . toString ( ) ) ; BoxJSONResponse response = ( BoxJSONResponse ) request . send ( ) ; JsonObject responseJSON = JsonObject . readFrom ( response . getJSON ( ) ) ; BoxCollaborationWhitelist domainWhitelist = new BoxCollaborationWhitelist ( api , responseJSON . get ( "id" ) . asString ( ) ) ; return domainWhitelist . new Info ( responseJSON ) ;
public class WebSocketHandlerAdapter {
    /**
     * Send authorize token to the Gateway by forwarding it to the next handler
     * in the chain. Declared {@code synchronized}, so concurrent authorize
     * calls on this adapter are serialized.
     *
     * @param channel the WebSocket channel being authorized
     * @param authorizeToken the token forwarded to the next handler
     */
    @Override
    public synchronized void processAuthorize(WebSocketChannel channel, String authorizeToken) {
        nextHandler.processAuthorize(channel, authorizeToken);
    }
}
public class Handler { /** * Updatable */ @ Override public void update ( double extrp ) { } }
updateRemove ( ) ; updateAdd ( ) ; for ( final ComponentUpdater component : updaters ) { component . update ( extrp , featurables ) ; }
public class Region { /** * Finds a region based on a label or name . * A region name is lower - cased label with spaces removed . * @ param labelOrName the region name or label * @ return the found region or null if there ' s no such region */ public static Region findByLabelOrName ( String labelOrName ) { } }
if ( labelOrName == null ) { return null ; } return VALUES_BY_NAME . get ( labelOrName . toLowerCase ( ) . replace ( " " , "" ) ) ;
public class NodeTraversal { /** * Returns the current scope ' s root . */ public Node getScopeRoot ( ) { } }
int roots = scopeRoots . size ( ) ; if ( roots > 0 ) { return scopeRoots . get ( roots - 1 ) ; } else { AbstractScope < ? , ? > s = scopes . peek ( ) ; return s != null ? s . getRootNode ( ) : null ; }
public class UriMapping { /** * Creates a UriMapping instance based on the mapping definition given as parameter . * Mapping is split in 3 parts : * < ul > * < li > Host , including the scheme and port : http : / / www . example : 8080 < / li > * < li > path , left part before the wildcard caracter * < / li > * < li > extension , right part after the wildcard caracter * < / li > * < / ul > * @ param mapping * the mapping expression as string * @ return the uri mapping object * @ throws ConfigurationException */ public static UriMapping create ( String mapping ) { } }
Matcher matcher = MAPPING_PATTERN . matcher ( mapping ) ; if ( ! matcher . matches ( ) ) { throw new ConfigurationException ( "Unrecognized URI pattern: " + mapping ) ; } String host = StringUtils . trimToNull ( matcher . group ( 1 ) ) ; String path = StringUtils . trimToNull ( matcher . group ( 5 ) ) ; if ( path != null && ! path . startsWith ( "/" ) ) { throw new ConfigurationException ( "Unrecognized URI pattern: " + mapping + " Mapping path should start with / was: " + path ) ; } String extension = StringUtils . trimToNull ( matcher . group ( 7 ) ) ; if ( extension != null && ! extension . startsWith ( "." ) ) { throw new ConfigurationException ( "Unrecognized URI pattern: " + mapping + " Mapping extension should start with . was: " + extension ) ; } return new UriMapping ( host , path , extension ) ;
public class LocalisationManager {
    /**
     * Method updateQueuePointOutputHandler.
     * <p>Re-keys the queue point OutputHandler registration: the entry under
     * {@code existingUuid} is removed and the handler is registered under
     * {@code newLocalisingMEUuid}. Both steps happen atomically under the
     * {@code _queuePointOutputHandlers} lock.</p>
     *
     * @param newLocalisingMEUuid the uuid the handler is registered under after the update
     * @param outputHandler the outputHandler to add to the set of queuePointOutputHandlers
     * @param existingUuid the uuid whose existing registration is removed
     */
    public void updateQueuePointOutputHandler(SIBUuid8 newLocalisingMEUuid, OutputHandler outputHandler, SIBUuid8 existingUuid) {
        // Standard WAS SIB entry trace; guarded to avoid argument-array cost when disabled.
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(tc, "updateQueuePointOutputHandler", new Object[] { newLocalisingMEUuid, outputHandler, existingUuid });
        synchronized (_queuePointOutputHandlers) {
            // Remove-then-put under one lock so readers never see both keys mapped.
            _queuePointOutputHandlers.remove(existingUuid);
            _queuePointOutputHandlers.put(newLocalisingMEUuid, outputHandler);
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "updateQueuePointOutputHandler");
    }
}
public class Misc {
    /**
     * Locates a resource on the context class loader and returns it as a
     * {@link File} (URL-decoding the path).
     *
     * @param resource the classpath-relative resource name
     * @return the resource as a File, or null when the resource does not exist
     *     on the classpath
     */
    public static File getResourceAsFile(String resource) {
        ClassLoader cl = Thread.currentThread().getContextClassLoader();
        java.net.URL url = cl.getResource(resource);
        if (url == null) {
            // Missing resource previously caused a NullPointerException here.
            return null;
        }
        try {
            return new File(URLDecoder.decode(url.getFile(), "UTF-8"));
        } catch (UnsupportedEncodingException uee) {
            // UTF-8 support is mandated by the JVM spec; silently returning
            // null here would hide a broken runtime.
            throw new AssertionError(uee);
        }
    }
}
public class AccountsInner {
    /**
     * Updates a Cognitive Services account. This overload sends no optional
     * update properties (sku and tags are null).
     *
     * @param resourceGroupName The name of the resource group within the user's subscription.
     * @param accountName The name of Cognitive Services account.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the CognitiveServicesAccountInner object
     */
    public Observable<ServiceResponse<CognitiveServicesAccountInner>> updateWithServiceResponseAsync(String resourceGroupName, String accountName) {
        // Validate required parameters and client-level settings up front.
        if (resourceGroupName == null) {
            throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
        }
        if (accountName == null) {
            throw new IllegalArgumentException("Parameter accountName is required and cannot be null.");
        }
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        if (this.client.apiVersion() == null) {
            throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
        }
        // This overload carries no optional properties; both are fixed to null
        // (generated-client convention — the locals document the omitted parameters).
        final Sku sku = null;
        final Map<String, String> tags = null;
        CognitiveServicesAccountUpdateParameters parameters = new CognitiveServicesAccountUpdateParameters();
        parameters.withSku(null);
        parameters.withTags(null);
        // Issue the REST call and map the raw retrofit response into a typed
        // ServiceResponse, converting any deserialization failure into an error emission.
        return service.update(resourceGroupName, accountName, this.client.subscriptionId(), this.client.apiVersion(), this.client.acceptLanguage(), parameters, this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<CognitiveServicesAccountInner>>>() {
                @Override
                public Observable<ServiceResponse<CognitiveServicesAccountInner>> call(Response<ResponseBody> response) {
                    try {
                        ServiceResponse<CognitiveServicesAccountInner> clientResponse = updateDelegate(response);
                        return Observable.just(clientResponse);
                    } catch (Throwable t) {
                        return Observable.error(t);
                    }
                }
            });
    }
}
public class Lists {
    /**
     * Returns the length of the longest trailing partial sublist of the
     * target list within the specified source list, or 0 if there is no such
     * occurrence. More formally, returns the length <tt>i</tt> such that
     * {@code source.subList(source.size() - i, source.size()).equals(target.subList(target.size() - i, target.size()))},
     * or 0 if there is no such index. Null elements compare equal to null.
     *
     * @param source the list in which to search for the longest trailing partial sublist
     *     of <tt>target</tt>.
     * @param target the list to search for as a trailing partial sublist of <tt>source</tt>.
     * @return the length of the last occurrence of trailing partial sublist the specified
     *     target list within the specified source list, or 0 if there is no such occurrence.
     * @since 1.4
     */
    static int lengthOfTrailingPartialSubList(final List<?> source, final List<?> target) {
        final int s = source.size() - 1;
        final int t = target.size() - 1;
        int l = 0;
        // Walk backwards from both tails while elements match. Objects.equals
        // makes the comparison null-safe (the original NPE'd on null elements).
        while (l <= s && l <= t && java.util.Objects.equals(source.get(s - l), target.get(t - l))) {
            l++;
        }
        return l;
    }
}
public class EvaluatorRegistry {
    /**
     * Returns the evaluator instance for the given type and the defined parameterText.
     *
     * @param type the type of the attributes this evaluator will operate on. This is
     *     important because the evaluator may do optimizations and type coercion based
     *     on the types it is evaluating. It is also possible that this evaluator does
     *     not support a given type.
     * @param operator the operator that evaluator implements
     * @return an Evaluator instance capable of evaluating expressions between values of
     *     the given type, or null in case the type is not supported.
     */
    public Evaluator getEvaluator(ValueType type, Operator operator) {
        // Look up the definition registered for the operator, then ask it for
        // the type-specific evaluator.
        return this.getEvaluatorDefinition(operator).getEvaluator(type, operator);
    }
}
public class UriComponentsBuilder { /** * Returns a builder that is initialized with the given { @ code URI } . * @ param uri the URI to initialize with * @ return the new { @ code UriComponentsBuilder } */ public static UriComponentsBuilder fromUri ( URI uri ) { } }
UriComponentsBuilder builder = new UriComponentsBuilder ( ) ; builder . uri ( uri ) ; return builder ;
public class Orders {
    /**
     * Builds the parameters for an order query request.
     * <p>
     * Order matters: the signature is computed last, over all parameters
     * already present in the map.
     *
     * @param queryParams the mutable parameter map to populate
     */
    private void buildQueryParams(Map<String, String> queryParams) {
        buildConfigParams(queryParams); // merchant/app configuration fields
        // Random 16-character nonce string required by the WePay protocol.
        put(queryParams, WepayField.NONCE_STR, RandomStrs.generate(16));
        buildSignParams(queryParams); // signs everything added above — must stay last
    }
}
public class NeuralNetwork { /** * Returns the activation function of output layer based on natural pairing . * @ param error the error function . * @ param k the number of output nodes . * @ return the activation function of output layer based on natural pairing */ private static ActivationFunction natural ( ErrorFunction error , int k ) { } }
if ( error == ErrorFunction . CROSS_ENTROPY ) { if ( k == 1 ) { return ActivationFunction . LOGISTIC_SIGMOID ; } else { return ActivationFunction . SOFTMAX ; } } else { return ActivationFunction . LOGISTIC_SIGMOID ; }
public class Branch { /** * Branch collect the URLs in the incoming intent for better attribution . Branch SDK extensively check for any sensitive data in the URL and skip if exist . * However the following method provisions application to set SDK to collect only URLs in particular form . This method allow application to specify a set of regular expressions to white list the URL collection . * If whitelist is not empty SDK will collect only the URLs that matches the white list . * This method should be called immediately after calling { @ link Branch # getAutoInstance ( Context ) } * @ param urlWhiteListPattern A regular expression with a URI white listing pattern * @ return { @ link Branch } instance for successive method calls */ public Branch addWhiteListedScheme ( String urlWhiteListPattern ) { } }
if ( urlWhiteListPattern != null ) { UniversalResourceAnalyser . getInstance ( context_ ) . addToAcceptURLFormats ( urlWhiteListPattern ) ; } return this ;
public class Assert2 { /** * Asserts that every file that exists relative to expected also exists * relative to actual . * @ param expected the expected path * @ param actual the actual path * @ throws IOException if the paths cannot be walked */ public static void containsAll ( final Path expected , final Path actual ) throws IOException { } }
final Assertion < Path > exists = existsIn ( expected , actual ) ; if ( Files . exists ( expected ) ) { walkFileTree ( expected , new SimpleFileVisitor < Path > ( ) { @ Override public FileVisitResult visitFile ( final Path file , final BasicFileAttributes attrs ) throws IOException { assertThat ( file , exists ) ; return super . visitFile ( file , attrs ) ; } } ) ; }
public class Dictionaries { /** * Load the dictionaries . * @ param inputDir * the input directory * @ throws IOException * throws an exception if directory does not exist */ private void loadDictionaries ( final String inputDir ) throws IOException { } }
final List < File > fileList = StringUtils . getFilesInDir ( new File ( inputDir ) ) ; dictNames = new ArrayList < String > ( fileList . size ( ) ) ; dictionaries = new ArrayList < Map < String , String > > ( fileList . size ( ) ) ; dictionariesIgnoreCase = new ArrayList < Map < String , String > > ( fileList . size ( ) ) ; System . err . println ( "\tloading dictionaries in " + inputDir + " directory" ) ; for ( int i = 0 ; i < fileList . size ( ) ; ++ i ) { if ( DEBUG ) { System . err . println ( "\tloading dictionary:...." + fileList . get ( i ) . getCanonicalPath ( ) ) ; } dictNames . add ( fileList . get ( i ) . getCanonicalPath ( ) ) ; dictionaries . add ( new HashMap < String , String > ( ) ) ; dictionariesIgnoreCase . add ( new HashMap < String , String > ( ) ) ; final List < String > fileLines = Files . readLines ( fileList . get ( i ) , Charsets . UTF_8 ) ; for ( final String line : fileLines ) { final String [ ] lineArray = tabPattern . split ( line ) ; if ( lineArray . length == 2 ) { dictionaries . get ( i ) . put ( lineArray [ 0 ] , lineArray [ 1 ] ) ; if ( ! line . equalsIgnoreCase ( "in" ) && ! line . equalsIgnoreCase ( "on" ) && ! line . equalsIgnoreCase ( "us" ) && ! line . equalsIgnoreCase ( "or" ) && ! line . equalsIgnoreCase ( "am" ) ) { dictionariesIgnoreCase . get ( i ) . put ( lineArray [ 0 ] . toLowerCase ( ) , lineArray [ 1 ] ) ; } } } } System . err . println ( "found " + dictionaries . size ( ) + " dictionaries" ) ;
public class RdKNNTree {
    /**
     * Performs necessary operations before inserting the specified entry:
     * allocates a k-nearest-neighbor heap of capacity {@code settings.k_max}
     * and delegates to the recursive pre-insert overload, starting from the
     * root entry.
     *
     * @param entry the entry to be inserted
     */
    @Override
    protected void preInsert(RdKNNEntry entry) {
        // Heap capacity k_max bounds the number of candidate neighbors tracked
        // during the pre-insertion adjustment.
        KNNHeap knns_o = DBIDUtil.newHeap(settings.k_max);
        preInsert(entry, getRootEntry(), knns_o);
    }
}
public class TryWithResourcesASTTransformation { /** * # primaryExc . addSuppressed ( # suppressedExc ) ; */ private ExpressionStatement createAddSuppressedStatement ( String primaryExcName , String suppressedExcName ) { } }
MethodCallExpression addSuppressedMethodCallExpression = new MethodCallExpression ( new VariableExpression ( primaryExcName ) , "addSuppressed" , new ArgumentListExpression ( Collections . singletonList ( new VariableExpression ( suppressedExcName ) ) ) ) ; addSuppressedMethodCallExpression . setImplicitThis ( false ) ; addSuppressedMethodCallExpression . setSafe ( true ) ; return new ExpressionStatement ( addSuppressedMethodCallExpression ) ;
public class Events { /** * Creates a network event with the source set to the object passed in as * parameter and the { @ link DeliveryGuaranty } set to the incoming * parameter . * @ param source * The payload of the event . This is the actual data that gets * transmitted to remote machine . * @ param deliveryGuaranty * This decides which transport TCP or UDP to be used to send the * message to remote machine . * @ return An instance of { @ link NetworkEvent } */ public static NetworkEvent networkEvent ( Object source , DeliveryGuaranty deliveryGuaranty ) { } }
Event event = event ( source , Events . NETWORK_MESSAGE ) ; NetworkEvent networkEvent = new DefaultNetworkEvent ( event ) ; networkEvent . setDeliveryGuaranty ( deliveryGuaranty ) ; return networkEvent ;
public class InjectorImpl {
    /**
     * Injects dependencies into the given target instance and returns it.
     *
     * @param target the object to inject into; must not be null
     * @return the same {@code target} instance, for call chaining
     * @see org.jboss.arquillian.api.Injector#inject(java.lang.Object)
     */
    @Override
    public <T> T inject(T target) {
        // Validate.notNull rejects null targets up front (exception type
        // determined by the Validate utility — confirm against its contract).
        Validate.notNull(target, "Target must be specified.");
        // The manager performs the actual dependency injection.
        manager.inject(target);
        return target;
    }
}
public class LineManager { /** * Set the LineTranslateAnchor property * Controls the frame of reference for { @ link PropertyFactory # lineTranslate } . * @ param value property wrapper value around String */ public void setLineTranslateAnchor ( @ Property . LINE_TRANSLATE_ANCHOR String value ) { } }
PropertyValue propertyValue = lineTranslateAnchor ( value ) ; constantPropertyUsageMap . put ( PROPERTY_LINE_TRANSLATE_ANCHOR , propertyValue ) ; layer . setProperties ( propertyValue ) ;
public class Get {
    /**
     * Executes a getter (<tt>getX()</tt>) on the target object which returns String[].
     * If the specified attribute is, for example, "<tt>name</tt>", the called method
     * will be "<tt>getName()</tt>".
     *
     * @param attributeName the name of the attribute
     * @return the result of the method execution
     */
    public static Function<Object, String[]> attrOfArrayOfString(final String attributeName) {
        // Delegates to the Get function, parameterized with the String[] result type token.
        return new Get<Object, String[]>(Types.ARRAY_OF_STRING, attributeName);
    }
}
public class AgentPremain {
    /**
     * Print the start message to System.err with the time NOW, and register a
     * shutdown hook which will print the stop message to System.err with the
     * time then and the number of milliseconds passed since.
     */
    private static void printStartStopTimes() {
        final long startMillis = System.currentTimeMillis();
        System.err.println("Start at " + new Date());
        // The hook runs at JVM shutdown and reports total wall-clock time.
        Runtime.getRuntime().addShutdownHook(new Thread() {
            @Override
            public void run() {
                long elapsed = System.currentTimeMillis() - startMillis;
                System.err.println("Stop at " + new Date() + ", execution time = " + elapsed + " ms");
            }
        });
    }
}
public class GetUserLists { /** * Usage : java twitter4j . examples . list . GetUserLists [ list owner screen name ] * @ param args message */ public static void main ( String [ ] args ) { } }
if ( args . length < 1 ) { System . out . println ( "Usage: java twitter4j.examples.list.GetUserLists [list owner screen name]" ) ; System . exit ( - 1 ) ; } try { Twitter twitter = new TwitterFactory ( ) . getInstance ( ) ; ResponseList < UserList > lists = twitter . getUserLists ( args [ 0 ] ) ; for ( UserList list : lists ) { System . out . println ( "id:" + list . getId ( ) + ", name:" + list . getName ( ) + ", description:" + list . getDescription ( ) + ", slug:" + list . getSlug ( ) + "" ) ; } System . exit ( 0 ) ; } catch ( TwitterException te ) { te . printStackTrace ( ) ; System . out . println ( "Failed to list the lists: " + te . getMessage ( ) ) ; System . exit ( - 1 ) ; }
public class ESigService {
    /**
     * Removes the item of the specified type and id from the list.
     *
     * @param esigType The esig type of the item to remove.
     * @param id The unique id of the item to remove.
     */
    @Override
    public void remove(IESigType esigType, String id) {
        // Pure delegation; eSigList owns the membership bookkeeping.
        eSigList.remove(esigType, id);
    }
}
public class Utils { /** * Checks whether the given tuples specify a valid range for a sub - array * of an array with the given parent size , and throws an * < code > IllegalArgumentException < / code > if not . * @ param parentSize The parent size * @ param fromIndices The start indices , inclusive * @ param toIndices The end indices , exclusive * @ throws NullPointerException If any of the given tuples is * < code > null < / code > * @ throws IllegalArgumentException If the indices are not valid . This * is the case when the { @ link IntTuple # getSize ( ) size } of the start - or * end indices is different than the parent size , or when * < code > fromIndex & lt ; 0 < / code > or * < code > toIndex & gt ; parentSize . get ( i ) < / code > * or < code > fromIndex & gt ; toIndex < / code > for any dimension . */ public static void checkForValidSubArrayIndices ( IntTuple parentSize , IntTuple fromIndices , IntTuple toIndices ) { } }
if ( fromIndices . getSize ( ) != parentSize . getSize ( ) ) { throw new IllegalArgumentException ( "Parent is " + parentSize . getSize ( ) + "-dimensional, " + "but fromIndices is " + fromIndices . getSize ( ) + "-dimensional" ) ; } if ( toIndices . getSize ( ) != parentSize . getSize ( ) ) { throw new IllegalArgumentException ( "Parent is " + parentSize . getSize ( ) + "-dimensional, " + "but toIndices is " + toIndices . getSize ( ) + "-dimensional" ) ; } int n = parentSize . getSize ( ) ; for ( int i = 0 ; i < n ; i ++ ) { int p = parentSize . get ( i ) ; int f = fromIndices . get ( i ) ; int t = toIndices . get ( i ) ; if ( f < 0 || t > p || f > t ) { throw new IllegalArgumentException ( "Invalid index range: " + fromIndices + " to " + toIndices + " in " + parentSize ) ; } }
public class NarUtil {
    /**
     * Returns the header file name (javah) corresponding to the given class file
     * name. For example, with base "/src" and file "/src/com/foo/Bar.class" the
     * result is "com_foo_Bar.h".
     *
     * @param basename the base directory the class file lives under
     * @param filename the absolute file name of the class
     * @return the header file name.
     * @throws IllegalArgumentException if {@code filename} does not start with {@code basename}
     */
    public static String getHeaderName(final String basename, final String filename) {
        // Normalize Windows separators; replace(char,char) avoids the regex
        // machinery of replaceAll for these literal substitutions.
        final String base = basename.replace('\\', '/');
        final String file = filename.replace('\\', '/');
        if (!file.startsWith(base)) {
            throw new IllegalArgumentException("Error " + file + " does not start with " + base);
        }
        String header = file.substring(base.length() + 1);
        header = header.replace('/', '_');
        // Only strip a trailing ".class"; the previous replaceAll("\\.class", ".h")
        // also rewrote a ".class" occurring mid-name.
        if (header.endsWith(".class")) {
            header = header.substring(0, header.length() - ".class".length()) + ".h";
        }
        return header;
    }
}