signature
stringlengths 43
39.1k
| implementation
stringlengths 0
450k
|
|---|---|
public class ArrayUtils { /** * Sorts the elements in the given array .
* @ param < T > { @ link Class } type of the elements in the array .
* @ param array array of elements to sort .
* @ param comparator { @ link Comparator } used to sort ( order ) the elements in the array .
* @ return the given array sorted .
* @ see java . util . Comparator */
public static < T > T [ ] sort ( T [ ] array , Comparator < T > comparator ) { } }
|
Arrays . sort ( array , comparator ) ; return array ;
|
public class SystemUtil {

    /**
     * Appends a suffix to the string (e.g. a filename) if it doesn't have it already.
     *
     * @param _str string to check; may be {@code null}
     * @param _suffix suffix to append; {@code null} is treated as "nothing to append"
     * @return the string with the suffix, the original string if it already ended with
     *         the suffix (or the suffix was {@code null}), or {@code null} if
     *         {@code _str} was {@code null}
     */
    public static String appendSuffixIfMissing(String _str, String _suffix) {
        if (_str == null) {
            return null;
        }
        // Guard against a null suffix: the previous code threw a
        // NullPointerException from String.endsWith(null).
        if (_suffix == null) {
            return _str;
        }
        if (!_str.endsWith(_suffix)) {
            _str += _suffix;
        }
        return _str;
    }
}
|
public class KeyVaultClientCustomImpl { /** * Creates a signature from a digest using the specified key .
* @ param keyIdentifier
* The full key identifier
* @ param algorithm
* algorithm identifier
* @ param value
* the content to be signed
* @ return the KeyOperationResult if successful . */
public KeyOperationResult sign ( String keyIdentifier , JsonWebKeySignatureAlgorithm algorithm , byte [ ] value ) { } }
|
KeyIdentifier id = new KeyIdentifier ( keyIdentifier ) ; return sign ( id . vault ( ) , id . name ( ) , id . version ( ) == null ? "" : id . version ( ) , algorithm , value ) ;
|
public class XSParser { /** * Parse an XML Schema document from String specified
* @ param schema String data to parse . If provided , this will always be treated as a
* sequence of 16 - bit units ( UTF - 16 encoded characters ) . If an XML
* declaration is present , the value of the encoding attribute
* will be ignored .
* @ param baseURI The base URI to be used for resolving relative
* URIs to absolute URIs . */
public XSModel parseString ( String schema , String baseURI ) { } }
|
return xsLoader . load ( new DOMInputImpl ( null , null , baseURI , schema , null ) ) ;
|
public class Request {

    /**
     * Splits the URL parameter string and stores the resulting parts.
     * The separator is defined in {@code HuluSetting}.
     *
     * @param urlParam the URL parameter string to split
     */
    protected static void splitAndSetParams(String urlParam) {
        // Split on the configured separator and stash the parts in the
        // (presumably thread-local) holder for later retrieval.
        String[] urlParams = StrUtil.split(urlParam, HuluSetting.urlParamSeparator);
        urlParamsLocal.set(urlParams);
    }
}
|
public class DefaultInputConnection { /** * Handles a group message . */
private void doGroupMessage ( final JsonObject message ) { } }
|
String groupID = message . getString ( "group" ) ; DefaultConnectionInputGroup group = groups . get ( groupID ) ; if ( group != null ) { Object value = deserializer . deserialize ( message ) ; if ( value != null ) { if ( log . isDebugEnabled ( ) ) { log . debug ( String . format ( "%s - Group received: Group[group=%s, id=%d, message=%s" , this , groupID , message . getLong ( "id" ) , value ) ) ; } group . handleMessage ( value ) ; } }
|
public class AbstractRslNode { /** * Adds a rsl parse tree to this node .
* @ param node the rsl parse tree to add . */
public boolean add ( AbstractRslNode node ) { } }
|
if ( _specifications == null ) _specifications = new LinkedList ( ) ; return _specifications . add ( node ) ;
|
public class ListManagementTermsImpl { /** * Add a term to the term list with list Id equal to list Id passed .
* @ param listId List Id of the image list .
* @ param term Term to be deleted
* @ param language Language of the terms .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the Object object */
public Observable < Object > addTermAsync ( String listId , String term , String language ) { } }
|
return addTermWithServiceResponseAsync ( listId , term , language ) . map ( new Func1 < ServiceResponse < Object > , Object > ( ) { @ Override public Object call ( ServiceResponse < Object > response ) { return response . body ( ) ; } } ) ;
|
public class Base64EncodedSignerWithChooserByPrivateKeyIdImpl { /** * Signs a message .
* @ param privateKeyId the logical name of the private key as configured in
* the underlying mapping
* @ param message the message to sign
* @ return a base64 encoded version of the signature
* @ see # setPrivateKeyMap ( java . util . Map ) */
public String sign ( String privateKeyId , String message ) { } }
|
Base64EncodedSigner signer = cache . get ( privateKeyId ) ; if ( signer != null ) { return signer . sign ( message ) ; } Base64EncodedSignerImpl signerImpl = new Base64EncodedSignerImpl ( ) ; signerImpl . setAlgorithm ( algorithm ) ; signerImpl . setCharsetName ( charsetName ) ; signerImpl . setProvider ( provider ) ; PrivateKey privateKey = privateKeyMap . get ( privateKeyId ) ; if ( privateKey == null ) { throw new SignatureException ( "private key not found: privateKeyId=" + privateKeyId ) ; } signerImpl . setPrivateKey ( privateKey ) ; cache . put ( privateKeyId , signerImpl ) ; return signerImpl . sign ( message ) ;
|
public class ClassFileMetaData {

    /**
     * Checks if the constant pool contains a reference to a given method.
     *
     * @param className must be provided JVM-style, such as {@code java/lang/String}
     * @param methodName the simple name of the method
     * @param descriptor must be provided JVM-style, such as {@code (IZ)Ljava/lang/String;}
     */
    public boolean usesMethod(String className, String methodName, String descriptor) {
        // Resolve the pool indices of the class entry and the name-and-type entry;
        // if either is absent, no method reference can possibly match.
        int classIndex = findClass(className);
        if (classIndex == NOT_FOUND) return false;
        int nameAndTypeIndex = findNameAndType(methodName, descriptor);
        if (nameAndTypeIndex == NOT_FOUND) return false;
        // Scan the pool (index 0 is unused in the class-file format) for a method
        // entry whose two operands point at both resolved indices.
        for (int i = 1; i < maxPoolSize; i++) {
            if (isMethod(i) && readValue(offsets[i]) == classIndex && readValue(offsets[i] + 2) == nameAndTypeIndex) return true;
        }
        return false;
    }
}
|
public class LoglevelService { /** * 设置当前进程日志等级
* @ param level
* @ return */
public boolean setLevel ( String level ) { } }
|
boolean isSucceed = true ; try { LoggerContext loggerContext = ( LoggerContext ) LoggerFactory . getILoggerFactory ( ) ; loggerContext . getLogger ( "root" ) . setLevel ( Level . valueOf ( level ) ) ; } catch ( Exception e ) { e . printStackTrace ( ) ; isSucceed = false ; } return isSucceed ;
|
public class ChangesOnMyIssueNotificationHandler { /** * Is the author of the change the assignee of the specified issue ?
* If not , it means the issue has been changed by a peer of the author of the change . */
private static boolean isPeerChanged ( Change change , ChangedIssue issue ) { } }
|
Optional < User > assignee = issue . getAssignee ( ) ; return ! assignee . isPresent ( ) || ! change . isAuthorLogin ( assignee . get ( ) . getLogin ( ) ) ;
|
public class DefaultGroovyMethods {

    /**
     * Iterates through the given array, passing in the initial value to
     * the closure along with the first item. The result is passed back (injected) into
     * the closure along with the second item. The new result is injected back into
     * the closure along with the third item and so on until all elements of the array
     * have been used. Also known as foldLeft in functional parlance.
     *
     * @param self an Object[]
     * @param initialValue some initial value
     * @param closure a closure
     * @return the result of the last closure call
     * @see #inject(Collection, Object, Closure)
     * @since 1.5.0
     */
    public static <E, T, U extends T, V extends T> T inject(E[] self, U initialValue, @ClosureParams(value = FromString.class, options = "U,E") Closure<V> closure) {
        // One two-slot argument array is reused for every call: (accumulator, element).
        Object[] params = new Object[2];
        T value = initialValue;
        for (Object next : self) {
            params[0] = value;
            params[1] = next;
            value = closure.call(params);
        }
        return value;
    }
}
|
public class Spies { /** * Proxies a function spying for result and parameter .
* @ param < T > the function parameter type
* @ param < R > the function result type
* @ param function the function to be spied
* @ param result a box that will be containing spied result
* @ param param a box that will be containing spied param
* @ return the proxied function */
public static < T , R > Function < T , R > spy ( Function < T , R > function , Box < R > result , Box < T > param ) { } }
|
return new CapturingFunction < > ( function , result , param ) ;
|
public class BaseDesktopMenu {

    /**
     * Gets the help set, loading "help/simple-hs.xml" from the classpath.
     * If the resource is missing, an empty {@link HelpSet} is returned instead;
     * if constructing the help set fails with a {@link HelpSetException}, an error
     * dialog is shown, the error is logged, and {@code null} is returned.
     *
     * @return the help set, or {@code null} when creation failed with an exception
     */
    public HelpSet getHelpSet() {
        HelpSet hs = null;
        final String filename = "simple-hs.xml";
        final String path = "help/" + filename;
        URL hsURL;
        hsURL = ClassExtensions.getResource(path);
        try {
            if (hsURL != null) {
                hs = new HelpSet(ClassExtensions.getClassLoader(), hsURL);
            } else {
                // No bundled help resource found: fall back to an empty help set.
                hs = new HelpSet();
            }
        } catch (final HelpSetException e) {
            // Surface the failure to the user as an HTML dialog and log it.
            String title = e.getLocalizedMessage();
            String htmlMessage = "<html><body width='650'>" + "<h2>" + title + "</h2>" + "<p>" + e.getMessage() + "\n" + path;
            JOptionPane.showMessageDialog(this.getParent(), htmlMessage, title, JOptionPane.ERROR_MESSAGE);
            log.log(Level.SEVERE, e.getMessage(), e);
        }
        return hs;
    }
}
|
public class Props { /** * Gets the class from the Props . If it doesn ' t exist , it will return the defaultClass */
public Class < ? > getClass ( final String key , final Class < ? > defaultClass ) { } }
|
if ( containsKey ( key ) ) { return getClass ( key ) ; } else { return defaultClass ; }
|
public class nspbr6 { /** * Use this API to renumber nspbr6. */
public static base_response renumber ( nitro_service client ) throws Exception { } }
|
nspbr6 renumberresource = new nspbr6 ( ) ; return renumberresource . perform_operation ( client , "renumber" ) ;
|
public class HttpBuilder {

    /**
     * Executes a TRACE request on the configured URI, with additional configuration
     * provided by the configuration function. The result will be cast to the
     * specified {@code type}. This method is generally used for Java-specific
     * configuration.
     *
     * [source,groovy]
     * HttpBuilder http = HttpBuilder.configure(config -> {
     *     config.getRequest().setUri("http://localhost:10101");
     * String result = http.options(String.class, config -> {
     *     config.getRequest().getUri().setPath("/foo");
     *
     * The {@code configuration} {@link Consumer} allows additional configuration for
     * this request based on the {@link HttpConfig} interface.
     *
     * @param type the type of the response content
     * @param configuration the additional configuration function (delegated to {@link HttpConfig})
     * @return the resulting content cast to the specified type
     */
    public <T> T trace(final Class<T> type, final Consumer<HttpConfig> configuration) {
        // Build the request config for TRACE, run it through the TRACE interceptor
        // with doTrace as the terminal step, then cast the content to the caller's type.
        return type.cast(interceptors.get(HttpVerb.TRACE).apply(configureRequest(type, HttpVerb.TRACE, configuration), this::doTrace));
    }
}
|
public class GrailsClassUtils { /** * < p > Work out if the specified property is readable and static . Java introspection does not
* recognize this concept of static properties but Groovy does . We also consider public static fields
* as static properties with no getters / setters < / p >
* @ param clazz The class to check for static property
* @ param propertyName The property name
* @ return true if the property with name propertyName has a static getter method */
@ SuppressWarnings ( "rawtypes" ) public static boolean isStaticProperty ( Class clazz , String propertyName ) { } }
|
Method getter = BeanUtils . findDeclaredMethod ( clazz , getGetterName ( propertyName ) , ( Class [ ] ) null ) ; if ( getter != null ) { return isPublicStatic ( getter ) ; } try { Field f = clazz . getDeclaredField ( propertyName ) ; if ( f != null ) { return isPublicStatic ( f ) ; } } catch ( NoSuchFieldException ignored ) { // ignored
} return false ;
|
public class Histogram3D {

    /**
     * Creates a plot canvas with the histogram plot of given data.
     *
     * @param data a sample set
     * @param k the number of bins
     * @param palette the color palette
     */
    public static PlotCanvas plot(double[][] data, int k, Color[] palette) {
        // Delegates with the boolean option fixed to false — presumably a
        // normalize/probability flag; confirm against the 4-arg overload.
        return plot(data, k, false, palette);
    }
}
|
public class CertificatesImpl {

    /**
     * Cancels a failed deletion of a certificate from the specified account.
     * If you try to delete a certificate that is being used by a pool or compute node,
     * the status of the certificate changes to deleteFailed. If you decide that you want
     * to continue using the certificate, you can use this operation to set the status of
     * the certificate back to active. If you intend to delete the certificate, you do not
     * need to run this operation after the deletion failed. You must make sure that the
     * certificate is not being used by any resources, and then you can try again to
     * delete the certificate.
     *
     * @param thumbprintAlgorithm The algorithm used to derive the thumbprint parameter.
     *        This must be sha1.
     * @param thumbprint The thumbprint of the certificate being deleted.
     * @param certificateCancelDeletionOptions Additional parameters for the operation
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws BatchErrorException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     */
    public void cancelDeletion(String thumbprintAlgorithm, String thumbprint, CertificateCancelDeletionOptions certificateCancelDeletionOptions) {
        // Issue the async call and block until it completes; the response body is
        // discarded because this operation returns no payload.
        cancelDeletionWithServiceResponseAsync(thumbprintAlgorithm, thumbprint, certificateCancelDeletionOptions).toBlocking().single().body();
    }
}
|
public class ListPrincipalThingsResult { /** * The things .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setThings ( java . util . Collection ) } or { @ link # withThings ( java . util . Collection ) } if you want to override the
* existing values .
* @ param things
* The things .
* @ return Returns a reference to this object so that method calls can be chained together . */
public ListPrincipalThingsResult withThings ( String ... things ) { } }
|
if ( this . things == null ) { setThings ( new java . util . ArrayList < String > ( things . length ) ) ; } for ( String ele : things ) { this . things . add ( ele ) ; } return this ;
|
public class AttributeType {

    /**
     * Returns for given parameter <i>_id</i> the instance of class {@link AttributeType}.
     *
     * @param _id id to search in the cache
     * @return instance of class {@link AttributeType}
     * @see #CACHE
     * @throws CacheReloadException on error
     */
    public static AttributeType get(final long _id) throws CacheReloadException {
        final Cache<Long, AttributeType> cache = InfinispanCache.get().<Long, AttributeType>getCache(AttributeType.IDCACHE);
        // On a cache miss, trigger a database load; the loader is presumably expected
        // to populate the cache as a side effect — NOTE(review): if it doesn't, the
        // final lookup below returns null. Confirm against getAttributeTypeFromDB.
        if (!cache.containsKey(_id)) {
            AttributeType.getAttributeTypeFromDB(AttributeType.SQL_ID, _id);
        }
        return cache.get(_id);
    }
}
|
public class Type { /** * Creates a new instance of { @ code GenericDeclaration } when the given declaration is not empty other wise it
* returns { @ link GenericDeclaration # UNDEFINED } .
* @ param declaration
* declaration of a generic
* @ return instance of { @ code GenericDeclaration } and never null */
@ Nonnull private static GenericDeclaration createGenericDeclaration ( @ Nonnull final String declaration ) { } }
|
Check . notNull ( declaration , "declaration" ) ; return declaration . isEmpty ( ) ? GenericDeclaration . UNDEFINED : GenericDeclaration . of ( declaration ) ;
|
public class HealthMonitorTask {

    /**
     * Starts the continuous health monitor: collects the result of every health
     * indicator, looks for the first failing result (DOWN, or non-operational),
     * and updates the current health status — falling back to UP when no failing
     * result is emitted.
     */
    @Scheduled(fixedDelay = "${micronaut.health.monitor.interval:1m}", initialDelay = "${micronaut.health.monitor.initial-delay:1m}")
    void monitor() {
        if (LOG.isDebugEnabled()) {
            LOG.debug("Starting health monitor check");
        }
        // Gather every indicator's reactive result and merge them into one stream.
        List<Publisher<HealthResult>> healthResults = healthIndicators.stream().map(HealthIndicator::getResult).collect(Collectors.toList());
        // Keep only failing results: explicitly DOWN, or operational flag false
        // (an absent operational flag is treated as operational).
        Flowable<HealthResult> resultFlowable = Flowable.merge(healthResults).filter(healthResult -> {
            HealthStatus status = healthResult.getStatus();
            return status.equals(HealthStatus.DOWN) || !status.getOperational().orElse(true);
        });
        resultFlowable.firstElement().subscribe(new MaybeObserver<HealthResult>() {
            @Override
            public void onSubscribe(Disposable d) {
            }

            @Override
            public void onSuccess(HealthResult healthResult) {
                // At least one indicator failed: record its status.
                HealthStatus status = healthResult.getStatus();
                if (LOG.isDebugEnabled()) {
                    LOG.debug("Health monitor check failed with status {}", status);
                }
                currentHealthStatus.update(status);
            }

            @Override
            public void onError(Throwable e) {
                // A check itself blew up: mark the system DOWN with the error message.
                if (LOG.isErrorEnabled()) {
                    LOG.error("Health monitor check failed with exception: " + e.getMessage(), e);
                }
                currentHealthStatus.update(HealthStatus.DOWN.describe("Error occurred running health check: " + e.getMessage()));
            }

            @Override
            public void onComplete() {
                // The filtered stream completed empty: nothing failed, so report UP.
                if (LOG.isDebugEnabled()) {
                    LOG.debug("Health monitor check passed.");
                }
                currentHealthStatus.update(HealthStatus.UP);
            }
        });
    }
}
|
public class Bus { /** * Get bus instance by object class name
* @ param busName bus name
* @ return event bus */
static public Bus getBy ( Object busName ) { } }
|
if ( busName == null ) return get ( ) ; return getBy ( busName . getClass ( ) ) ;
|
public class UnicodeDecompressor {

    /**
     * Decompress a byte array into a Unicode character array.
     * This function will either completely fill the output buffer,
     * or consume the entire input.
     *
     * NOTE(review): this looks like an SCSU-style decompressor (single-byte /
     * Unicode modes with window tags) — the tag constants are defined elsewhere;
     * confirm against the enclosing class.
     *
     * @param byteBuffer The byte buffer to decompress.
     * @param byteBufferStart The start of the byte run to decompress.
     * @param byteBufferLimit The limit of the byte run to decompress.
     * @param bytesRead A one-element array. If not null, on return
     *        the number of bytes read from byteBuffer.
     * @param charBuffer A buffer to receive the decompressed data.
     *        This buffer must be at minimum two characters in size.
     * @param charBufferStart The starting offset to which to write decompressed data.
     * @param charBufferLimit The limiting offset for writing decompressed data.
     * @return The number of Unicode characters written to charBuffer.
     */
    public int decompress(byte[] byteBuffer, int byteBufferStart, int byteBufferLimit, int[] bytesRead, char[] charBuffer, int charBufferStart, int charBufferLimit) {
        // the current position in the source byte buffer
        int bytePos = byteBufferStart; // the current position in the target char buffer
        int ucPos = charBufferStart; // the current byte from the source buffer
        int aByte = 0x00; // charBuffer must be at least 2 chars in size
        if ( charBuffer . length < 2 || ( charBufferLimit - charBufferStart ) < 2 ) throw new IllegalArgumentException ( "charBuffer.length < 2" ) ; // if our internal buffer isn ' t empty , flush its contents
        // to the output buffer before doing any more decompression
        if ( fBufferLength > 0 ) { int newBytes = 0 ; // fill the buffer completely , to guarantee one full character
        if ( fBufferLength != BUFSIZE ) { newBytes = fBuffer . length - fBufferLength ; // verify there are newBytes bytes in byteBuffer
        if ( byteBufferLimit - byteBufferStart < newBytes ) newBytes = byteBufferLimit - byteBufferStart ; System . arraycopy ( byteBuffer , byteBufferStart , fBuffer , fBufferLength , newBytes ) ; } // reset buffer length to 0 before recursive call
        fBufferLength = 0 ; // call self recursively to decompress the buffer
        int count = decompress ( fBuffer , 0 , fBuffer . length , null , charBuffer , charBufferStart , charBufferLimit ) ; // update the positions into the arrays
        ucPos += count ; bytePos += newBytes ; } // the main decompression loop
        mainLoop : while ( bytePos < byteBufferLimit && ucPos < charBufferLimit ) { switch ( fMode ) { case SINGLEBYTEMODE : // single - byte mode decompression loop
        singleByteModeLoop : while ( bytePos < byteBufferLimit && ucPos < charBufferLimit ) { aByte = byteBuffer [ bytePos ++ ] & 0xFF ; switch ( aByte ) { // All bytes from 0x80 through 0xFF are remapped
        // to chars or surrogate pairs according to the
        // currently active window
        case 0x80 : case 0x81 : case 0x82 : case 0x83 : case 0x84 : case 0x85 : case 0x86 : case 0x87 : case 0x88 : case 0x89 : case 0x8A : case 0x8B : case 0x8C : case 0x8D : case 0x8E : case 0x8F : case 0x90 : case 0x91 : case 0x92 : case 0x93 : case 0x94 : case 0x95 : case 0x96 : case 0x97 : case 0x98 : case 0x99 : case 0x9A : case 0x9B : case 0x9C : case 0x9D : case 0x9E : case 0x9F : case 0xA0 : case 0xA1 : case 0xA2 : case 0xA3 : case 0xA4 : case 0xA5 : case 0xA6 : case 0xA7 : case 0xA8 : case 0xA9 : case 0xAA : case 0xAB : case 0xAC : case 0xAD : case 0xAE : case 0xAF : case 0xB0 : case 0xB1 : case 0xB2 : case 0xB3 : case 0xB4 : case 0xB5 : case 0xB6 : case 0xB7 : case 0xB8 : case 0xB9 : case 0xBA : case 0xBB : case 0xBC : case 0xBD : case 0xBE : case 0xBF : case 0xC0 : case 0xC1 : case 0xC2 : case 0xC3 : case 0xC4 : case 0xC5 : case 0xC6 : case 0xC7 : case 0xC8 : case 0xC9 : case 0xCA : case 0xCB : case 0xCC : case 0xCD : case 0xCE : case 0xCF : case 0xD0 : case 0xD1 : case 0xD2 : case 0xD3 : case 0xD4 : case 0xD5 : case 0xD6 : case 0xD7 : case 0xD8 : case 0xD9 : case 0xDA : case 0xDB : case 0xDC : case 0xDD : case 0xDE : case 0xDF : case 0xE0 : case 0xE1 : case 0xE2 : case 0xE3 : case 0xE4 : case 0xE5 : case 0xE6 : case 0xE7 : case 0xE8 : case 0xE9 : case 0xEA : case 0xEB : case 0xEC : case 0xED : case 0xEE : case 0xEF : case 0xF0 : case 0xF1 : case 0xF2 : case 0xF3 : case 0xF4 : case 0xF5 : case 0xF6 : case 0xF7 : case 0xF8 : case 0xF9 : case 0xFA : case 0xFB : case 0xFC : case 0xFD : case 0xFE : case 0xFF : // For offsets < = 0xFFFF , convert to a single char
        // by adding the window ' s offset and subtracting
        // the generic compression offset
        if ( fOffsets [ fCurrentWindow ] <= 0xFFFF ) { charBuffer [ ucPos ++ ] = ( char ) ( aByte + fOffsets [ fCurrentWindow ] - COMPRESSIONOFFSET ) ; } // For offsets > 0x10000 , convert to a surrogate pair by
        // normBase = window ' s offset - 0x10000
        // high surr . = 0xD800 + ( normBase > > 10)
        // low surr . = 0xDC00 + ( normBase & 0x3FF ) + ( byte & 0x7F )
        else { // make sure there is enough room to write
        // both characters
        // if not , save state and break out
        if ( ( ucPos + 1 ) >= charBufferLimit ) { -- bytePos ; System . arraycopy ( byteBuffer , bytePos , fBuffer , 0 , byteBufferLimit - bytePos ) ; fBufferLength = byteBufferLimit - bytePos ; bytePos += fBufferLength ; break mainLoop ; } int normalizedBase = fOffsets [ fCurrentWindow ] - 0x10000 ; charBuffer [ ucPos ++ ] = ( char ) ( 0xD800 + ( normalizedBase >> 10 ) ) ; charBuffer [ ucPos ++ ] = ( char ) ( 0xDC00 + ( normalizedBase & 0x3FF ) + ( aByte & 0x7F ) ) ; } break ; // bytes from 0x20 through 0x7F are treated as ASCII and
        // are remapped to chars by padding the high byte
        // ( this is the same as quoting from static window 0)
        // NUL ( 0x00 ) , HT ( 0x09 ) , CR ( 0x0A ) , LF ( 0x0D )
        // are treated as ASCII as well
        case 0x00 : case 0x09 : case 0x0A : case 0x0D : case 0x20 : case 0x21 : case 0x22 : case 0x23 : case 0x24 : case 0x25 : case 0x26 : case 0x27 : case 0x28 : case 0x29 : case 0x2A : case 0x2B : case 0x2C : case 0x2D : case 0x2E : case 0x2F : case 0x30 : case 0x31 : case 0x32 : case 0x33 : case 0x34 : case 0x35 : case 0x36 : case 0x37 : case 0x38 : case 0x39 : case 0x3A : case 0x3B : case 0x3C : case 0x3D : case 0x3E : case 0x3F : case 0x40 : case 0x41 : case 0x42 : case 0x43 : case 0x44 : case 0x45 : case 0x46 : case 0x47 : case 0x48 : case 0x49 : case 0x4A : case 0x4B : case 0x4C : case 0x4D : case 0x4E : case 0x4F : case 0x50 : case 0x51 : case 0x52 : case 0x53 : case 0x54 : case 0x55 : case 0x56 : case 0x57 : case 0x58 : case 0x59 : case 0x5A : case 0x5B : case 0x5C : case 0x5D : case 0x5E : case 0x5F : case 0x60 : case 0x61 : case 0x62 : case 0x63 : case 0x64 : case 0x65 : case 0x66 : case 0x67 : case 0x68 : case 0x69 : case 0x6A : case 0x6B : case 0x6C : case 0x6D : case 0x6E : case 0x6F : case 0x70 : case 0x71 : case 0x72 : case 0x73 : case 0x74 : case 0x75 : case 0x76 : case 0x77 : case 0x78 : case 0x79 : case 0x7A : case 0x7B : case 0x7C : case 0x7D : case 0x7E : case 0x7F : charBuffer [ ucPos ++ ] = ( char ) aByte ; break ; // quote unicode
        case SQUOTEU : // verify we have two bytes following tag
        // if not , save state and break out
        if ( ( bytePos + 1 ) >= byteBufferLimit ) { -- bytePos ; System . arraycopy ( byteBuffer , bytePos , fBuffer , 0 , byteBufferLimit - bytePos ) ; fBufferLength = byteBufferLimit - bytePos ; bytePos += fBufferLength ; break mainLoop ; } aByte = byteBuffer [ bytePos ++ ] ; charBuffer [ ucPos ++ ] = ( char ) ( aByte << 8 | ( byteBuffer [ bytePos ++ ] & 0xFF ) ) ; break ; // switch to Unicode mode
        case SCHANGEU : fMode = UNICODEMODE ; break singleByteModeLoop ; // break ;
        // handle all quote tags
        case SQUOTE0 : case SQUOTE1 : case SQUOTE2 : case SQUOTE3 : case SQUOTE4 : case SQUOTE5 : case SQUOTE6 : case SQUOTE7 : // verify there is a byte following the tag
        // if not , save state and break out
        if ( bytePos >= byteBufferLimit ) { -- bytePos ; System . arraycopy ( byteBuffer , bytePos , fBuffer , 0 , byteBufferLimit - bytePos ) ; fBufferLength = byteBufferLimit - bytePos ; bytePos += fBufferLength ; break mainLoop ; } // if the byte is in the range 0x00 - 0x7F , use
        // static window n otherwise , use dynamic window n
        int dByte = byteBuffer [ bytePos ++ ] & 0xFF ; charBuffer [ ucPos ++ ] = ( char ) ( dByte + ( dByte >= 0x00 && dByte < 0x80 ? sOffsets [ aByte - SQUOTE0 ] : ( fOffsets [ aByte - SQUOTE0 ] - COMPRESSIONOFFSET ) ) ) ; break ; // handle all change tags
        case SCHANGE0 : case SCHANGE1 : case SCHANGE2 : case SCHANGE3 : case SCHANGE4 : case SCHANGE5 : case SCHANGE6 : case SCHANGE7 : fCurrentWindow = aByte - SCHANGE0 ; break ; // handle all define tags
        case SDEFINE0 : case SDEFINE1 : case SDEFINE2 : case SDEFINE3 : case SDEFINE4 : case SDEFINE5 : case SDEFINE6 : case SDEFINE7 : // verify there is a byte following the tag
        // if not , save state and break out
        if ( bytePos >= byteBufferLimit ) { -- bytePos ; System . arraycopy ( byteBuffer , bytePos , fBuffer , 0 , byteBufferLimit - bytePos ) ; fBufferLength = byteBufferLimit - bytePos ; bytePos += fBufferLength ; break mainLoop ; } fCurrentWindow = aByte - SDEFINE0 ; fOffsets [ fCurrentWindow ] = sOffsetTable [ byteBuffer [ bytePos ++ ] & 0xFF ] ; break ; // handle define extended tag
        case SDEFINEX : // verify we have two bytes following tag
        // if not , save state and break out
        if ( ( bytePos + 1 ) >= byteBufferLimit ) { -- bytePos ; System . arraycopy ( byteBuffer , bytePos , fBuffer , 0 , byteBufferLimit - bytePos ) ; fBufferLength = byteBufferLimit - bytePos ; bytePos += fBufferLength ; break mainLoop ; } aByte = byteBuffer [ bytePos ++ ] & 0xFF ; fCurrentWindow = ( aByte & 0xE0 ) >> 5 ; fOffsets [ fCurrentWindow ] = 0x10000 + ( 0x80 * ( ( ( aByte & 0x1F ) << 8 ) | ( byteBuffer [ bytePos ++ ] & 0xFF ) ) ) ; break ; // reserved , shouldn ' t happen
        case SRESERVED : break ; } // end switch
        } // end while
        break ; case UNICODEMODE : // unicode mode decompression loop
        unicodeModeLoop : while ( bytePos < byteBufferLimit && ucPos < charBufferLimit ) { aByte = byteBuffer [ bytePos ++ ] & 0xFF ; switch ( aByte ) { // handle all define tags
        case UDEFINE0 : case UDEFINE1 : case UDEFINE2 : case UDEFINE3 : case UDEFINE4 : case UDEFINE5 : case UDEFINE6 : case UDEFINE7 : // verify there is a byte following tag
        // if not , save state and break out
        if ( bytePos >= byteBufferLimit ) { -- bytePos ; System . arraycopy ( byteBuffer , bytePos , fBuffer , 0 , byteBufferLimit - bytePos ) ; fBufferLength = byteBufferLimit - bytePos ; bytePos += fBufferLength ; break mainLoop ; } fCurrentWindow = aByte - UDEFINE0 ; fOffsets [ fCurrentWindow ] = sOffsetTable [ byteBuffer [ bytePos ++ ] & 0xFF ] ; fMode = SINGLEBYTEMODE ; break unicodeModeLoop ; // break ;
        // handle define extended tag
        case UDEFINEX : // verify we have two bytes following tag
        // if not , save state and break out
        if ( ( bytePos + 1 ) >= byteBufferLimit ) { -- bytePos ; System . arraycopy ( byteBuffer , bytePos , fBuffer , 0 , byteBufferLimit - bytePos ) ; fBufferLength = byteBufferLimit - bytePos ; bytePos += fBufferLength ; break mainLoop ; } aByte = byteBuffer [ bytePos ++ ] & 0xFF ; fCurrentWindow = ( aByte & 0xE0 ) >> 5 ; fOffsets [ fCurrentWindow ] = 0x10000 + ( 0x80 * ( ( ( aByte & 0x1F ) << 8 ) | ( byteBuffer [ bytePos ++ ] & 0xFF ) ) ) ; fMode = SINGLEBYTEMODE ; break unicodeModeLoop ; // break ;
        // handle all change tags
        case UCHANGE0 : case UCHANGE1 : case UCHANGE2 : case UCHANGE3 : case UCHANGE4 : case UCHANGE5 : case UCHANGE6 : case UCHANGE7 : fCurrentWindow = aByte - UCHANGE0 ; fMode = SINGLEBYTEMODE ; break unicodeModeLoop ; // break ;
        // quote unicode
        case UQUOTEU : // verify we have two bytes following tag
        // if not , save state and break out
        if ( bytePos >= byteBufferLimit - 1 ) { -- bytePos ; System . arraycopy ( byteBuffer , bytePos , fBuffer , 0 , byteBufferLimit - bytePos ) ; fBufferLength = byteBufferLimit - bytePos ; bytePos += fBufferLength ; break mainLoop ; } aByte = byteBuffer [ bytePos ++ ] ; charBuffer [ ucPos ++ ] = ( char ) ( aByte << 8 | ( byteBuffer [ bytePos ++ ] & 0xFF ) ) ; break ; default : // verify there is a byte following tag
        // if not , save state and break out
        if ( bytePos >= byteBufferLimit ) { -- bytePos ; System . arraycopy ( byteBuffer , bytePos , fBuffer , 0 , byteBufferLimit - bytePos ) ; fBufferLength = byteBufferLimit - bytePos ; bytePos += fBufferLength ; break mainLoop ; } charBuffer [ ucPos ++ ] = ( char ) ( aByte << 8 | ( byteBuffer [ bytePos ++ ] & 0xFF ) ) ; break ; } // end switch
        } // end while
        break ; } // end switch ( fMode )
        } // end while
        // fill in output parameter
        if ( bytesRead != null ) bytesRead [ 0 ] = ( bytePos - byteBufferStart ) ; // return # of chars written
        return ( ucPos - charBufferStart ) ;
    }
}
|
public class RendererFactory {

    /**
     * Gets the best suited {@link Renderer} for the given {@link Renderable} to the
     * given {@link RenderingFormat}.
     *
     * @param <I> the renderable (input) type
     * @param <O> the rendered output type
     * @param renderable the object to render
     * @param renderingFormat the target rendering format
     * @return the best matching renderer, or {@code null} when none matches
     */
    public <I extends Renderable, O> Renderer<? super I, ? extends O> getRenderer(final I renderable, final Class<? extends RenderingFormat<? extends O>> renderingFormat) {
        // Scan all renderer descriptors registered for this format, keeping the best
        // selection; isRendererMatch is given the current best so it can compare.
        RendererSelection bestMatch = null;
        final Collection<RendererBeanDescriptor<?>> descriptors = _descriptorProvider.getRendererBeanDescriptorsForRenderingFormat(renderingFormat);
        for (final RendererBeanDescriptor<?> descriptor : descriptors) {
            final RendererSelection rendererMatch = isRendererMatch(descriptor, renderable, bestMatch);
            if (rendererMatch != null) {
                bestMatch = rendererMatch;
            }
        }
        if (bestMatch == null) {
            logger.warn("Didn't find any matches for renderable {} (format={})", renderable, renderingFormat);
            return null;
        }
        // The selection holds an untyped renderer; the cast is presumed safe because
        // matches were selected for this format — confirm against RendererSelection.
        @SuppressWarnings("unchecked")
        final Renderer<? super I, ? extends O> renderer = (Renderer<? super I, ? extends O>) bestMatch.getRenderer();
        if (logger.isInfoEnabled()) {
            logger.info("Returning renderer '{}' for renderable '{}' in format '{}'", new Object[] { renderer, renderable.getClass().getName(), renderingFormat.getName() });
        }
        return renderer;
    }
}
|
public class JaxWsSSLManager {
    /**
     * Get the SSLSocketFactory by sslRef; if the configuration cannot be found, optionally
     * fall back to the server's default SSL configuration.
     *
     * @param sslRef the id of the SSL configuration to look up
     * @param props additional properties that override the properties in the resolved SSLConfig
     * @param fallbackOnDefault if true, fall back on the server default SSL configuration
     *            when no configuration is found for {@code sslRef}
     * @return the SSLSocketFactory, or {@code null} if no SSL support service or no
     *         configuration is available
     * @throws IllegalArgumentException wrapping an {@link SSLException} from the lookup
     * @throws IllegalStateException wrapping any other failure while building the factory
     */
    public static SSLSocketFactory getSSLSocketFactoryBySSLRef(String sslRef, Map<String, Object> props, boolean fallbackOnDefault) {
        SSLSupport sslSupportService = tryGetSSLSupport();
        // No SSL support service registered: nothing we can do.
        if (null == sslSupportService) {
            return null;
        }
        JSSEHelper jsseHelper = sslSupportService.getJSSEHelper();
        Properties sslConfig = null;
        SSLConfig sslConfigCopy = null;
        try {
            sslConfig = jsseHelper.getProperties(sslRef);
            if (null != sslConfig) { // must copy one
                // Copy so that the overrides below do not mutate the shared configuration.
                sslConfigCopy = new SSLConfig(sslConfig);
            } else {
                if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                    Tr.debug(tc, "Cannot get the ssl configuration by sslRef=" + sslRef);
                }
                if (fallbackOnDefault) {
                    if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                        Tr.debug(tc, "Try to get the default ssl configuration of server");
                    }
                    // maybe use something like: <sslDefault sslRef="myDefaultSSLConfig">
                    sslConfig = jsseHelper.getProperties(null, null, null);
                    if (null != sslConfig) {
                        sslConfigCopy = new SSLConfig(sslConfig);
                    }
                }
            }
            // Neither the named nor the default configuration was found.
            if (null == sslConfigCopy) {
                return null;
            }
            // override the existed property in SSLConfig
            if (null != props && !props.isEmpty()) {
                Iterator<Map.Entry<String, Object>> iter = props.entrySet().iterator();
                while (iter.hasNext()) {
                    Map.Entry<String, Object> entry = iter.next();
                    if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                        Tr.debug(tc, entry.getKey() + "=" + entry.getValue() + " is overriden in SSLConfig=" + sslRef);
                    }
                    sslConfigCopy.put(entry.getKey(), entry.getValue());
                }
            }
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                Tr.debug(tc, "Get the SSLSocketFactory by sslRef=" + sslRef);
            }
            return sslSupportService.getJSSEProvider().getSSLSocketFactory(null, sslConfigCopy);
        } catch (SSLException e) {
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                Tr.debug(tc, "err.when.get.ssl.config", sslRef);
            }
            throw new IllegalArgumentException(e);
        } catch (Exception e) {
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                Tr.debug(tc, "err.when.get.ssl.socket.factory", sslRef);
            }
            throw new IllegalStateException(e);
        }
    }
}
|
public class BDDMockito { /** * see original { @ link Mockito # doThrow ( Class ) }
* @ since 1.9.0 */
public static BDDStubber willThrow ( Class < ? extends Throwable > toBeThrown , Class < ? extends Throwable > ... throwableTypes ) { } }
|
return new BDDStubberImpl ( Mockito . doThrow ( toBeThrown , throwableTypes ) ) ;
|
public class Unchecked { /** * Wrap a { @ link CheckedBiFunction } in a { @ link BiFunction } with a custom handler for checked exceptions .
* Example :
* < code > < pre >
* map . computeIfPresent ( " key " , Unchecked . biFunction (
* ( k , v ) - > {
* if ( k = = null | | v = = null )
* throw new Exception ( " No nulls allowed in map " ) ;
* return 42;
* throw new IllegalStateException ( e ) ;
* < / pre > < / code > */
public static < T , U , R > BiFunction < T , U , R > biFunction ( CheckedBiFunction < T , U , R > function , Consumer < Throwable > handler ) { } }
|
return ( t , u ) -> { try { return function . apply ( t , u ) ; } catch ( Throwable e ) { handler . accept ( e ) ; throw new IllegalStateException ( "Exception handler must throw a RuntimeException" , e ) ; } } ;
|
public class RSAUtils { /** * Verify a signature with RSA public key , using { @ link # DEFAULT _ SIGNATURE _ ALGORITHM } .
* @ param keyData
* RSA public key data ( value of { @ link RSAPublicKey # getEncoded ( ) } )
* @ param message
* @ param signature
* @ return
* @ throws InvalidKeyException
* @ throws NoSuchAlgorithmException
* @ throws InvalidKeySpecException
* @ throws SignatureException */
public static boolean verifySignatureWithPublicKey ( byte [ ] keyData , byte [ ] message , byte [ ] signature ) throws InvalidKeyException , NoSuchAlgorithmException , InvalidKeySpecException , SignatureException { } }
|
return verifySignatureWithPublicKey ( keyData , message , signature , DEFAULT_SIGNATURE_ALGORITHM ) ;
|
public class PersonNatureAttr { /** * 设置
* @ param index
* @ param freq */
public void addFreq ( int index , int freq ) { } }
|
switch ( index ) { case 11 : this . end += freq ; allFreq += freq ; break ; case 12 : this . end += freq ; this . begin += freq ; allFreq += freq ; break ; case 44 : this . split += freq ; allFreq += freq ; break ; }
|
public class StatusCmd { public Any execute ( DeviceImpl device , Any in_any ) throws DevFailed { } }
|
Util . out4 . println ( "Status::execute(): arrived" ) ; // return status string as CORBA _ Any
Any out_any = Util . instance ( ) . get_orb ( ) . create_any ( ) ; String s = device . dev_status ( ) ; out_any . insert_string ( s ) ; Util . out4 . println ( "Leaving Status::execute()" ) ; return out_any ;
|
public class ISO9075 { /** * Encodes the local part of < code > name < / code > as specified in ISO 9075.
* @ param name
* the < code > QName < / code > to encode .
* @ return the encoded < code > QName < / code > or < code > name < / code > if it does not need encoding . */
public static InternalQName encode ( InternalQName name ) { } }
|
String encoded = encode ( name . getName ( ) ) ; if ( encoded . equals ( name . getName ( ) ) ) { return name ; } else { return new InternalQName ( name . getNamespace ( ) , encoded ) ; }
|
public class GroupService { /** * Find group based on config instance and parent group identifier
* @ param config group hierarchy config
* @ param parentGroupId parent group id
* @ return GroupMetadata */
private GroupMetadata findGroup ( GroupHierarchyConfig config , String parentGroupId ) { } }
|
GroupMetadata parentGroup = findByRef ( Group . refById ( parentGroupId ) ) ; return parentGroup . getGroups ( ) . stream ( ) . filter ( group -> group . getName ( ) . equals ( config . getName ( ) ) ) . findFirst ( ) . orElse ( null ) ;
|
public class DateUtils { /** * Returns a < code > ZonedDateTime < / code > from the given epoch value .
* Note that this implementation attempts to protect against caller providing timestamps in
* different units . For example , Unix timestamps are the number of SECONDS since January 1,
* 1970 , while Java Dates are number of MILLISECONDS since January 1 , 1970.
* @ param epoch
* timestamp value in seconds , milliseconds or microseconds
* @ return a < code > ZonedDateTime < / code > or null if the date is not valid */
public static ZonedDateTime toZonedDateTimeUtc ( final long epoch ) { } }
|
final EpochUnits units = EpochUnits . valueOf ( epoch ) ; return toZonedDateTimeUtc ( epoch , units ) ;
|
public class EvaluateClustering { /** * Evaluate a clustering result .
* @ param db Database
* @ param c Clustering
* @ param refc Reference clustering */
protected void evaluteResult ( Database db , Clustering < ? > c , Clustering < ? > refc ) { } }
|
ClusterContingencyTable contmat = new ClusterContingencyTable ( selfPairing , noiseSpecialHandling ) ; contmat . process ( refc , c ) ; ScoreResult sr = new ScoreResult ( contmat ) ; sr . addHeader ( c . getLongName ( ) ) ; db . getHierarchy ( ) . add ( c , sr ) ;
|
public class Client {
    /**
     * Performs all actions that have been configured: resolves the dump file (local or
     * most recent JSON dump), registers every ready action as a document processor,
     * processes the dump, and writes the final report.
     */
    public void performActions() {
        // Nothing configured: show usage and bail out.
        if (this.clientConfiguration.getActions().isEmpty()) {
            this.clientConfiguration.printHelp();
            return;
        }
        this.dumpProcessingController.setOfflineMode(this.clientConfiguration.getOfflineMode());

        // Optional custom download directory; abort on I/O failure.
        if (this.clientConfiguration.getDumpDirectoryLocation() != null) {
            try {
                this.dumpProcessingController.setDownloadDirectory(this.clientConfiguration.getDumpDirectoryLocation());
            } catch (IOException e) {
                logger.error("Could not set download directory to "
                        + this.clientConfiguration.getDumpDirectoryLocation() + ": " + e.getMessage());
                logger.error("Aborting");
                return;
            }
        }

        // Apply configured content filters before processing.
        dumpProcessingController.setLanguageFilter(this.clientConfiguration.getFilterLanguages());
        dumpProcessingController.setSiteLinkFilter(this.clientConfiguration.getFilterSiteKeys());
        dumpProcessingController.setPropertyFilter(this.clientConfiguration.getFilterProperties());

        // Use the configured local dump if any; otherwise fetch the most recent JSON dump.
        MwDumpFile dumpFile = this.clientConfiguration.getLocalDumpFile();
        if (dumpFile == null) {
            dumpFile = dumpProcessingController.getMostRecentDump(DumpContentType.JSON);
        } else {
            if (!dumpFile.isAvailable()) {
                logger.error("Dump file not found or not readable: " + dumpFile.toString());
                return;
            }
        }
        this.clientConfiguration.setProjectName(dumpFile.getProjectName());
        this.clientConfiguration.setDateStamp(dumpFile.getDateStamp());

        // Register every action that is ready; actions needing site data get it lazily.
        boolean hasReadyProcessor = false;
        for (DumpProcessingAction props : this.clientConfiguration.getActions()) {
            if (!props.isReady()) {
                continue;
            }
            if (props.needsSites()) {
                prepareSites();
                if (this.sites == null) { // sites unavailable
                    continue;
                }
                props.setSites(this.sites);
            }
            props.setDumpInformation(dumpFile.getProjectName(), dumpFile.getDateStamp());
            this.dumpProcessingController.registerEntityDocumentProcessor(props, null, true);
            hasReadyProcessor = true;
        }
        if (!hasReadyProcessor) {
            return; // silent; non-ready action should report its problem
                    // directly
        }

        // Progress/timing output unless the user asked for quiet mode.
        if (!this.clientConfiguration.isQuiet()) {
            EntityTimerProcessor entityTimerProcessor = new EntityTimerProcessor(0);
            this.dumpProcessingController.registerEntityDocumentProcessor(entityTimerProcessor, null, true);
        }

        openActions();
        this.dumpProcessingController.processDump(dumpFile);
        closeActions();

        // Report failures are logged but do not fail the run.
        try {
            writeReport();
        } catch (IOException e) {
            logger.error("Could not print report file: " + e.getMessage());
        }
    }
}
|
public class InstanceRegistry { /** * Remove a specific instance from services
* @ param id the instances id to unregister
* @ return the id of the unregistered instance */
public Mono < InstanceId > deregister ( InstanceId id ) { } }
|
return repository . computeIfPresent ( id , ( key , instance ) -> Mono . just ( instance . deregister ( ) ) ) . map ( Instance :: getId ) ;
|
public class OperaProxy { /** * Specifies which proxy to use for SOCKS . Currently only supported in { @ link
* com . opera . core . systems . OperaDriver } .
* @ param host the proxy host , expected format is < code > hostname . com : 1234 < / code > */
public void setSocksProxy ( String host ) { } }
|
assertNotMobile ( ) ; setProxyValue ( SOCKS_SERVER , host ) ; setProxyValue ( USE_SOCKS , host != null ) ;
|
public class CmsBrokenLinkRenderer { /** * Adds optional page information to the broken link bean . < p >
* @ param bean the broken link bean
* @ param extraTitle the optional page title
* @ param extraPath the optional page path */
protected void addPageInfo ( CmsBrokenLinkBean bean , String extraTitle , String extraPath ) { } }
|
if ( extraTitle != null ) { bean . addInfo ( messagePageTitle ( ) , "" + extraTitle ) ; } if ( extraPath != null ) { bean . addInfo ( messagePagePath ( ) , "" + extraPath ) ; }
|
public class ScanSpec { /** * Sort prefixes to ensure correct whitelist / blacklist evaluation ( see Issue # 167 ) . */
public void sortPrefixes ( ) { } }
|
for ( final Field field : ScanSpec . class . getDeclaredFields ( ) ) { if ( WhiteBlackList . class . isAssignableFrom ( field . getType ( ) ) ) { try { ( ( WhiteBlackList ) field . get ( this ) ) . sortPrefixes ( ) ; } catch ( final ReflectiveOperationException e ) { throw ClassGraphException . newClassGraphException ( "Field is not accessible: " + field , e ) ; } } }
|
public class BaseBo { /** * Get a BO ' s attribute as a date . If the attribute value is a string , parse
* it as a { @ link Date } using the specified date - time format .
* @ param attrName
* @ param dateTimeFormat
* @ return
* @ since 0.8.0 */
public Date getAttributeAsDate ( String attrName , String dateTimeFormat ) { } }
|
Lock lock = lockForRead ( ) ; try { return MapUtils . getDate ( attributes , attrName , dateTimeFormat ) ; } finally { lock . unlock ( ) ; }
|
public class LogBuffer { /** * Return fix - length string from buffer without null - terminate checking . Fix
* bug # 17 { @ link https : / / github . com / AlibabaTech / canal / issues / 17 } */
public final String getFullString ( final int pos , final int len , String charsetName ) { } }
|
if ( pos + len > limit || pos < 0 ) throw new IllegalArgumentException ( "limit excceed: " + ( pos < 0 ? pos : ( pos + len ) ) ) ; try { return new String ( buffer , origin + pos , len , charsetName ) ; } catch ( UnsupportedEncodingException e ) { throw new IllegalArgumentException ( "Unsupported encoding: " + charsetName , e ) ; }
|
public class FsParserAbstract { /** * Add to bulk an IndexRequest in JSon format */
private void esIndex ( String index , String id , String json , String pipeline ) { } }
|
logger . debug ( "Indexing {}/{}?pipeline={}" , index , id , pipeline ) ; logger . trace ( "JSon indexed : {}" , json ) ; if ( ! closed ) { esClient . index ( index , id , json , pipeline ) ; } else { logger . warn ( "trying to add new file while closing crawler. Document [{}]/[{}] has been ignored" , index , id ) ; }
|
public class JobScheduleUpdateOptions { /** * Set a timestamp indicating the last modified time of the resource known to the client . The operation will be performed only if the resource on the service has been modified since the specified time .
* @ param ifModifiedSince the ifModifiedSince value to set
* @ return the JobScheduleUpdateOptions object itself . */
public JobScheduleUpdateOptions withIfModifiedSince ( DateTime ifModifiedSince ) { } }
|
if ( ifModifiedSince == null ) { this . ifModifiedSince = null ; } else { this . ifModifiedSince = new DateTimeRfc1123 ( ifModifiedSince ) ; } return this ;
|
public class DistBlockIntegrityMonitor {
    /**
     * Attempts to start one job for the pending files.
     *
     * @param newWorker the worker that will run the job
     * @param pri job priority (also encoded in the job name)
     * @param jobFiles the files to process; cleared on successful submission. Also used
     *            as the lock guarding submission.
     * @param detectTime detection timestamp passed to the job
     * @param numFilesSubmitted counter incremented by the number of files submitted
     * @param lastCheckingTime optional counter updated to "now" on every attempt
     * @param maxPendingJobs submission is skipped while this many jobs are pending
     * @return the started Job, or {@code null} if there was nothing to do or the
     *         pending-job limit was reached
     */
    public static Job startOneJob(Worker newWorker, Priority pri, Set<String> jobFiles, long detectTime,
            AtomicLong numFilesSubmitted, AtomicLong lastCheckingTime, long maxPendingJobs)
            throws IOException, InterruptedException, ClassNotFoundException {
        if (lastCheckingTime != null) {
            lastCheckingTime.set(System.currentTimeMillis());
        }
        // Job name encodes worker prefix, a per-worker counter, priority and start time.
        String startTimeStr = dateFormat.format(new Date());
        String jobName = newWorker.JOB_NAME_PREFIX + "." + newWorker.jobCounter + "." + pri + "-pri" + "." + startTimeStr;
        Job job = null;
        // Lock ordering: jobFiles first, then newWorker.jobIndex (keep this order to
        // avoid deadlock with other users of these monitors).
        synchronized (jobFiles) {
            if (jobFiles.size() == 0) {
                return null;
            }
            newWorker.jobCounter++;
            synchronized (newWorker.jobIndex) {
                if (newWorker.jobIndex.size() >= maxPendingJobs) { // full
                    return null;
                }
                job = newWorker.startJob(jobName, jobFiles, pri, detectTime);
            }
            // Only on success: account for the submitted files and reset the batch.
            numFilesSubmitted.addAndGet(jobFiles.size());
            jobFiles.clear();
        }
        return job;
    }
}
public class CommerceTierPriceEntryLocalServiceBaseImpl { /** * Performs a dynamic query on the database and returns the matching rows .
* @ param dynamicQuery the dynamic query
* @ return the matching rows */
@ Override public < T > List < T > dynamicQuery ( DynamicQuery dynamicQuery ) { } }
|
return commerceTierPriceEntryPersistence . findWithDynamicQuery ( dynamicQuery ) ;
|
public class XPathBuilder { /** * < p > < b > Used for finding element process ( to generate xpath address ) < / b > < / p >
* < p > Find element with < b > exact math < / b > of specified class ( equals ) < / p >
* @ param cls class of element
* @ param < T > the element which calls this method
* @ return this element */
@ SuppressWarnings ( "unchecked" ) public < T extends XPathBuilder > T setCls ( final String cls ) { } }
|
this . cls = cls ; return ( T ) this ;
|
public class NamespaceSupport { /** * Declare a Namespace prefix . All prefixes must be declared
* before they are referenced . For example , a SAX driver ( parser )
* would scan an element ' s attributes
* in two passes : first for namespace declarations ,
* then a second pass using { @ link # processName processName ( ) } to
* interpret prefixes against ( potentially redefined ) prefixes .
* < p > This method declares a prefix in the current Namespace
* context ; the prefix will remain in force until this context
* is popped , unless it is shadowed in a descendant context . < / p >
* < p > To declare the default element Namespace , use the empty string as
* the prefix . < / p >
* < p > Note that you must < em > not < / em > declare a prefix after
* you ' ve pushed and popped another Namespace context , or
* treated the declarations phase as complete by processing
* a prefixed name . < / p >
* < p > Note that there is an asymmetry in this library : { @ link
* # getPrefix getPrefix } will not return the " " prefix ,
* even if you have declared a default element namespace .
* To check for a default namespace ,
* you have to look it up explicitly using { @ link # getURI getURI } .
* This asymmetry exists to make it easier to look up prefixes
* for attribute names , where the default prefix is not allowed . < / p >
* @ param prefix The prefix to declare , or the empty string to
* indicate the default element namespace . This may never have
* the value " xml " or " xmlns " .
* @ param uri The Namespace URI to associate with the prefix .
* @ return true if the prefix was legal , false otherwise
* @ see # processName
* @ see # getURI
* @ see # getPrefix */
public boolean declarePrefix ( String prefix , String uri ) { } }
|
if ( prefix . equals ( "xml" ) || prefix . equals ( "xmlns" ) ) { return false ; } else { currentContext . declarePrefix ( prefix , uri ) ; return true ; }
|
public class ZonalDateTime { /** * / * [ deutsch ]
* < p > Vergleicht diese Instanz mit der angegebenen Instanz auf der globalen Zeitachse ( UTC ) . < / p >
* < p > Die lokalen Zeitstempel werden genau dann in Betracht gezogen , wenn die UTC - Zeitpunkte gleich sind .
* Beispiel : < / p >
* < pre >
* List & lt ; String & gt ; dates =
* Arrays . asList ( & quot ; Tue , 29 Feb 2016 17:45:00 CET & quot ; , & quot ; Tue , 29 Feb 2016 16:00:00 EST & quot ; ) ;
* TemporalFormatter & lt ; Moment & gt ; formatter =
* ChronoFormatter . ofMomentPattern (
* & quot ; EEE , dd MMM yyyy HH : mm : ss z & quot ; , PatternType . CLDR , Locale . ENGLISH , ZonalOffset . UTC ) ;
* ZonalDateTime maxDate =
* dates . stream ( )
* . map ( s - & gt ; ZonalDateTime . parse ( s , formatter ) )
* . max ( ZonalDateTime : : compareByMoment )
* . get ( ) ;
* System . out . println ( maxDate ) ; / / 2016-02-29T16UTC - 05:00 [ America / New _ York ]
* < / pre >
* @ param zdt other instance to be compared with
* @ return negative , zero or positive integer if this instance is earlier , simultaneous or later than given arg
* @ see # compareByLocalTimestamp ( ZonalDateTime )
* @ since 3.16/4.13 */
public int compareByMoment ( ZonalDateTime zdt ) { } }
|
int cmp = this . moment . compareTo ( zdt . moment ) ; if ( cmp == 0 ) { cmp = this . timestamp . compareTo ( zdt . timestamp ) ; } return cmp ;
|
public class J2EESecurityManager { /** * Determine if the user is authenticated . The default implementation is to use { @ code getUserPrincipal ( ) ! = null }
* on the HttpServletRequest in the ActionBeanContext .
* @ param bean the current action bean ; used for security decisions
* @ param handler the current event handler ; used for security decisions
* @ return { @ link Boolean # TRUE TRUE } if the user is authenticated , { @ link Boolean # FALSE FALSE } if not , and { @ code null } if undecided */
protected Boolean isUserAuthenticated ( ActionBean bean , Method handler ) { } }
|
return bean . getContext ( ) . getRequest ( ) . getUserPrincipal ( ) != null ;
|
public class InternalPureXbaseParser {
    /**
     * ANTLR-generated parser rule for XBasicForLoopExpression
     * (InternalPureXbase.g:4075:1):
     * {@code 'for' '(' init (',' init)* ';' condition? ';' update (',' update)* ')' body}.
     * Do not hand-edit; regenerate from the grammar instead.
     *
     * @return the parsed {@code EObject} model element (may be partial on failure)
     * @throws RecognitionException on unrecoverable parse errors
     */
    public final EObject ruleXBasicForLoopExpression ( ) throws RecognitionException {
EObject current = null ; Token otherlv_1 = null ; Token otherlv_2 = null ; Token otherlv_4 = null ; Token otherlv_6 = null ; Token otherlv_8 = null ; Token otherlv_10 = null ; Token otherlv_12 = null ; EObject lv_initExpressions_3_0 = null ; EObject lv_initExpressions_5_0 = null ; EObject lv_expression_7_0 = null ; EObject lv_updateExpressions_9_0 = null ; EObject lv_updateExpressions_11_0 = null ; EObject lv_eachExpression_13_0 = null ; enterRule ( ) ; try { // outermost alternative of the rule
{ // 'for' '(' ... ')' body
{ // model element action ( )
// create the XBasicForLoopExpression model element before consuming tokens
{ if ( state . backtracking == 0 ) { current = forceCreateModelElement ( grammarAccess . getXBasicForLoopExpressionAccess ( ) . getXBasicForLoopExpressionAction_0 ( ) , current ) ; } } otherlv_1 = ( Token ) match ( input , 69 , FOLLOW_49 ) ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { newLeafNode ( otherlv_1 , grammarAccess . getXBasicForLoopExpressionAccess ( ) . getForKeyword_1 ( ) ) ; } otherlv_2 = ( Token ) match ( input , 15 , FOLLOW_57 ) ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { newLeafNode ( otherlv_2 , grammarAccess . getXBasicForLoopExpressionAccess ( ) . getLeftParenthesisKeyword_2 ( ) ) ; } // optional init section: expr (',' expr)*
int alt73 = 2 ; int LA73_0 = input . LA ( 1 ) ; if ( ( ( LA73_0 >= RULE_STRING && LA73_0 <= RULE_ID ) || ( LA73_0 >= 14 && LA73_0 <= 15 ) || ( LA73_0 >= 18 && LA73_0 <= 19 ) || LA73_0 == 28 || ( LA73_0 >= 44 && LA73_0 <= 45 ) || LA73_0 == 50 || ( LA73_0 >= 58 && LA73_0 <= 59 ) || LA73_0 == 61 || LA73_0 == 64 || LA73_0 == 66 || ( LA73_0 >= 69 && LA73_0 <= 80 ) ) ) { alt73 = 1 ; } switch ( alt73 ) { case 1 : // first init expression
{ // ( ( lv_initExpressions_3_0 = ruleXExpressionOrVarDeclaration ) )
// nested grouping
{ // lv_initExpressions_3_0 = ruleXExpressionOrVarDeclaration
// parse and add the first init expression
{ if ( state . backtracking == 0 ) { newCompositeNode ( grammarAccess . getXBasicForLoopExpressionAccess ( ) . getInitExpressionsXExpressionOrVarDeclarationParserRuleCall_3_0_0 ( ) ) ; } pushFollow ( FOLLOW_58 ) ; lv_initExpressions_3_0 = ruleXExpressionOrVarDeclaration ( ) ; state . _fsp -- ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { if ( current == null ) { current = createModelElementForParent ( grammarAccess . getXBasicForLoopExpressionRule ( ) ) ; } add ( current , "initExpressions" , lv_initExpressions_3_0 , "org.eclipse.xtext.xbase.Xbase.XExpressionOrVarDeclaration" ) ; afterParserOrEnumRuleCall ( ) ; } } } // zero or more ',' init expressions
loop72 : do { int alt72 = 2 ; int LA72_0 = input . LA ( 1 ) ; if ( ( LA72_0 == 57 ) ) { alt72 = 1 ; } switch ( alt72 ) { case 1 : // ',' followed by another init expression
{ otherlv_4 = ( Token ) match ( input , 57 , FOLLOW_3 ) ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { newLeafNode ( otherlv_4 , grammarAccess . getXBasicForLoopExpressionAccess ( ) . getCommaKeyword_3_1_0 ( ) ) ; } // ( ( lv_initExpressions_5_0 = ruleXExpressionOrVarDeclaration ) )
// nested grouping
{ // lv_initExpressions_5_0 = ruleXExpressionOrVarDeclaration
// parse and add a subsequent init expression
{ if ( state . backtracking == 0 ) { newCompositeNode ( grammarAccess . getXBasicForLoopExpressionAccess ( ) . getInitExpressionsXExpressionOrVarDeclarationParserRuleCall_3_1_1_0 ( ) ) ; } pushFollow ( FOLLOW_58 ) ; lv_initExpressions_5_0 = ruleXExpressionOrVarDeclaration ( ) ; state . _fsp -- ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { if ( current == null ) { current = createModelElementForParent ( grammarAccess . getXBasicForLoopExpressionRule ( ) ) ; } add ( current , "initExpressions" , lv_initExpressions_5_0 , "org.eclipse.xtext.xbase.Xbase.XExpressionOrVarDeclaration" ) ; afterParserOrEnumRuleCall ( ) ; } } } } break ; default : break loop72 ; } } while ( true ) ; } break ; } otherlv_6 = ( Token ) match ( input , 13 , FOLLOW_57 ) ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { newLeafNode ( otherlv_6 , grammarAccess . getXBasicForLoopExpressionAccess ( ) . getSemicolonKeyword_4 ( ) ) ; } // optional loop condition expression
int alt74 = 2 ; int LA74_0 = input . LA ( 1 ) ; if ( ( ( LA74_0 >= RULE_STRING && LA74_0 <= RULE_ID ) || ( LA74_0 >= 14 && LA74_0 <= 15 ) || LA74_0 == 28 || ( LA74_0 >= 44 && LA74_0 <= 45 ) || LA74_0 == 50 || ( LA74_0 >= 58 && LA74_0 <= 59 ) || LA74_0 == 61 || LA74_0 == 64 || LA74_0 == 66 || ( LA74_0 >= 69 && LA74_0 <= 80 ) ) ) { alt74 = 1 ; } switch ( alt74 ) { case 1 : // ( lv_expression_7_0 = ruleXExpression )
{ // nested grouping
// parse and set the condition expression
{ if ( state . backtracking == 0 ) { newCompositeNode ( grammarAccess . getXBasicForLoopExpressionAccess ( ) . getExpressionXExpressionParserRuleCall_5_0 ( ) ) ; } pushFollow ( FOLLOW_59 ) ; lv_expression_7_0 = ruleXExpression ( ) ; state . _fsp -- ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { if ( current == null ) { current = createModelElementForParent ( grammarAccess . getXBasicForLoopExpressionRule ( ) ) ; } set ( current , "expression" , lv_expression_7_0 , "org.eclipse.xtext.xbase.Xbase.XExpression" ) ; afterParserOrEnumRuleCall ( ) ; } } } break ; } otherlv_8 = ( Token ) match ( input , 13 , FOLLOW_60 ) ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { newLeafNode ( otherlv_8 , grammarAccess . getXBasicForLoopExpressionAccess ( ) . getSemicolonKeyword_6 ( ) ) ; } // optional update section: expr (',' expr)*
int alt76 = 2 ; int LA76_0 = input . LA ( 1 ) ; if ( ( ( LA76_0 >= RULE_STRING && LA76_0 <= RULE_ID ) || ( LA76_0 >= 14 && LA76_0 <= 15 ) || LA76_0 == 28 || ( LA76_0 >= 44 && LA76_0 <= 45 ) || LA76_0 == 50 || ( LA76_0 >= 58 && LA76_0 <= 59 ) || LA76_0 == 61 || LA76_0 == 64 || LA76_0 == 66 || ( LA76_0 >= 69 && LA76_0 <= 80 ) ) ) { alt76 = 1 ; } switch ( alt76 ) { case 1 : // first update expression
{ // ( ( lv_updateExpressions_9_0 = ruleXExpression ) )
// nested grouping
{ // lv_updateExpressions_9_0 = ruleXExpression
// parse and add the first update expression
{ if ( state . backtracking == 0 ) { newCompositeNode ( grammarAccess . getXBasicForLoopExpressionAccess ( ) . getUpdateExpressionsXExpressionParserRuleCall_7_0_0 ( ) ) ; } pushFollow ( FOLLOW_38 ) ; lv_updateExpressions_9_0 = ruleXExpression ( ) ; state . _fsp -- ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { if ( current == null ) { current = createModelElementForParent ( grammarAccess . getXBasicForLoopExpressionRule ( ) ) ; } add ( current , "updateExpressions" , lv_updateExpressions_9_0 , "org.eclipse.xtext.xbase.Xbase.XExpression" ) ; afterParserOrEnumRuleCall ( ) ; } } } // zero or more ',' update expressions
loop75 : do { int alt75 = 2 ; int LA75_0 = input . LA ( 1 ) ; if ( ( LA75_0 == 57 ) ) { alt75 = 1 ; } switch ( alt75 ) { case 1 : // ',' followed by another update expression
{ otherlv_10 = ( Token ) match ( input , 57 , FOLLOW_3 ) ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { newLeafNode ( otherlv_10 , grammarAccess . getXBasicForLoopExpressionAccess ( ) . getCommaKeyword_7_1_0 ( ) ) ; } // ( ( lv_updateExpressions_11_0 = ruleXExpression ) )
// nested grouping
{ // lv_updateExpressions_11_0 = ruleXExpression
// parse and add a subsequent update expression
{ if ( state . backtracking == 0 ) { newCompositeNode ( grammarAccess . getXBasicForLoopExpressionAccess ( ) . getUpdateExpressionsXExpressionParserRuleCall_7_1_1_0 ( ) ) ; } pushFollow ( FOLLOW_38 ) ; lv_updateExpressions_11_0 = ruleXExpression ( ) ; state . _fsp -- ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { if ( current == null ) { current = createModelElementForParent ( grammarAccess . getXBasicForLoopExpressionRule ( ) ) ; } add ( current , "updateExpressions" , lv_updateExpressions_11_0 , "org.eclipse.xtext.xbase.Xbase.XExpression" ) ; afterParserOrEnumRuleCall ( ) ; } } } } break ; default : break loop75 ; } } while ( true ) ; } break ; } otherlv_12 = ( Token ) match ( input , 16 , FOLLOW_3 ) ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { newLeafNode ( otherlv_12 , grammarAccess . getXBasicForLoopExpressionAccess ( ) . getRightParenthesisKeyword_8 ( ) ) ; } // mandatory loop body expression
// ( lv_eachExpression_13_0 = ruleXExpression )
{ // nested grouping
// parse and set the loop body expression
{ if ( state . backtracking == 0 ) { newCompositeNode ( grammarAccess . getXBasicForLoopExpressionAccess ( ) . getEachExpressionXExpressionParserRuleCall_9_0 ( ) ) ; } pushFollow ( FOLLOW_2 ) ; lv_eachExpression_13_0 = ruleXExpression ( ) ; state . _fsp -- ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { if ( current == null ) { current = createModelElementForParent ( grammarAccess . getXBasicForLoopExpressionRule ( ) ) ; } set ( current , "eachExpression" , lv_eachExpression_13_0 , "org.eclipse.xtext.xbase.Xbase.XExpression" ) ; afterParserOrEnumRuleCall ( ) ; } } } } } if ( state . backtracking == 0 ) { leaveRule ( ) ; } } catch ( RecognitionException re ) { recover ( input , re ) ; appendSkippedTokens ( ) ; } finally { } return current ;
    }
}
|
public class PathUtil { /** * Obtains the parent of this Path , if exists , else null . For instance if the Path is " / my / path " , the parent will be
* " / my " . Each call will result in a new object reference , though subsequent calls upon the same Path will be equal
* by value .
* @ return
* @ param path
* The path whose parent context we should return */
static ArchivePath getParent ( final ArchivePath path ) { } }
|
// Precondition check
assert path != null : "Path must be specified";
// Normalize away any trailing slash, then locate the final separator
final String normalized = PathUtil.optionallyRemoveFollowingSlash(path.get());
final int separatorIndex = normalized.lastIndexOf(ArchivePath.SEPARATOR);
// The root ("/") and separator-free contexts have no parent
if (separatorIndex == -1 || (separatorIndex == 0 && normalized.length() == 1)) {
    return null;
}
// Everything before the final separator is the parent context
return new BasicPath(normalized.substring(0, separatorIndex));
|
public class Ifc2x3tc1PackageImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
public EClass getIfcPointOnCurve ( ) { } }
|
// Lazily resolves the IfcPointOnCurve EClass from the globally registered package.
// Index 365 is fixed by the generated model metadata. NOTE(review): generated EMF
// code - do not hand-edit; regenerate from the model instead.
if ( ifcPointOnCurveEClass == null ) { ifcPointOnCurveEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( Ifc2x3tc1Package . eNS_URI ) . getEClassifiers ( ) . get ( 365 ) ; } return ifcPointOnCurveEClass ;
|
public class SameDiff { /** * Associate the current SameDiff instance with all ops and variables .
* This is necessary to ensure that when dealing with shared state ( usually with a SameDiff function such
* as " grad " - the backward function ) we have the correct SameDiff instance set for all ops / SDVariables . < br >
* If this is not done , arrays and shapes could be fetched from the incorrect SameDiff instance for some methods */
protected void associateSameDiffWithOpsAndVariables ( ) { } }
|
// Point every variable at this SameDiff instance first.
for ( SDVariable var : variableMap ( ) . values ( ) ) { var . setSameDiff ( this ) ; } // for ( DifferentialFunction df : functionInstancesById . values ( ) ) {
// Then do the same for every op, plus each op's input and output variables.
for ( SameDiffOp op : ops . values ( ) ) { DifferentialFunction df = op . getOp ( ) ; df . setSameDiff ( this ) ; // TODO : This is ugly but seemingly necessary
// Finally , also set the SDVariable for each op
// Otherwise : could have an op pointing to this SameDiff instance , but op ' s SDVariable ' s sameDiff field pointing
// to another SameDiff instance . At which point , they could fetch shapes and arrays from some other instance
// ( i . e . , not from this one that is currently executing )
SDVariable [ ] args = df . args ( ) ; if ( args != null ) { for ( SDVariable arg : args ) { arg . setSameDiff ( this ) ; } } SDVariable [ ] outputs = df . outputVariables ( ) ; if ( outputs != null ) { for ( SDVariable out : outputs ) { out . setSameDiff ( this ) ; } } }
|
public class SyncCommandAction { /** * Returns true if a git location object is null or all of its values are
* empty or null .
* @ param location the location object to test .
* @ return true if the git location object is null or all of its values are empty or null , false otherwise . */
private static boolean isEmptyOrNull ( GitLocation location ) { } }
|
// A null location is trivially "empty"
if (location == null) {
    return true;
}
// Otherwise, empty only when repository, branch and revision are all blank
return StringUtils.isEmptyOrNull(location.getRepository())
        && StringUtils.isEmptyOrNull(location.getBranch())
        && StringUtils.isEmptyOrNull(location.getRevision());
|
public class ListUtil { /** * Replies the index of the given data in the given list according to a
* dichotomic search algorithm . Order between objects
* is given by { @ code comparator } .
* < p > This function assumes that the given list is sorted
* according to the given comparator .
* A dichotomic algorithm is used .
* @ param < T > is the type of the data to search for .
* @ param list is the list inside which the element should be searched .
* @ param comparator is the comparator used to sort the list .
* @ param elt is the element to search for .
* @ return the index at which the element is , or < code > - 1 < / code > if
* the element was not found . */
@ Pure public static < T > int indexOf ( List < T > list , Comparator < ? super T > comparator , T elt ) { } }
|
try {
    assert comparator != null;
    assert list != null;
    // A null element can never be found
    if (elt == null) {
        return -1;
    }
    int first = 0;
    int last = list.size() - 1;
    while (last >= first) {
        // BUG FIX: unsigned shift avoids int overflow of (first + last) on
        // very large lists, unlike the previous (first + last) / 2.
        int center = (first + last) >>> 1;
        final T indata = list.get(center);
        final int cmp = comparator.compare(elt, indata);
        if (cmp == 0) {
            // Rewind to the first element of the run of equal values so the
            // lowest matching index is returned.
            do {
                --center;
            } while (center >= 0 && comparator.compare(elt, list.get(center)) == 0);
            return center + 1;
        } else if (cmp < 0) {
            last = center - 1;
        } else {
            first = center + 1;
        }
    }
} catch (AssertionError e) {
    throw e;
} catch (Throwable exception) {
    // Deliberate best-effort: comparator failures are treated as "not found".
}
return -1;
|
public class TraceVehicleDataSource { /** * Using the startingTime as the relative starting point , sleep this thread
* until the next timestamp would occur .
* @ param startingTime the relative starting time in milliseconds
* @ param timestamp the timestamp to wait for in milliseconds since the
* epoch */
private void waitForNextRecord ( long startingTime , long timestamp ) { } }
|
// Remember the first timestamp seen so all later waits are relative to it.
if (mFirstTimestamp == 0) {
    mFirstTimestamp = timestamp;
    Log.d(TAG, "Storing " + timestamp + " as the first " + "timestamp of the trace file");
}
// Target wall-clock time for this record, relative to the trace start; never
// sleep a negative duration when we are already behind schedule.
long targetTime = startingTime + (timestamp - mFirstTimestamp);
long sleepDuration = Math.max(targetTime - System.currentTimeMillis(), 0);
try {
    Thread.sleep(sleepDuration);
} catch (InterruptedException e) {
    // BUG FIX: restore the interrupt flag instead of silently swallowing it,
    // so callers can observe the interruption and stop trace playback.
    Thread.currentThread().interrupt();
}
|
public class DefaultMetadataService { /** * Creates an entity , instance of the type .
* @ param entityInstanceDefinition json array of entity definitions
* @ return guids - list of guids */
@ Override public CreateUpdateEntitiesResult createEntities ( String entityInstanceDefinition ) throws AtlasException { } }
|
// Reject a null/empty definition up front
final String definition = ParamChecker.notEmpty(entityInstanceDefinition, "Entity instance definition");
// Deserialize the JSON array into typed instances, then delegate to the typed overload
final ITypedReferenceableInstance[] typedInstances = deserializeClassInstances(definition);
return createEntities(typedInstances);
|
public class Output { /** * Write out string
* @ param buf
* Byte buffer to write to
* @ param string
* String to write */
public static void putString ( IoBuffer buf , String string ) { } }
|
// AMF string write: lengths below AMF.LONG_STRING_LENGTH use a 2-byte
// (unsigned short, big-endian) length prefix; longer strings use a 4-byte int.
final byte [ ] encoded = encodeString ( string ) ; if ( encoded . length < AMF . LONG_STRING_LENGTH ) { // write unsigned short
buf . put ( ( byte ) ( ( encoded . length >> 8 ) & 0xff ) ) ; buf . put ( ( byte ) ( encoded . length & 0xff ) ) ; } else { buf . putInt ( encoded . length ) ; } buf . put ( encoded ) ;
|
public class PopulateFirstPartyAudienceSegments { /** * Runs the example .
* @ param adManagerServices the services factory .
* @ param session the session .
* @ param audienceSegmentId the ID of the first party audience segment to populate .
* @ throws ApiException if the API request failed with one or more service errors .
* @ throws RemoteException if the API request failed due to other errors . */
public static void runExample ( AdManagerServices adManagerServices , AdManagerSession session , long audienceSegmentId ) throws RemoteException { } }
|
// Pages through all FIRST_PARTY audience segments matching the given ID,
// printing each one, then issues a single PopulateAudienceSegments action
// against the same (un-paged) statement and reports the number of changes.
// Get the AudienceSegmentService .
AudienceSegmentServiceInterface audienceSegmentService = adManagerServices . get ( session , AudienceSegmentServiceInterface . class ) ; // Create a statement to only select a specified first party audience
// segment .
StatementBuilder statementBuilder = new StatementBuilder ( ) . where ( "WHERE id = :audienceSegmentId and type = :type" ) . orderBy ( "id ASC" ) . limit ( 1 ) . withBindVariableValue ( "audienceSegmentId" , audienceSegmentId ) . withBindVariableValue ( "type" , "FIRST_PARTY" ) ; // Default for total result set size .
// Page through results; totalResultSetSize is refreshed from each page.
int totalResultSetSize = 0 ; do { // Get audience segments by statement .
AudienceSegmentPage page = audienceSegmentService . getAudienceSegmentsByStatement ( statementBuilder . toStatement ( ) ) ; if ( page . getResults ( ) != null ) { totalResultSetSize = page . getTotalResultSetSize ( ) ; int i = page . getStartIndex ( ) ; for ( AudienceSegment audienceSegment : page . getResults ( ) ) { System . out . printf ( "%d) Audience segment with ID %d and name '%s' will be populated.%n" , i ++ , audienceSegment . getId ( ) , audienceSegment . getName ( ) ) ; } } statementBuilder . increaseOffsetBy ( StatementBuilder . SUGGESTED_PAGE_LIMIT ) ; } while ( statementBuilder . getOffset ( ) < totalResultSetSize ) ; System . out . printf ( "Number of audience segments to be populated: %d%n" , totalResultSetSize ) ; if ( totalResultSetSize > 0 ) { // Remove limit and offset from statement .
statementBuilder . removeLimitAndOffset ( ) ; // Create action .
PopulateAudienceSegments action = new PopulateAudienceSegments ( ) ; // Perform action .
// The action applies to every row the statement matches, not just one page.
UpdateResult result = audienceSegmentService . performAudienceSegmentAction ( action , statementBuilder . toStatement ( ) ) ; if ( result != null && result . getNumChanges ( ) > 0 ) { System . out . printf ( "Number of audience segments populated: %d%n" , result . getNumChanges ( ) ) ; } else { System . out . println ( "No audience segments were populated." ) ; } }
|
public class UnXARMojo { /** * Unzip xar artifact and its dependencies .
* @ throws ArchiverException error when unzipping the package .
* @ throws MojoExecutionException error when unzip package . */
private void performUnArchive ( ) throws MojoExecutionException { } }
|
// Resolve the XAR artifact to extract
final Artifact xarArtifact = findArtifact();
getLog().debug(String.format("Source XAR = [%s]", xarArtifact.getFile()));
// Extract the artifact itself, then every XAR it depends on
unpack(xarArtifact.getFile(), this.outputDirectory, "XAR Plugin", true, getIncludes(), getExcludes());
unpackDependentXars(xarArtifact);
|
public class PreferenceFragment { /** * Handles the extra of the arguments , which have been passed to the fragment , that allows to
* show the button , which allows to restore the preferences ' default values .
* @ param arguments
* The arguments , which have been passed to the fragment , as an instance of the class
* { @ link Bundle } . The arguments may not be null */
private void handleShowRestoreDefaultsButtonArgument ( @ NonNull final Bundle arguments ) { } }
|
// Show the restore-defaults button only when the extra is present and true
final boolean showRestoreDefaults = arguments.getBoolean(EXTRA_SHOW_RESTORE_DEFAULTS_BUTTON, false);
showRestoreDefaultsButton(showRestoreDefaults);
|
public class TiledMap { /** * Return the name of a specific object from a specific group .
* @ param groupID
* Index of a group
* @ param objectID
* Index of an object
* @ return The name of an object or null , when error occurred */
public String getObjectName ( int groupID , int objectID ) { } }
|
if ( groupID >= 0 && groupID < objectGroups . size ( ) ) { ObjectGroup grp = ( ObjectGroup ) objectGroups . get ( groupID ) ; if ( objectID >= 0 && objectID < grp . objects . size ( ) ) { GroupObject object = ( GroupObject ) grp . objects . get ( objectID ) ; return object . name ; } } return null ;
|
public class PredicatedImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
public void setPredicate ( RuleElement newPredicate ) { } }
|
// Generated EMF containment setter: detach the old predicate, attach the new
// one, and dispatch the accumulated notification chain. The else-branch fires
// a SET notification even when the value is unchanged, per EMF convention.
// NOTE(review): generated code - regenerate from the model rather than editing.
if ( newPredicate != predicate ) { NotificationChain msgs = null ; if ( predicate != null ) msgs = ( ( InternalEObject ) predicate ) . eInverseRemove ( this , EOPPOSITE_FEATURE_BASE - SimpleAntlrPackage . PREDICATED__PREDICATE , null , msgs ) ; if ( newPredicate != null ) msgs = ( ( InternalEObject ) newPredicate ) . eInverseAdd ( this , EOPPOSITE_FEATURE_BASE - SimpleAntlrPackage . PREDICATED__PREDICATE , null , msgs ) ; msgs = basicSetPredicate ( newPredicate , msgs ) ; if ( msgs != null ) msgs . dispatch ( ) ; } else if ( eNotificationRequired ( ) ) eNotify ( new ENotificationImpl ( this , Notification . SET , SimpleAntlrPackage . PREDICATED__PREDICATE , newPredicate , newPredicate ) ) ;
|
public class NetworkManager { /** * Undeploys components that were removed from the network . */
private void undeployRemovedComponents ( final NetworkContext context , final NetworkContext runningContext , final Handler < AsyncResult < Void > > doneHandler ) { } }
|
// Diff the running context against the new one: any component present in the
// running context but absent from the new context is scheduled for undeploy.
// The counting handler invokes doneHandler once all undeploys complete (or
// with the first failure's cause). If nothing was removed, completes at once.
// Undeploy any components that were removed from the network .
final List < ComponentContext < ? > > removedComponents = new ArrayList < > ( ) ; for ( ComponentContext < ? > runningComponent : runningContext . components ( ) ) { if ( context . component ( runningComponent . name ( ) ) == null ) { removedComponents . add ( runningComponent ) ; } } if ( ! removedComponents . isEmpty ( ) ) { final CountingCompletionHandler < Void > counter = new CountingCompletionHandler < Void > ( removedComponents . size ( ) ) ; counter . setHandler ( new Handler < AsyncResult < Void > > ( ) { @ Override public void handle ( AsyncResult < Void > result ) { if ( result . failed ( ) ) { new DefaultFutureResult < Void > ( result . cause ( ) ) . setHandler ( doneHandler ) ; } else { log . info ( String . format ( "%s - Removed %d components" , NetworkManager . this , removedComponents . size ( ) ) ) ; new DefaultFutureResult < Void > ( ( Void ) null ) . setHandler ( doneHandler ) ; } } } ) ; undeployComponents ( removedComponents , counter ) ; } else { new DefaultFutureResult < Void > ( ( Void ) null ) . setHandler ( doneHandler ) ; }
|
public class UpdateCommand { /** * { @ inheritDoc }
* @ see jp . co . future . uroborosql . client . command . ReplCommand # execute ( org . jline . reader . LineReader , java . lang . String [ ] , jp . co . future . uroborosql . config . SqlConfig , java . util . Properties ) */
@ Override public boolean execute ( final LineReader reader , final String [ ] parts , final SqlConfig sqlConfig , final Properties props ) { } }
|
// REPL "update" command: parts[1] is the SQL name (dots mapped to '/'),
// remaining parts are bound as SQL parameters. Commits on success, rolls back
// on a parameter or SQL error, and always returns true to keep the REPL alive.
// The SqlAgent is closed by try-with-resources.
PrintWriter writer = reader . getTerminal ( ) . writer ( ) ; if ( parts . length >= 2 ) { String sqlName = parts [ 1 ] . replaceAll ( "\\." , "/" ) ; if ( sqlConfig . getSqlManager ( ) . existSql ( sqlName ) ) { try ( SqlAgent agent = sqlConfig . agent ( ) ) { SqlContext ctx = agent . contextFrom ( sqlName ) ; ctx . setSql ( sqlConfig . getSqlManager ( ) . getSql ( ctx . getSqlName ( ) ) ) ; String [ ] params = Arrays . copyOfRange ( parts , 2 , parts . length ) ; SqlParamUtils . setSqlParams ( ctx , params ) ; try { int ans = agent . update ( ctx ) ; agent . commit ( ) ; writer . println ( "update sql[" + sqlName + "] end. row count=" + ans ) ; } catch ( ParameterNotFoundRuntimeException | SQLException ex ) { writer . println ( "Error : " + ex . getMessage ( ) ) ; agent . rollback ( ) ; } } } else { writer . println ( "SQL not found. sql=" + sqlName ) ; } } writer . flush ( ) ; return true ;
|
public class MessageUtils { /** * Retrieve the message from a specific bundle . It does not look on application message bundle
* or default message bundle . If it is required to look on those bundles use getMessageFromBundle instead
* @ param bundleBaseName baseName of ResourceBundle to load localized messages
* @ param messageId id of message
* @ param params parameters to set at localized message
* @ return generated FacesMessage */
public static FacesMessage getMessage ( String bundleBaseName , String messageId , Object params [ ] ) { } }
|
// Delegate to the locale-aware overload, resolving the current Faces locale.
return getMessage ( bundleBaseName , getCurrentLocale ( ) , messageId , params ) ;
|
public class FieldsAndGetters { /** * Returns a { @ code Stream } of all public fields and getter methods which match { @ code predicate } and their values for the given object .
* This method combines the results of { @ link # fields ( Object , Predicate ) } and { @ link # getters ( Object , Predicate ) } . The { @ code Predicate < String > }
* will be passed the field names and the getter names ( which are postfixed by { @ code ( ) } to mark them as methods ) .
* @ see # fields ( Object , Predicate )
* @ see # getters ( Object , Predicate ) */
public static Stream < Map . Entry < String , Object > > fieldsAndGetters ( Object obj , Predicate < String > predicate ) { } }
|
// Field entries are keyed by the plain field name
Stream<Map.Entry<String, Object>> fieldEntries = fields(obj, field -> predicate.test(field.getName()))
        .map(entry -> createEntry(entry.getKey().getName(), entry.getValue()));
// Getter entries are keyed by the method name with "()" appended to mark them as methods
Function<Method, String> getterKey = method -> method.getName() + "()";
Stream<Map.Entry<String, Object>> getterEntries = getters(obj, method -> predicate.test(getterKey.apply(method)))
        .map(entry -> createEntry(getterKey.apply(entry.getKey()), entry.getValue()));
// Fields first, then getters, preserving each sub-stream's order
return Stream.concat(fieldEntries, getterEntries);
|
public class TypeQualifierApplications { /** * Get the effective TypeQualifierAnnotation on given AnnotatedObject . Takes
* into account inherited and default ( outer scope ) annotations . Also takes
* exclusive qualifiers into account .
* @ param o
* an AnnotatedObject
* @ param typeQualifierValue
* a TypeQualifierValue specifying kind of annotation we want to
* look up
* @ return the effective TypeQualifierAnnotation , or null if there is no
* effective TypeQualifierAnnotation on this AnnotatedObject */
public static TypeQualifierAnnotation getEffectiveTypeQualifierAnnotation ( AnnotatedObject o , TypeQualifierValue < ? > typeQualifierValue ) { } }
|
// Synthetic inner-class accessor methods ("access$...") are resolved to the
// field they load so the field's qualifier is used instead; a missing class
// during that resolution is reported and yields null. Otherwise compute the
// effective annotation directly, and if none is found for an exclusive
// qualifier (when CHECK_EXCLUSIVE is on), derive it from the other members
// of the exclusive set.
if ( o instanceof XMethod ) { XMethod m = ( XMethod ) o ; if ( m . getName ( ) . startsWith ( "access$" ) ) { InnerClassAccessMap icam = AnalysisContext . currentAnalysisContext ( ) . getInnerClassAccessMap ( ) ; try { InnerClassAccess ica = icam . getInnerClassAccess ( m . getClassName ( ) , m . getName ( ) ) ; if ( ica != null && ica . isLoad ( ) ) { o = ica . getField ( ) ; } } catch ( ClassNotFoundException e ) { AnalysisContext . reportMissingClass ( e ) ; return null ; } } } TypeQualifierAnnotation tqa = computeEffectiveTypeQualifierAnnotation ( typeQualifierValue , o ) ; final AnnotatedObject o2 = o ; if ( CHECK_EXCLUSIVE && tqa == null && typeQualifierValue . isExclusiveQualifier ( ) ) { tqa = computeExclusiveQualifier ( typeQualifierValue , new ComputeEffectiveTypeQualifierAnnotation ( ) { @ Override public TypeQualifierAnnotation compute ( TypeQualifierValue < ? > tqv ) { return computeEffectiveTypeQualifierAnnotation ( tqv , o2 ) ; } @ Override public String toString ( ) { return o2 . toString ( ) ; } } ) ; } return tqa ;
|
public class ClassUtil { /** * 获得指定类过滤后的Public方法列表
* @ param clazz 查找方法的类
* @ param excludeMethodNames 不包括的方法名列表
* @ return 过滤后的方法列表 */
public static List < Method > getPublicMethods ( Class < ? > clazz , String ... excludeMethodNames ) { } }
|
return getPublicMethods ( clazz , excludeMethodNames ) ;
|
public class MorseDistance { /** * Shift .
* @ param state the state */
private void _shift ( double [ ] [ ] state ) { } }
|
// Rotate the three rows one slot down: state[0] <- state[1] <- state[2] <- old state[0]
final double[] oldestRow = state[0];
state[0] = state[1];
state[1] = state[2];
state[2] = oldestRow;
|
public class RedisClient { /** * add if not exists
* @ param key
* @ param value
* @ param expiration
* @ return false if redis did not execute the option
* @ throws Exception */
public boolean add ( String key , Object value , Integer expiration ) throws Exception { } }
|
Jedis jedis = null;
boolean broken = false;
try {
    jedis = this.jedisPool.getResource();
    long begin = System.currentTimeMillis();
    // SETNX returns 1 when the key was absent and has been set, 0 otherwise.
    Long result = jedis.setnx(SafeEncoder.encode(key), serialize(value));
    // BUG FIX: only apply the TTL when this call actually created the key.
    // Expiring unconditionally (as before) reset the TTL of a pre-existing
    // key even though the add was reported as failed.
    if (result == 1L && expiration > 0) {
        result = result & jedis.expire(key, expiration);
    }
    long end = System.currentTimeMillis();
    if (result == 1L) {
        logger.info("add key:" + key + ", spends: " + (end - begin) + "ms");
    } else {
        logger.info("add key: " + key + " failed, key has already exists! ");
    }
    return result == 1L;
} catch (Exception e) {
    logger.error(e.getMessage(), e);
    // Hand the broken connection back via the dedicated path and remember it,
    // so the finally block does not return the same instance a second time.
    broken = true;
    this.jedisPool.returnBrokenResource(jedis);
    throw e;
} finally {
    // BUG FIX: previously a broken resource was returned twice (once in the
    // catch block, once here), corrupting the pool.
    if (jedis != null && !broken) {
        this.jedisPool.returnResource(jedis);
    }
}
|
public class MappableJournalSegmentWriter { /** * Unmaps the mapped buffer . */
void unmap ( ) { } }
|
// Swap a mapped writer for a plain file-channel writer. The local 'writer'
// deliberately shadows the field to capture the old instance; the field is
// replaced BEFORE the old writer is closed so no caller observes a closed
// writer through the field. No-op when the writer is not memory-mapped.
if ( writer instanceof MappedJournalSegmentWriter ) { JournalWriter < E > writer = this . writer ; this . writer = new FileChannelJournalSegmentWriter < > ( channel , segment , maxEntrySize , index , namespace ) ; writer . close ( ) ; }
|
public class JMSServices { /** * Uses the container - specific qualifier to look up a JMS queue .
* @ param commonName
* the vendor - neutral logical queue name
* @ return javax . jms . Queue */
public Queue getQueue ( Session session , String commonName ) throws ServiceLocatorException { } }
|
// Cache-first lookup: resolve the vendor-neutral name through the naming
// provider only on a miss, then memoize the queue for later calls. Any
// lookup failure is wrapped in a ServiceLocatorException with code -1.
// NOTE(review): queueCache appears unsynchronized here - concurrent misses
// may each perform the lookup; confirm the cache type is thread-safe.
Queue queue = ( Queue ) queueCache . get ( commonName ) ; if ( queue == null ) { try { String name = namingProvider . qualifyJmsQueueName ( commonName ) ; queue = jmsProvider . getQueue ( session , namingProvider , name ) ; if ( queue != null ) queueCache . put ( commonName , queue ) ; } catch ( Exception ex ) { throw new ServiceLocatorException ( - 1 , ex . getMessage ( ) , ex ) ; } } return queue ;
|
public class Event { /** * Returns the signature ID to which this event is associated .
* @ return String
* @ throws HelloSignException thrown if there is a problem parsing the
* backing JSONObject . */
public String getRelatedSignatureId ( ) throws HelloSignException { } }
|
// No metadata on this event means no related signature request
JSONObject metadata = (JSONObject) get(EVENT_METADATA);
if (metadata == null) {
    return null;
}
try {
    return metadata.getString(RELATED_SIGNATURE_ID);
} catch (JSONException ex) {
    // The metadata carries no related-signature entry for this event
    return null;
}
|
public class DataUtil { /** * little - endian or intel format . */
public static long readUnsignedIntegerLittleEndian ( byte [ ] buffer , int offset ) { } }
|
// Assemble four little-endian bytes into an unsigned 32-bit value held in a
// long: byte at offset is least significant, offset+3 most significant.
long result = 0L;
for (int i = 3; i >= 0; i--) {
    result = (result << 8) | (buffer[offset + i] & 0xFF);
}
return result;
|
public class DefaultGroovyMethods { /** * Create a new Collection composed of the elements of the first Iterable minus
* every occurrence of elements of the given Iterable .
* < pre class = " groovyTestCase " >
* assert [ 1 , " a " , true , true , false , 5.3 ] - [ true , 5.3 ] = = [ 1 , " a " , false ]
* < / pre >
* @ param self an Iterable
* @ param removeMe an Iterable of elements to remove
* @ return a new Collection with the given elements removed
* @ since 2.4.0 */
public static < T > Collection < T > minus ( Iterable < T > self , Iterable < ? > removeMe ) { } }
|
// Materialize both iterables as collections and delegate to the
// Collection-based overload, which performs the actual removal.
return minus ( asCollection ( self ) , asCollection ( removeMe ) ) ;
|
public class CmsEncoder { /** * Decodes a String in a way similar to the JavaScript " decodeURIcomponent " function . < p >
* This method can decode Strings that have been encoded in JavaScript with " encodeURIcomponent " ,
* provided " UTF - 8 " is used as encoding . < p >
* < b > Directly exposed for JSP EL < b > , not through { @ link org . opencms . jsp . util . CmsJspElFunctions } . < p >
* @ param source The String to be decoded
* @ param encoding the encoding type
* @ return The decoded String */
public static String unescape ( String source , String encoding ) { } }
|
if (source == null) {
    return null;
}
// The standard decoder treats '+' as a space, but "decodeURIComponent"
// semantics require '+' to stay literal - pre-encode it as "%20" is wrong;
// here '+' is mapped to "%20" so the decoder yields a space for it.
final int length = source.length();
final StringBuilder prepared = new StringBuilder(length);
for (int i = 0; i < length; i++) {
    final char c = source.charAt(i);
    if (c == '+') {
        prepared.append("%20");
    } else {
        prepared.append(c);
    }
}
return decode(prepared.toString(), encoding);
|
public class TrackedTorrent { /** * Count the number of leechers ( non - COMPLETED peers ) on this torrent . */
public int leechers ( ) { } }
|
// A leecher is any tracked peer that has not yet completed the download
int leecherCount = 0;
for (TrackedPeer trackedPeer : this.peers.values()) {
    if (!trackedPeer.isCompleted()) {
        leecherCount++;
    }
}
return leecherCount;
|
public class AutoScalingGroup { /** * The metrics enabled for the group .
* @ param enabledMetrics
* The metrics enabled for the group . */
public void setEnabledMetrics ( java . util . Collection < EnabledMetric > enabledMetrics ) { } }
|
// Null clears the list; otherwise a defensive copy is taken so later changes
// to the caller's collection do not leak into this object. Generated AWS SDK
// setter - keep in sync with withEnabledMetrics.
if ( enabledMetrics == null ) { this . enabledMetrics = null ; return ; } this . enabledMetrics = new com . amazonaws . internal . SdkInternalList < EnabledMetric > ( enabledMetrics ) ;
|
public class BaseOp { /** * 根据APPID , BUCKET , COS _ PATH生成经过URL编码的URL
* @ param request
* 基本类型的请求
* @ return URL字符串
* @ throws AbstractCosException */
protected String buildUrl ( AbstractBaseRequest request ) throws AbstractCosException { } }
|
final String endPoint = this.config.getCosEndPoint();
final int appId = this.cred.getAppId();
final String bucketName = request.getBucketName();
// URL-encode the remote path before splicing it into the URL
final String encodedCosPath = CommonPathUtils.encodeRemotePath(request.getCosPath());
// Layout: <endpoint>/<appid>/<bucket><encoded-path>
return String.format("%s/%s/%s%s", endPoint, appId, bucketName, encodedCosPath);
|
public class ClientBroadcastStream { /** * Pushes a message out to a consumer .
* @ param msg
* StatusMessage */
protected void pushMessage ( StatusMessage msg ) { } }
|
// Without a consumer output there is nowhere to push the message
if (connMsgOut == null) {
    log.warn("Consumer message output is null");
    return;
}
try {
    connMsgOut.pushMessage(msg);
} catch (IOException err) {
    // Log and continue; a single failed push does not tear the stream down
    log.error("Error while pushing message: {}", msg, err);
}
|
public class GrammarFile { /** * This method does the parsing and reacts appropriately to any exceptions .
* @ param tokenStream
* is the token stream to read the grammar from .
* @ throws ParserException
* is thrown in case of a parser issue . */
private void parse ( TokenStream tokenStream ) throws ParserException { } }
|
// Parse the token stream with an SLR(1) parser built from the Uhura grammar.
try {
    Parser parser = new SLR1Parser(uhuraGrammar);
    parserTree = parser.parse(tokenStream);
} catch (GrammarException e) {
    logger.error(e.getMessage(), e);
    // BUG FIX: propagate the original exception as the cause instead of
    // discarding it, so the underlying grammar defect stays visible in
    // stack traces.
    throw new RuntimeException("UhuraGrammar is broken!!!", e);
}
|
public class YamlMappingNode { /** * Adds the specified { @ code key } / { @ code value } pair to this mapping .
* @ param key the key
* @ param value the value
* @ return { @ code this } */
public T put ( YamlNode key , byte value ) { } }
|
// Wrap the primitive byte in a scalar node and delegate to the node overload.
return put ( key , getNodeFactory ( ) . byteNode ( value ) ) ;
|
public class Document { /** * Adds the producer to a Document .
* @ return < CODE > true < / CODE > if successful , < CODE > false < / CODE > otherwise */
public boolean addProducer ( ) { } }
|
// Adds a PRODUCER meta element carrying the library version string; a
// DocumentException here indicates a programming error, so it is rethrown
// unchecked via ExceptionConverter.
try { return add ( new Meta ( Element . PRODUCER , getVersion ( ) ) ) ; } catch ( DocumentException de ) { throw new ExceptionConverter ( de ) ; }
|
public class Instance { /** * The product codes attached to this instance , if applicable .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setProductCodes ( java . util . Collection ) } or { @ link # withProductCodes ( java . util . Collection ) } if you want to
* override the existing values .
* @ param productCodes
* The product codes attached to this instance , if applicable .
* @ return Returns a reference to this object so that method calls can be chained together . */
public Instance withProductCodes ( ProductCode ... productCodes ) { } }
|
// Generated AWS SDK varargs appender: lazily creates the backing list sized
// to the incoming array, appends every element, and returns this for chaining.
if ( this . productCodes == null ) { setProductCodes ( new com . amazonaws . internal . SdkInternalList < ProductCode > ( productCodes . length ) ) ; } for ( ProductCode ele : productCodes ) { this . productCodes . add ( ele ) ; } return this ;
|
public class JDBCRepository { /** * Returns the highest supported level for the given desired level .
* @ return null if not supported */
private static IsolationLevel selectIsolationLevel ( DatabaseMetaData md , IsolationLevel desiredLevel ) throws SQLException , RepositoryException { } }
|
// Escalate the desired isolation level step by step (READ_UNCOMMITTED ->
// READ_COMMITTED -> REPEATABLE_READ -> SERIALIZABLE; SNAPSHOT also falls back
// to SERIALIZABLE) until the driver reports support. Returns null only when
// even SERIALIZABLE is unsupported.
while ( ! md . supportsTransactionIsolationLevel ( mapIsolationLevelToJdbc ( desiredLevel ) ) ) { switch ( desiredLevel ) { case READ_UNCOMMITTED : desiredLevel = IsolationLevel . READ_COMMITTED ; break ; case READ_COMMITTED : desiredLevel = IsolationLevel . REPEATABLE_READ ; break ; case REPEATABLE_READ : desiredLevel = IsolationLevel . SERIALIZABLE ; break ; case SNAPSHOT : desiredLevel = IsolationLevel . SERIALIZABLE ; break ; case SERIALIZABLE : default : return null ; } } return desiredLevel ;
|
public class CmsDefaultXmlContentHandler { /** * Initializes the folder containing the model file ( s ) for this content handler . < p >
* @ param root the " modelfolder " element from the appinfo node of the XML content definition
* @ param contentDefinition the content definition the model folder belongs to
* @ throws CmsXmlException if something goes wrong */
protected void initModelFolder ( Element root , CmsXmlContentDefinition contentDefinition ) throws CmsXmlException { } }
|
// The "uri" attribute on the modelfolder element is mandatory
final String modelFolderUri = root.attributeValue(APPINFO_ATTR_URI);
if (modelFolderUri == null) {
    throw new CmsXmlException(Messages.get().container(
        Messages.ERR_XMLCONTENT_MISSING_MODELFOLDER_URI_2,
        root.getName(),
        contentDefinition.getSchemaLocation()));
}
m_modelFolder = modelFolderUri;
|
public class GCBOXImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
@ Override public boolean eIsSet ( int featureID ) { } }
|
// Generated EMF feature-set check: a feature counts as "set" when its current
// value differs from its (possibly null) default. Unknown IDs defer to super.
// NOTE(review): generated code - regenerate from the model rather than editing.
switch ( featureID ) { case AfplibPackage . GCBOX__RES : return RES_EDEFAULT == null ? res != null : ! RES_EDEFAULT . equals ( res ) ; case AfplibPackage . GCBOX__XPOS1 : return XPOS1_EDEFAULT == null ? xpos1 != null : ! XPOS1_EDEFAULT . equals ( xpos1 ) ; case AfplibPackage . GCBOX__YPOS1 : return YPOS1_EDEFAULT == null ? ypos1 != null : ! YPOS1_EDEFAULT . equals ( ypos1 ) ; case AfplibPackage . GCBOX__HAXIS : return HAXIS_EDEFAULT == null ? haxis != null : ! HAXIS_EDEFAULT . equals ( haxis ) ; case AfplibPackage . GCBOX__VAXIS : return VAXIS_EDEFAULT == null ? vaxis != null : ! VAXIS_EDEFAULT . equals ( vaxis ) ; } return super . eIsSet ( featureID ) ;
|
public class Configuration { /** * Check that the backdrop size is valid
* @ param backdropSize
* @ return */
public boolean isValidBackdropSize ( String backdropSize ) { } }
|
if ( StringUtils . isBlank ( backdropSize ) || backdropSizes . isEmpty ( ) ) { return false ; } return backdropSizes . contains ( backdropSize ) ;
|
public class ERTrees { /** * Measures the statistics of feature importance from the trees in this
* forest .
* @ param < Type >
* @ param data the dataset to infer the feature importance from with respect
* to the current model .
* @ param imp the method of determing the feature importance that will be
* applied to each tree in this model
* @ return an array of statistics , which each index corresponds to a
* specific feature . Numeric features start from the zero index , categorical
* features start from the index equal to the number of numeric features . */
public < Type extends DataSet > OnLineStatistics [ ] evaluateFeatureImportance ( DataSet < Type > data , TreeFeatureImportanceInference imp ) { } }
|
OnLineStatistics [ ] importances = new OnLineStatistics [ data . getNumFeatures ( ) ] ; for ( int i = 0 ; i < importances . length ; i ++ ) importances [ i ] = new OnLineStatistics ( ) ; for ( ExtraTree tree : forrest ) { double [ ] feats = imp . getImportanceStats ( tree , data ) ; for ( int i = 0 ; i < importances . length ; i ++ ) importances [ i ] . add ( feats [ i ] ) ; } return importances ;
|
public class JarClassLoader { /** * 加载Jar到ClassPath
* @ param dir jar文件或所在目录
* @ return JarClassLoader */
public static JarClassLoader load ( File dir ) { } }
|
final JarClassLoader loader = new JarClassLoader ( ) ; loader . addJar ( dir ) ; // 查找加载所有jar
loader . addURL ( dir ) ; // 查找加载所有class
return loader ;
|
public class ApiOvhDedicatedserver { /** * Get this object properties
* REST : GET / dedicated / server / { serviceName } / statistics / raid / { unit }
* @ param serviceName [ required ] The internal name of your dedicated server
* @ param unit [ required ] Raid unit */
public OvhRtmRaid serviceName_statistics_raid_unit_GET ( String serviceName , String unit ) throws IOException { } }
|
String qPath = "/dedicated/server/{serviceName}/statistics/raid/{unit}" ; StringBuilder sb = path ( qPath , serviceName , unit ) ; String resp = exec ( qPath , "GET" , sb . toString ( ) , null ) ; return convertTo ( resp , OvhRtmRaid . class ) ;
|
public class DataSet { /** * Initiates a Join transformation .
* < p > A Join transformation joins the elements of two
* { @ link DataSet DataSets } on key equality and provides multiple ways to combine
* joining elements into one DataSet .
* < p > This method returns a { @ link JoinOperatorSets } on which one of the { @ code where } methods
* can be called to define the join key of the first joining ( i . e . , this ) DataSet .
* @ param other The other DataSet with which this DataSet is joined .
* @ param strategy The strategy that should be used execute the join . If { @ code null } is given , then the
* optimizer will pick the join strategy .
* @ return A JoinOperatorSets to continue the definition of the Join transformation .
* @ see JoinOperatorSets
* @ see DataSet */
public < R > JoinOperatorSets < T , R > join ( DataSet < R > other , JoinHint strategy ) { } }
|
return new JoinOperatorSets < > ( this , other , strategy ) ;
|
public class SimpleBase { /** * Loads a new matrix from a serialized binary file .
* @ see MatrixIO # loadBin ( String )
* @ param fileName File which is to be loaded .
* @ return The matrix .
* @ throws IOException */
public static SimpleMatrix loadBinary ( String fileName ) throws IOException { } }
|
DMatrix mat = MatrixIO . loadBin ( fileName ) ; // see if its a DMatrixRMaj
if ( mat instanceof DMatrixRMaj ) { return SimpleMatrix . wrap ( ( DMatrixRMaj ) mat ) ; } else { // if not convert it into one and wrap it
return SimpleMatrix . wrap ( new DMatrixRMaj ( mat ) ) ; }
|
public class ZaurusTableForm { /** * set all fields for primary keys to not editable */
private void disablePKFields ( ) { } }
|
for ( int i = 0 ; i < primaryKeys . length ; i ++ ) { komponente [ pkColIndex [ i ] ] . setEditable ( false ) ; } // end of for ( int i = 0 ; i < columns . length ; i + + )
|
public class ListContext { /** * Join the contents of the given list separated by the given separator .
* For example , the list [ a , b , c ] with separator ' ' would result in :
* < code > a b c < / code > .
* @ param list The list to join
* @ param separator The separator between values
* @ return The resulting string */
public String join ( List < ? > list , String separator ) { } }
|
int size = list . size ( ) ; StringBuilder buffer = new StringBuilder ( 512 ) ; for ( int i = 0 ; i < size ; i ++ ) { Object item = list . get ( i ) ; if ( i > 0 ) { buffer . append ( separator ) ; } buffer . append ( item . toString ( ) ) ; } return buffer . toString ( ) ;
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.