signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class GetObjectProperties { /** * Uses the new RetrievePropertiesEx method to emulate the now deprecated
* RetrieveProperties method
* @ param propertyFilterSpecList
* @ return list of object content
* @ throws Exception */
private static List < ObjectContent > retrievePropertiesAllObjects ( ConnectionResources connectionResources , List < PropertyFilterSpec > propertyFilterSpecList ) throws RuntimeFaultFaultMsg , InvalidPropertyFaultMsg { } } | VimPortType vimPort = connectionResources . getVimPortType ( ) ; ManagedObjectReference serviceInstance = connectionResources . getServiceInstance ( ) ; ServiceContent serviceContent = vimPort . retrieveServiceContent ( serviceInstance ) ; ManagedObjectReference propertyCollectorReference = serviceContent . getPropertyCollector ( ) ; RetrieveOptions propertyObjectRetrieveOptions = new RetrieveOptions ( ) ; List < ObjectContent > objectContentList = new ArrayList < > ( ) ; RetrieveResult results = vimPort . retrievePropertiesEx ( propertyCollectorReference , propertyFilterSpecList , propertyObjectRetrieveOptions ) ; if ( results != null && results . getObjects ( ) != null && ! results . getObjects ( ) . isEmpty ( ) ) { objectContentList . addAll ( results . getObjects ( ) ) ; } String token = null ; if ( results != null && results . getToken ( ) != null ) { token = results . getToken ( ) ; } while ( token != null && ! token . isEmpty ( ) ) { results = vimPort . continueRetrievePropertiesEx ( propertyCollectorReference , token ) ; token = null ; if ( results != null ) { token = results . getToken ( ) ; if ( results . getObjects ( ) != null && ! results . getObjects ( ) . isEmpty ( ) ) { objectContentList . addAll ( results . getObjects ( ) ) ; } } } return objectContentList ; |
public class Chrono { /** * JAXB compatible dataTime Stamp
* Java 6 does not format Timezone with - 05:00 format , and JAXB XML breaks without it .
* @ return */
public static String dateTime ( Date date ) { } } | GregorianCalendar gc = new GregorianCalendar ( ) ; gc . setTime ( date ) ; return dateTime ( gc ) ; |
public class CSVDumper { /** * - - - - - Private methods */
private static void usage ( ) { } } | display ( "Usage: CSVDumper <params>" ) ; display ( "where <params> are:" ) ; display ( " -app <name> Doradus application name. Default is: {}" , CSVConfig . DEFAULT_APPNAME ) ; display ( " -batchsize <#> Batch size. Default is: {}" , CSVConfig . DEFAULT_BATCH_SIZE ) ; display ( " -compress [T|F] Compress messages. Default is: {}" , CSVConfig . DEFAULT_COMPRESS ) ; display ( " -host <host> Doradus server host name. Default is: {}" , CSVConfig . DEFAULT_HOST ) ; display ( " -id <name> Column name of ID field. Default is: {}" , CSVConfig . DEFAULT_ID_FIELD ) ; display ( " -password <pw> Password for tenant. Default is: {}" , CSVConfig . DEFAULT_PASSWORD ) ; display ( " -port <port> Doradus server port. Default is: {}" , CSVConfig . DEFAULT_PORT ) ; display ( " -root <folder> Root folder of CSV files. Default is: {}" , CSVConfig . DEFAULT_ROOT ) ; display ( " -shard <name> (OLAP only): Name of shard to load. Default is: {}" , CSVConfig . DEFAULT_SHARD ) ; display ( " -tenant <name> Name of tenant to use. Default is: {}" , CSVConfig . DEFAULT_TENANT ) ; display ( " -user <ID> User ID for tenant. Default is: {}" , CSVConfig . DEFAULT_USER ) ; display ( " -workers <#> # of worker threads. Default is: {}" , CSVConfig . DEFAULT_WORKERS ) ; display ( "Reads all records in all tables for the given OLAP or Spider application and dumps" ) ; display ( "them to CSV files found in 'root' folder. TLS options are also available." ) ; System . exit ( 0 ) ; |
public class DefaultGroovyMethods { /** * Iterates through the given array as with inject ( Object [ ] , initialValue , closure ) , but
* using the first element of the array as the initialValue , and then iterating
* the remaining elements of the array .
* @ param self an Object [ ]
* @ param closure a closure
* @ return the result of the last closure call
* @ throws NoSuchElementException if the array is empty .
* @ see # inject ( Object [ ] , Object , Closure )
* @ since 1.8.7 */
public static < E , T , V extends T > T inject ( E [ ] self , @ ClosureParams ( value = FromString . class , options = "E,E" ) Closure < V > closure ) { } } | return inject ( ( Object ) self , closure ) ; |
public class AudioChannel { /** * Use the RTPManager API to create sessions for each jmf
* track of the processor .
* @ return description */
private String createTransmitter ( ) { } } | // Cheated . Should have checked the type .
PushBufferDataSource pbds = ( PushBufferDataSource ) dataOutput ; PushBufferStream [ ] pbss = pbds . getStreams ( ) ; rtpMgrs = new RTPManager [ pbss . length ] ; SessionAddress localAddr , destAddr ; InetAddress ipAddr ; SendStream sendStream ; audioReceiver = new AudioReceiver ( this , jingleMediaSession ) ; int port ; for ( int i = 0 ; i < pbss . length ; i ++ ) { try { rtpMgrs [ i ] = RTPManager . newInstance ( ) ; port = portBase + 2 * i ; ipAddr = InetAddress . getByName ( remoteIpAddress ) ; localAddr = new SessionAddress ( InetAddress . getByName ( this . localIpAddress ) , localPort ) ; destAddr = new SessionAddress ( ipAddr , port ) ; rtpMgrs [ i ] . addReceiveStreamListener ( audioReceiver ) ; rtpMgrs [ i ] . addSessionListener ( audioReceiver ) ; BufferControl bc = ( BufferControl ) rtpMgrs [ i ] . getControl ( "javax.media.control.BufferControl" ) ; if ( bc != null ) { int bl = 160 ; bc . setBufferLength ( bl ) ; } try { rtpMgrs [ i ] . initialize ( localAddr ) ; } catch ( InvalidSessionAddressException e ) { // In case the local address is not allowed to read , we user another local address
SessionAddress sessAddr = new SessionAddress ( ) ; localAddr = new SessionAddress ( sessAddr . getDataAddress ( ) , localPort ) ; rtpMgrs [ i ] . initialize ( localAddr ) ; } rtpMgrs [ i ] . addTarget ( destAddr ) ; LOGGER . severe ( "Created RTP session at " + localPort + " to: " + remoteIpAddress + " " + port ) ; sendStream = rtpMgrs [ i ] . createSendStream ( dataOutput , i ) ; sendStreams . add ( sendStream ) ; sendStream . start ( ) ; } catch ( Exception e ) { LOGGER . log ( Level . WARNING , "exception" , e ) ; return e . getMessage ( ) ; } } return null ; |
public class Alignments { /** * Factory method which computes a similarity score for the given { @ link Sequence } pair .
* @ param < S > each { @ link Sequence } of the pair is of type S
* @ param < C > each element of a { @ link Sequence } is a { @ link Compound } of type C
* @ param query the first { @ link Sequence } to score
* @ param target the second { @ link Sequence } to score
* @ param type chosen type from list of pairwise sequence scoring routines
* @ param gapPenalty the gap penalties used during alignment
* @ param subMatrix the set of substitution scores used during alignment
* @ return sequence pair score */
static < S extends Sequence < C > , C extends Compound > double getPairwiseScore ( S query , S target , PairwiseSequenceScorerType type , GapPenalty gapPenalty , SubstitutionMatrix < C > subMatrix ) { } } | return getPairwiseScorer ( query , target , type , gapPenalty , subMatrix ) . getScore ( ) ; |
public class HCHelper { /** * Recursively iterate all child nodes of the passed node .
* @ param aNode
* The node who ' s children should be iterated .
* @ param aCallback
* The callback to be invoked on every child */
public static void iterateChildrenNonBreakable ( @ Nonnull final IHCNode aNode , @ Nonnull final IHCIteratorNonBreakableCallback aCallback ) { } } | ValueEnforcer . notNull ( aNode , "node" ) ; ValueEnforcer . notNull ( aCallback , "callback" ) ; final ICommonsList < IHCNode > aParentNodes = new CommonsArrayList < > ( ) ; final ICommonsList < IHCNode > aNodes = new CommonsArrayList < > ( ) ; if ( aNode . hasChildren ( ) ) for ( final IHCNode aChild : aNode . getChildren ( ) ) { // Remember nodes for later processing
aParentNodes . add ( aNode ) ; aNodes . add ( aChild ) ; } _iterateTreeNonBreakable ( aParentNodes , aNodes , aCallback ) ; |
public class AfplibPackageImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
public EClass getERG ( ) { } } | if ( ergEClass == null ) { ergEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( AfplibPackage . eNS_URI ) . getEClassifiers ( ) . get ( 259 ) ; } return ergEClass ; |
public class StreamScanner { /** * Simple parsing method that parses system ids , which are generally
* used in entities ( from DOCTYPE declaration to internal / external
* subsets ) .
* NOTE : returned String is not canonicalized , on assumption that
* external ids may be longish , and are not shared all that often , as
* they are generally just used for resolving paths , if anything .
* < br / >
* Also note that this method is not heavily optimized , as it ' s not
* likely to be a bottleneck for parsing . */
protected final String parseSystemId ( char quoteChar , boolean convertLFs , String errorMsg ) throws XMLStreamException { } } | char [ ] buf = getNameBuffer ( - 1 ) ; int ptr = 0 ; while ( true ) { char c = ( mInputPtr < mInputEnd ) ? mInputBuffer [ mInputPtr ++ ] : getNextChar ( errorMsg ) ; if ( c == quoteChar ) { break ; } /* ? ? ? 14 - Jun - 2004 , TSa : Should we normalize linefeeds or not ?
* It seems like we should , for all input . . . so that ' s the way it
* works . */
if ( c == '\n' ) { markLF ( ) ; } else if ( c == '\r' ) { if ( peekNext ( ) == '\n' ) { ++ mInputPtr ; if ( ! convertLFs ) { /* The only tricky thing ; need to preserve 2 - char LF ; need to
* output one char from here , then can fall back to default : */
if ( ptr >= buf . length ) { buf = expandBy50Pct ( buf ) ; } buf [ ptr ++ ] = '\r' ; } c = '\n' ; } else if ( convertLFs ) { c = '\n' ; } } // Other than that , let ' s just append it :
if ( ptr >= buf . length ) { buf = expandBy50Pct ( buf ) ; } buf [ ptr ++ ] = c ; } return ( ptr == 0 ) ? "" : new String ( buf , 0 , ptr ) ; |
public class MaF15 { /** * Evaluates a solution
* @ param solution The solution to evaluate */
@ Override public void evaluate ( DoubleSolution solution ) { } } | int numberOfVariables = solution . getNumberOfVariables ( ) ; int numberOfObjectives = solution . getNumberOfObjectives ( ) ; double [ ] x = new double [ numberOfVariables ] ; double [ ] f = new double [ numberOfObjectives ] ; for ( int i = 0 ; i < numberOfVariables ; i ++ ) { x [ i ] = solution . getVariableValue ( i ) ; } // change x
for ( int i = numberOfObjectives - 1 ; i < numberOfVariables ; i ++ ) { x [ i ] = ( 1 + Math . cos ( ( i + 1 ) / ( double ) numberOfVariables * Math . PI / 2 ) ) * x [ i ] - 10 * x [ 0 ] ; } // evaluate eta , g
double [ ] g = new double [ numberOfObjectives ] ; double sub1 ; for ( int i = 0 ; i < numberOfObjectives ; i = i + 2 ) { double [ ] tx = new double [ sublen15 [ i ] ] ; sub1 = 0 ; for ( int j = 0 ; j < nk15 ; j ++ ) { System . arraycopy ( x , len15 [ i ] + numberOfObjectives - 1 + j * sublen15 [ i ] , tx , 0 , sublen15 [ i ] ) ; sub1 += Griewank ( tx ) ; } g [ i ] = sub1 / ( nk15 * sublen15 [ i ] ) ; } for ( int i = 1 ; i < numberOfObjectives ; i = i + 2 ) { double [ ] tx = new double [ sublen15 [ i ] ] ; sub1 = 0 ; for ( int j = 0 ; j < nk15 ; j ++ ) { System . arraycopy ( x , len15 [ i ] + numberOfObjectives - 1 + j * sublen15 [ i ] , tx , 0 , sublen15 [ i ] ) ; sub1 += Sphere ( tx ) ; } g [ i ] = sub1 / ( nk15 * sublen15 [ i ] ) ; } // evaluate fm , fm - 1 , . . . , 2 , f1
double subf1 = 1 ; f [ numberOfObjectives - 1 ] = ( 1 - Math . sin ( Math . PI * x [ 0 ] / 2 ) ) * ( 1 + g [ numberOfObjectives - 1 ] ) ; for ( int i = numberOfObjectives - 2 ; i > 0 ; i -- ) { subf1 *= Math . cos ( Math . PI * x [ numberOfObjectives - i - 2 ] / 2 ) ; f [ i ] = ( 1 - subf1 * Math . sin ( Math . PI * x [ numberOfObjectives - i - 1 ] / 2 ) ) * ( 1 + g [ i ] + g [ i + 1 ] ) ; } f [ 0 ] = ( 1 - subf1 * Math . cos ( Math . PI * x [ numberOfObjectives - 2 ] / 2 ) ) * ( 1 + g [ 0 ] + g [ 1 ] ) ; for ( int i = 0 ; i < numberOfObjectives ; i ++ ) { solution . setObjective ( i , f [ i ] ) ; } |
public class Proxy { protected void startSession ( ) throws SocksException { } } | System . out . println ( "Session: " + proxyHost + ":" + proxyPort ) ; try { if ( chainProxy == null ) proxySocket = new Socket ( proxyIP , proxyPort ) ; else if ( proxyIP != null ) proxySocket = new SocksSocket ( chainProxy , proxyIP , proxyPort ) ; else proxySocket = new SocksSocket ( chainProxy , proxyHost , proxyPort ) ; in = proxySocket . getInputStream ( ) ; out = proxySocket . getOutputStream ( ) ; } catch ( SocksException se ) { throw se ; } catch ( IOException io_ex ) { throw new SocksException ( SOCKS_PROXY_IO_ERROR , "" + io_ex ) ; } |
public class CassQuery { /** * Returns bytes value for given value .
* @ param jpaFieldName
* field name .
* @ param m
* entity metadata
* @ param value
* value .
* @ return bytes value . */
ByteBuffer getBytesValue ( String jpaFieldName , EntityMetadata m , Object value ) { } } | Attribute idCol = m . getIdAttribute ( ) ; MetamodelImpl metaModel = ( MetamodelImpl ) kunderaMetadata . getApplicationMetadata ( ) . getMetamodel ( m . getPersistenceUnit ( ) ) ; EntityType entity = metaModel . entity ( m . getEntityClazz ( ) ) ; Field f = null ; boolean isId = false ; if ( ( ( AbstractAttribute ) idCol ) . getJPAColumnName ( ) . equals ( jpaFieldName ) ) { f = ( Field ) idCol . getJavaMember ( ) ; isId = true ; } else { if ( jpaFieldName != null && jpaFieldName . indexOf ( Constants . INDEX_TABLE_ROW_KEY_DELIMITER ) > 0 ) { String embeddedFieldName = jpaFieldName . substring ( 0 , jpaFieldName . indexOf ( Constants . INDEX_TABLE_ROW_KEY_DELIMITER ) ) ; String columnFieldName = jpaFieldName . substring ( jpaFieldName . indexOf ( Constants . INDEX_TABLE_ROW_KEY_DELIMITER ) + 1 , jpaFieldName . length ( ) ) ; Attribute embeddedAttr = entity . getAttribute ( embeddedFieldName ) ; try { Class < ? > embeddedClass = embeddedAttr . getJavaType ( ) ; if ( Collection . class . isAssignableFrom ( embeddedClass ) ) { Class < ? > genericClass = PropertyAccessorHelper . getGenericClass ( ( Field ) embeddedAttr . getJavaMember ( ) ) ; f = genericClass . getDeclaredField ( columnFieldName ) ; } else { f = embeddedClass . getDeclaredField ( columnFieldName ) ; } } catch ( SecurityException e ) { log . error ( "Error while extrating " + jpaFieldName + ", Caused by: " , e ) ; throw new QueryHandlerException ( "Error while extrating " + jpaFieldName + "." ) ; } catch ( NoSuchFieldException e ) { log . error ( "Error while extrating " + jpaFieldName + ", Caused by: " , e ) ; throw new QueryHandlerException ( "Error while extrating " + jpaFieldName + "." ) ; } } else { String discriminatorColumn = ( ( AbstractManagedType ) entity ) . getDiscriminatorColumn ( ) ; if ( ! jpaFieldName . equals ( discriminatorColumn ) ) { String fieldName = m . 
getFieldName ( jpaFieldName ) ; Attribute col = entity . getAttribute ( fieldName ) ; if ( col == null ) { throw new QueryHandlerException ( "column type is null for: " + jpaFieldName ) ; } f = ( Field ) col . getJavaMember ( ) ; } } } // need to do integer . parseInt . . as value will be string in case of
// create query .
if ( f != null && f . getType ( ) != null ) { return CassandraUtilities . toBytes ( value , f ) ; } else { // default is String type
return CassandraUtilities . toBytes ( value , String . class ) ; } |
public class StateConsumerImpl { /** * not used in scattered cache */
private void requestSegments ( IntSet segments , Map < Address , IntSet > sources , Set < Address > excludedSources ) { } } | if ( sources . isEmpty ( ) ) { findSources ( segments , sources , excludedSources ) ; } for ( Map . Entry < Address , IntSet > e : sources . entrySet ( ) ) { addTransfer ( e . getKey ( ) , e . getValue ( ) ) ; } |
public class Blade { /** * Stop current blade application
* Will stop synchronization waiting netty service */
public void stop ( ) { } } | this . eventManager . fireEvent ( EventType . SERVER_STOPPING , new Event ( ) . attribute ( "blade" , this ) ) ; this . server . stopAndWait ( ) ; this . eventManager . fireEvent ( EventType . SERVER_STOPPED , new Event ( ) . attribute ( "blade" , this ) ) ; |
public class EntityListenersIntrospector { /** * Returns the metadata of various registered listeners for the given entity class .
* @ param entityClass
* the entity class to introspect
* @ return the metadata of various listeners . */
public static EntityListenersMetadata introspect ( Class < ? > entityClass ) { } } | EntityListenersIntrospector introspector = new EntityListenersIntrospector ( entityClass ) ; introspector . introspect ( ) ; return introspector . metadata ; |
public class ResourceLoader { /** * Loads a resource into a { @ link List } of strings , each entry representing one line .
* @ param resource
* The resource to be loaded .
* @ return The list */
public static List < String > readIntoLines ( final String resource ) throws IOException { } } | BufferedReader br = null ; try { br = ResourceLoader . getBufferedReader ( resource ) ; return doReadIntoLines ( br ) ; } finally { IOUtils . closeQuietly ( br ) ; } |
public class CartUrl { /** * Get Resource Url for GetCart
* @ param cartId Identifier of the cart to delete .
* @ param responseFields Filtering syntax appended to an API call to increase or decrease the amount of data returned inside a JSON object . This parameter should only be used to retrieve data . Attempting to update data using this parameter may cause data loss .
* @ return String Resource Url */
public static MozuUrl getCartUrl ( String cartId , String responseFields ) { } } | UrlFormatter formatter = new UrlFormatter ( "/api/commerce/carts/{cartId}?responseFields={responseFields}" ) ; formatter . formatUrl ( "cartId" , cartId ) ; formatter . formatUrl ( "responseFields" , responseFields ) ; return new MozuUrl ( formatter . getResourceUrl ( ) , MozuUrl . UrlLocation . TENANT_POD ) ; |
public class MimeTypeParser { /** * Check if the passed string is a valid MIME token by checking that the
* length is at least 1 and all chars match the { @ link # isTokenChar ( char ) }
* condition .
* @ param sToken
* The token to be checked . May be < code > null < / code > .
* @ return < code > true < / code > if the passed string is valid token ,
* < code > false < / code > otherwise . */
public static boolean isToken ( @ Nullable final String sToken ) { } } | // Check length
if ( StringHelper . hasNoText ( sToken ) ) return false ; // Check that all chars are token chars
final char [ ] aChars = sToken . toCharArray ( ) ; for ( final char c : aChars ) if ( ! isTokenChar ( c ) ) return false ; return true ; |
public class DelegatingClassResolver { /** * { @ inheritDoc } */
public Iterator < URL > getResources ( String name ) { } } | ArrayList < URL > collectedResources = new ArrayList < URL > ( ) ; for ( IClassResolver resolver : resolvers ) { try { Iterator < URL > iterator = resolver . getResources ( name ) ; if ( iterator == null ) { continue ; } while ( iterator . hasNext ( ) ) { collectedResources . add ( iterator . next ( ) ) ; } } catch ( RuntimeException e ) { LOGGER . warn ( "ClassResolver {} threw an unexpected exception." , resolver , e ) ; return collectedResources . iterator ( ) ; } } return collectedResources . iterator ( ) ; |
public class AddressUtil { /** * Creates a address to the given host , or the wildcard host if the hostname is
* { @ link StringUtil # blank } . */
public static InetSocketAddress getAddress ( String hostname , int port ) { } } | return StringUtil . isBlank ( hostname ) ? new InetSocketAddress ( port ) : new InetSocketAddress ( hostname , port ) ; |
public class S3Utils { /** * Retries S3 operations that fail due to io - related exceptions . Service - level exceptions ( access denied , file not
* found , etc ) are not retried . */
public static < T > T retryS3Operation ( Task < T > f ) throws Exception { } } | final int maxTries = 10 ; return RetryUtils . retry ( f , S3RETRY , maxTries ) ; |
public class ScopedRequestImpl { /** * Add a parameter to the request .
* @ param name the parameter name .
* @ param value the parameter value . */
public void addParameter ( String name , String value ) { } } | if ( _additionalParameters == null ) { _additionalParameters = new HashMap ( ) ; } _additionalParameters . put ( name , value ) ; |
public class MessageAttributeValue { /** * Not implemented . Reserved for future use .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setStringListValues ( java . util . Collection ) } or { @ link # withStringListValues ( java . util . Collection ) } if you
* want to override the existing values .
* @ param stringListValues
* Not implemented . Reserved for future use .
* @ return Returns a reference to this object so that method calls can be chained together . */
public MessageAttributeValue withStringListValues ( String ... stringListValues ) { } } | if ( this . stringListValues == null ) { setStringListValues ( new com . amazonaws . internal . SdkInternalList < String > ( stringListValues . length ) ) ; } for ( String ele : stringListValues ) { this . stringListValues . add ( ele ) ; } return this ; |
public class ScriptUtil { /** * return lua script String
* @ param path
* @ return */
public static String getScript ( String path ) { } } | StringBuilder sb = new StringBuilder ( ) ; InputStream stream = ScriptUtil . class . getClassLoader ( ) . getResourceAsStream ( path ) ; try ( BufferedReader br = new BufferedReader ( new InputStreamReader ( stream ) ) ) { String str ; while ( ( str = br . readLine ( ) ) != null ) { sb . append ( str ) . append ( System . lineSeparator ( ) ) ; } } catch ( IOException e ) { System . err . println ( e . getStackTrace ( ) ) ; } return sb . toString ( ) ; |
public class SarlBatchCompiler { /** * Change the boot classpath .
* This option is only supported on JDK 8 and older and will be ignored when source level is 9 or newer .
* < p > The boot classpath is a list the names of folders or jar files that are separated by { @ link File # pathSeparator } .
* @ param bootClasspath the new boot classpath .
* @ see " https : / / www . oracle . com / technetwork / java / javase / 9 - relnote - issues - 3704069 . html " */
public void setBootClassPath ( String bootClasspath ) { } } | final JavaVersion version = JavaVersion . fromQualifier ( getJavaSourceVersion ( ) ) ; if ( version . isAtLeast ( JavaVersion . JAVA9 ) ) { reportInternalWarning ( MessageFormat . format ( Messages . SarlBatchCompiler_63 , bootClasspath ) ) ; } if ( Strings . isEmpty ( bootClasspath ) ) { this . bootClasspath = null ; } else { this . bootClasspath = new ArrayList < > ( ) ; for ( final String path : Strings . split ( bootClasspath , Pattern . quote ( File . pathSeparator ) ) ) { this . bootClasspath . add ( normalizeFile ( path ) ) ; } } |
public class BottomSheet { /** * Set the icon of the bottom sheet .
* @ param attributeId
* The id of the theme attribute , which supplies the icon , which should be set , as an
* { @ link Integer } value . The id must point to a valid drawable resource */
public final void setIconAttribute ( @ AttrRes final int attributeId ) { } } | TypedArray typedArray = getContext ( ) . getTheme ( ) . obtainStyledAttributes ( new int [ ] { attributeId } ) ; this . icon = typedArray . getDrawable ( 0 ) ; this . iconBitmap = null ; this . iconId = - 1 ; this . iconAttributeId = attributeId ; adaptIcon ( ) ; |
public class NetUtil { /** * 构建InetSocketAddress < br >
* 当host中包含端口时 ( 用 “ : ” 隔开 ) , 使用host中的端口 , 否则使用默认端口 < br >
* 给定host为空时使用本地host ( 127.0.0.1)
* @ param host Host
* @ param defaultPort 默认端口
* @ return InetSocketAddress */
public static InetSocketAddress buildInetSocketAddress ( String host , int defaultPort ) { } } | if ( StrUtil . isBlank ( host ) ) { host = LOCAL_IP ; } String destHost = null ; int port = 0 ; int index = host . indexOf ( ":" ) ; if ( index != - 1 ) { // host : port形式
destHost = host . substring ( 0 , index ) ; port = Integer . parseInt ( host . substring ( index + 1 ) ) ; } else { destHost = host ; port = defaultPort ; } return new InetSocketAddress ( destHost , port ) ; |
public class EncodingUtils { /** * Returns a Vector of locales from the passed in request object .
* @ param req The request object to extract the locales from .
* @ return The extracted locales . */
public static Vector getLocales ( HttpServletRequest req ) { } } | init ( ) ; String acceptLanguage = req . getHeader ( "Accept-Language" ) ; if ( com . ibm . ejs . ras . TraceComponent . isAnyTracingEnabled ( ) && logger . isLoggable ( Level . FINE ) ) { logger . logp ( Level . FINE , CLASS_NAME , "getLocales" , "Accept-Language --> " + acceptLanguage ) ; } // Short circuit with an empty enumeration if null header
if ( ( acceptLanguage == null ) || ( acceptLanguage . trim ( ) . length ( ) == 0 ) ) { Vector def = new Vector ( ) ; def . addElement ( Locale . getDefault ( ) ) ; if ( com . ibm . ejs . ras . TraceComponent . isAnyTracingEnabled ( ) && logger . isLoggable ( Level . FINE ) ) { logger . logp ( Level . FINE , CLASS_NAME , "getLocales" , "processed Locales --> " , def ) ; } return def ; } // Check cache
Vector langList = null ; langList = ( Vector ) localesCache . get ( acceptLanguage ) ; if ( langList == null ) { // Create and add to cache
langList = processAcceptLanguage ( acceptLanguage ) ; if ( WCCustomProperties . VALIDATE_LOCALE_VALUES ) { langList = extractLocales ( langList , true ) ; } else langList = extractLocales ( langList , false ) ; localesCache . put ( acceptLanguage , langList ) ; } if ( com . ibm . ejs . ras . TraceComponent . isAnyTracingEnabled ( ) && logger . isLoggable ( Level . FINE ) ) { logger . logp ( Level . FINE , CLASS_NAME , "getLocales" , "processed Locales --> " + langList ) ; } return langList ; |
public class MtasSolrStatus { /** * Gets the long map .
* @ param response
* the response
* @ param args
* the args
* @ return the long map */
private final Map < String , Long > getLongMap ( NamedList < Object > response , String ... args ) { } } | Object objectItem = response . findRecursive ( args ) ; if ( objectItem != null && objectItem instanceof Map ) { return ( Map ) objectItem ; } else { return null ; } |
public class DTMManagerDefault { /** * Release the DTMManager ' s reference ( s ) to a DTM , making it unmanaged .
* This is typically done as part of returning the DTM to the heap after
* we ' re done with it .
* @ param dtm the DTM to be released .
* @ param shouldHardDelete If false , this call is a suggestion rather than an
* order , and we may not actually release the DTM . This is intended to
* support intelligent caching of documents . . . which is not implemented
* in this version of the DTM manager .
* @ return true if the DTM was released , false if shouldHardDelete was set
* and we decided not to . */
synchronized public boolean release ( DTM dtm , boolean shouldHardDelete ) { } } | if ( DEBUG ) { System . out . println ( "Releasing " + ( shouldHardDelete ? "HARD" : "soft" ) + " dtm=" + // Following shouldn ' t need a nodeHandle , but does . . .
// and doesn ' t seem to report the intended value
dtm . getDocumentBaseURI ( ) ) ; } if ( dtm instanceof SAX2DTM ) { ( ( SAX2DTM ) dtm ) . clearCoRoutine ( ) ; } // Multiple DTM IDs may be assigned to a single DTM .
// The Right Answer is to ask which ( if it supports
// extension , the DTM will need a list anyway ) . The
// Wrong Answer , applied if the DTM can ' t help us ,
// is to linearly search them all ; this may be very
// painful .
// % REVIEW % Should the lookup move up into the basic DTM API ?
if ( dtm instanceof DTMDefaultBase ) { org . apache . xml . utils . SuballocatedIntVector ids = ( ( DTMDefaultBase ) dtm ) . getDTMIDs ( ) ; for ( int i = ids . size ( ) - 1 ; i >= 0 ; -- i ) m_dtms [ ids . elementAt ( i ) >>> DTMManager . IDENT_DTM_NODE_BITS ] = null ; } else { int i = getDTMIdentity ( dtm ) ; if ( i >= 0 ) { m_dtms [ i >>> DTMManager . IDENT_DTM_NODE_BITS ] = null ; } } dtm . documentRelease ( ) ; return true ; |
public class MySQLBaseDAO { /** * Execute a statement with no expected return value within a given transaction .
* @ param tx The transactional { @ link Connection } to use .
* @ param query The query string to prepare .
* @ param function The functional callback to pass a { @ link Query } to . */
protected void execute ( Connection tx , String query , ExecuteFunction function ) { } } | try ( Query q = new Query ( objectMapper , tx , query ) ) { function . apply ( q ) ; } catch ( SQLException ex ) { throw new ApplicationException ( ApplicationException . Code . BACKEND_ERROR , ex ) ; } |
public class MetricsLongValue { /** * Push the metric to the mr .
* The metric is pushed only if it was updated since last push
* Note this does NOT push to JMX
* ( JMX gets the info via { @ link # get ( ) }
* @ param mr */
public synchronized void pushMetric ( final MetricsRecord mr ) { } } | if ( changed ) mr . setMetric ( getName ( ) , value ) ; changed = false ; |
public class AfplibPackageImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
public EEnum getFontDescriptorSpecificationFtWtClass ( ) { } } | if ( fontDescriptorSpecificationFtWtClassEEnum == null ) { fontDescriptorSpecificationFtWtClassEEnum = ( EEnum ) EPackage . Registry . INSTANCE . getEPackage ( AfplibPackage . eNS_URI ) . getEClassifiers ( ) . get ( 112 ) ; } return fontDescriptorSpecificationFtWtClassEEnum ; |
public class FileUtil { /** * Create a soft link between a src and destination
* only on a local disk . HDFS does not support this
* @ param target the target for symlink
* @ param linkname the symlink
* @ return value returned by the command */
public static int symLink ( String target , String linkname ) throws IOException { } } | String cmd = "ln -s " + target + " " + linkname ; Process p = Runtime . getRuntime ( ) . exec ( cmd , null ) ; int returnVal = - 1 ; try { returnVal = p . waitFor ( ) ; } catch ( InterruptedException e ) { // do nothing as of yet
} return returnVal ; |
public class JsonUtils { /** * Create and configure the { @ link ObjectMapper } used for serializing and deserializing
* JSON requests and responses .
* @ return a configured { @ link ObjectMapper } */
public static ObjectMapper buildObjectMapper ( ) { } } | ObjectMapper objectMapper = new ObjectMapper ( ) ; objectMapper . setDateFormat ( new StdDateFormat ( ) ) ; objectMapper . setPropertyNamingStrategy ( new PropertyNamingStrategy . SnakeCaseStrategy ( ) ) ; objectMapper . setSerializationInclusion ( JsonInclude . Include . NON_EMPTY ) ; objectMapper . configure ( DeserializationFeature . FAIL_ON_UNKNOWN_PROPERTIES , false ) ; objectMapper . configure ( DeserializationFeature . READ_ENUMS_USING_TO_STRING , true ) ; objectMapper . configure ( SerializationFeature . WRITE_ENUMS_USING_TO_STRING , true ) ; configureCredentialDetailTypeMapping ( objectMapper ) ; return objectMapper ; |
public class XmlInputStream { /** * Read n characters .
* @ param n the number of characters to read
* @ return the characters read
* @ throws IOException thrown when an error occurs */
private StringBuilder read ( int n ) throws IOException { } } | // Input stream finished ?
boolean eof = false ; // Read that many .
final StringBuilder s = new StringBuilder ( n ) ; while ( s . length ( ) < n && ! eof ) { // Always get from the pushBack buffer .
if ( pushBack . length ( ) == 0 ) { // Read something from the stream into pushBack .
eof = readIntoPushBack ( ) ; } // Pushback only contains deliverable codes .
if ( pushBack . length ( ) > 0 ) { // Grab one character
s . append ( pushBack . charAt ( 0 ) ) ; // Remove it from pushBack
pushBack . deleteCharAt ( 0 ) ; } } return s ; |
public class ClassUseWriter {
    /**
     * Add the packages elements that use the given class.
     *
     * @param contentTree the content tree to which the packages elements will be added
     */
    protected void addPackageList ( Content contentTree ) {
        // Caption: "Packages that use <class>", with a link back to the class.
        Content caption = getTableCaption ( configuration . getContent ( "doclet.ClassUse_Packages.that.use.0" , getLink ( new LinkInfoImpl ( configuration , LinkInfoImpl . Kind . CLASS_USE_HEADER , typeElement ) ) ) ) ;
        // HTML5 output drops the legacy summary attribute; HTML4 keeps it.
        Content table = ( configuration . isOutputHtml5 ( ) ) ? HtmlTree . TABLE ( HtmlStyle . useSummary , caption ) : HtmlTree . TABLE ( HtmlStyle . useSummary , useTableSummary , caption ) ;
        table . addContent ( getSummaryTableHeader ( packageTableHeader , "col" ) ) ;
        Content tbody = new HtmlTree ( HtmlTag . TBODY ) ;
        // Alternate the row style for zebra striping.
        boolean altColor = true ;
        for ( PackageElement pkg : pkgSet ) {
            HtmlTree tr = new HtmlTree ( HtmlTag . TR ) ;
            tr . addStyle ( altColor ? HtmlStyle . altColor : HtmlStyle . rowColor ) ;
            altColor = ! altColor ;
            addPackageUse ( pkg , tr ) ;
            tbody . addContent ( tr ) ;
        }
        table . addContent ( tbody ) ;
        Content li = HtmlTree . LI ( HtmlStyle . blockList , table ) ;
        contentTree . addContent ( li ) ;
    }
}
public class RegistryQuery { /** * deletes a value or a key
* @ param branch
* @ param entry
* @ throws IOException
* @ throws InterruptedException */
public static void deleteValue ( String branch , String entry ) throws IOException , InterruptedException { } } | if ( entry == null ) { String [ ] cmd = new String [ ] { "reg" , "delete" , cleanBrunch ( branch ) , "/f" } ; executeQuery ( cmd ) ; // executeQuery ( " reg delete \ " " + List . trim ( branch , " \ \ " ) + " \ " / f " ) ;
} else { String [ ] cmd = new String [ ] { "reg" , "delete" , cleanBrunch ( branch ) , "/v" , entry , "/f" } ; executeQuery ( cmd ) ; // executeQuery ( " reg delete \ " " + List . trim ( branch , " \ \ " ) + " \ " / v " + entry + " / f " ) ;
} |
public class DefaultIdStrategy { /** * Registers a delegate . Returns true if registration is successful . */
public < T > boolean registerDelegate ( Delegate < T > delegate ) { } } | return null == delegateMapping . putIfAbsent ( delegate . typeClass ( ) . getName ( ) , delegate ) ; |
public class AJAXBroadcastComponent { /** * Evaluates an EL expression into an object .
* @ param p _ expression
* the expression
* @ throws PropertyNotFoundException
* if the attribute doesn ' t exist at all ( as opposed to being
* null )
* @ return the object */
public static MethodExpression evalAsMethodExpression ( String p_expression ) throws PropertyNotFoundException { } } | FacesContext context = FacesContext . getCurrentInstance ( ) ; ExpressionFactory expressionFactory = context . getApplication ( ) . getExpressionFactory ( ) ; ELContext elContext = context . getELContext ( ) ; MethodExpression mex = expressionFactory . createMethodExpression ( elContext , p_expression , Object . class , new Class [ 0 ] ) ; return mex ; |
public class ListRootsRequestMarshaller {
    /**
     * Marshall the given parameter object.
     *
     * @param listRootsRequest   the request to marshal; must not be null
     * @param protocolMarshaller destination marshaller for the bound members
     * @throws SdkClientException if the request is null or marshalling fails
     */
    public void marshall ( ListRootsRequest listRootsRequest , ProtocolMarshaller protocolMarshaller ) {
        if ( listRootsRequest == null ) {
            throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ;
        }
        try {
            // Bind each request member to its wire-level representation.
            protocolMarshaller . marshall ( listRootsRequest . getNextToken ( ) , NEXTTOKEN_BINDING ) ;
            protocolMarshaller . marshall ( listRootsRequest . getMaxResults ( ) , MAXRESULTS_BINDING ) ;
        } catch ( Exception e ) {
            // Wrap any failure, preserving the cause for diagnostics.
            throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ;
        }
    }
}
public class ZipUtils { /** * Zips a collection of files to a destination zip output stream .
* @ param files A collection of files and directories
* @ param outputStream The output stream of the destination zip file
* @ throws FileNotFoundException
* @ throws IOException */
public static void zipFiles ( List < File > files , OutputStream outputStream ) throws IOException { } } | ZipOutputStream zos = new ZipOutputStream ( outputStream ) ; for ( File file : files ) { if ( file . isDirectory ( ) ) { // if it ' s a folder
addFolderToZip ( "" , file , zos ) ; } else { addFileToZip ( "" , file , zos ) ; } } zos . finish ( ) ; |
public class LevelHandler { /** * Flush the handler . */
public void flush ( ) { } } | int level = getLevel ( ) . intValue ( ) ; for ( int i = 0 ; i < _handlers . length ; i ++ ) { Handler handler = _handlers [ i ] ; if ( level <= handler . getLevel ( ) . intValue ( ) ) handler . flush ( ) ; } |
public class Calendar {
    /**
     * Sets the WEEK_OF_MONTH and WEEK_OF_YEAR fields to new values with the
     * new parameter value if they have been calculated internally.
     */
    private void invalidateWeekFields ( ) {
        // Nothing to do unless at least one week field was computed internally.
        if ( stamp [ WEEK_OF_MONTH ] != COMPUTED && stamp [ WEEK_OF_YEAR ] != COMPUTED ) {
            return ;
        }
        // We have to check the new values of these fields after changing
        // firstDayOfWeek and/or minimalDaysInFirstWeek. If the field values
        // have been changed, then set the new values. (4822110)
        // Recompute on a lenient clone so this instance's state is untouched.
        Calendar cal = ( Calendar ) clone ( ) ;
        cal . setLenient ( true ) ;
        cal . clear ( WEEK_OF_MONTH ) ;
        cal . clear ( WEEK_OF_YEAR ) ;
        if ( stamp [ WEEK_OF_MONTH ] == COMPUTED ) {
            int weekOfMonth = cal . get ( WEEK_OF_MONTH ) ;
            if ( fields [ WEEK_OF_MONTH ] != weekOfMonth ) {
                fields [ WEEK_OF_MONTH ] = weekOfMonth ;
            }
        }
        if ( stamp [ WEEK_OF_YEAR ] == COMPUTED ) {
            int weekOfYear = cal . get ( WEEK_OF_YEAR ) ;
            if ( fields [ WEEK_OF_YEAR ] != weekOfYear ) {
                fields [ WEEK_OF_YEAR ] = weekOfYear ;
            }
        }
    }
}
public class ControllerUtil {
    /**
     * Return a 200 OK application/binary response with content-disposition attachment.
     *
     * @param is     The stream to copy. Content is streamed.
     * @param name   Name of file user is downloading.
     * @param length Stream's size in bytes.
     */
    protected static void renderBinary ( InputStream is , String name , long length ) {
        // Framework-style control flow: the result is delivered by throwing it;
        // the surrounding framework catches BinaryResult and streams the body.
        // NOTE(review): the trailing 'false' flag presumably selects
        // attachment (non-inline) disposition — confirm against BinaryResult.
        throw new BinaryResult ( is , name , length , false ) ;
    }
}
public class Authenticator {
    /**
     * Called by the connection management code when an authenticating connection
     * has received its authentication request from the client.
     *
     * @param invoker    executor on which the (potentially slow) authentication runs
     * @param conn       the connection being authenticated
     * @param onComplete notified only when authentication succeeds
     */
    public void authenticateConnection ( Invoker invoker , final AuthingConnection conn , final ResultListener < AuthingConnection > onComplete ) {
        final AuthRequest req = conn . getAuthRequest ( ) ;
        final AuthResponseData rdata = createResponseData ( ) ;
        final AuthResponse rsp = new AuthResponse ( rdata ) ;
        // invoke() runs off the main thread; handleResult() runs back on it.
        invoker . postUnit ( new Invoker . Unit ( "authenticateConnection" ) {
            @ Override public boolean invoke ( ) {
                try {
                    processAuthentication ( conn , rsp ) ;
                    if ( AuthResponseData . SUCCESS . equals ( rdata . code ) && conn . getAuthName ( ) == null ) {
                        // fail early, fail (less) often
                        throw new IllegalStateException ( "Authenticator failed to provide authname" ) ;
                    }
                } catch ( AuthException e ) {
                    // Expected rejections carry their code in the message.
                    rdata . code = e . getMessage ( ) ;
                } catch ( Exception e ) {
                    // Unexpected failures are logged and reported generically.
                    log . warning ( "Error authenticating user" , "areq" , req , e ) ;
                    rdata . code = AuthCodes . SERVER_ERROR ;
                }
                return true ;
            }
            @ Override public void handleResult ( ) {
                // stuff a reference to the auth response into the connection so that we have
                // access to it later in the authentication process
                conn . setAuthResponse ( rsp ) ;
                // send the response back to the client
                conn . postMessage ( rsp ) ;
                // if the authentication request was granted, let the connection manager know that
                // we just authed
                if ( AuthResponseData . SUCCESS . equals ( rdata . code ) ) {
                    onComplete . requestCompleted ( conn ) ;
                }
            }
        } ) ;
    }
}
public class Shape { /** * Applies this shape ' s Shadow .
* @ param context
* @ param attr
* @ return boolean */
protected final void doApplyShadow ( final Context2D context , final Attributes attr ) { } } | if ( ( false == isAppliedShadow ( ) ) && ( attr . hasShadow ( ) ) ) { setAppliedShadow ( true ) ; final Shadow shadow = attr . getShadow ( ) ; if ( null != shadow ) { context . setShadow ( shadow ) ; } } |
public class ReadOnlyStorageEngine {
    /**
     * Rollback to the specified push version.
     *
     * @param rollbackToDir The version directory to rollback to
     */
    public void rollback ( File rollbackToDir ) {
        logger . info ( "Rolling back store '" + getName ( ) + "'" ) ;
        // Hold the write lock for the whole operation so no reader observes a
        // half-rolled-back store.
        fileModificationLock . writeLock ( ) . lock ( ) ;
        try {
            // Validate the requested version directory before touching anything.
            if ( rollbackToDir == null )
                throw new VoldemortException ( "Version directory specified to rollback is null" ) ;
            if ( ! rollbackToDir . exists ( ) )
                throw new VoldemortException ( "Version directory " + rollbackToDir . getAbsolutePath ( ) + " specified to rollback does not exist" ) ;
            long versionId = ReadOnlyUtils . getVersionId ( rollbackToDir ) ;
            if ( versionId == - 1 )
                throw new VoldemortException ( "Cannot parse version id" ) ;
            // Collect every version at or above the rollback target.
            File [ ] backUpDirs = ReadOnlyUtils . getVersionDirs ( storeDir , versionId , Long . MAX_VALUE ) ;
            if ( backUpDirs == null || backUpDirs . length <= 1 ) {
                logger . warn ( "No rollback performed since there are no back-up directories" ) ;
                return ;
            }
            backUpDirs = ReadOnlyUtils . findKthVersionedDir ( backUpDirs , 0 , backUpDirs . length - 1 ) ;
            if ( isOpen )
                close ( ) ;
            // open the rollback directory
            open ( rollbackToDir ) ;
            // back-up all other (newer) directories with a dated .bak suffix
            DateFormat df = new SimpleDateFormat ( "MM-dd-yyyy" ) ;
            for ( int index = 1 ; index < backUpDirs . length ; index ++ ) {
                Utils . move ( backUpDirs [ index ] , new File ( storeDir , backUpDirs [ index ] . getName ( ) + "." + df . format ( new Date ( ) ) + ".bak" ) ) ;
            }
        } finally {
            fileModificationLock . writeLock ( ) . unlock ( ) ;
            logger . info ( "Rollback operation completed on '" + getName ( ) + "', releasing lock." ) ;
        }
    }
}
public class DnsServerAddresses {
    /**
     * Returns the {@link DnsServerAddresses} that yields only a single {@code address}.
     *
     * @param address the one resolved server address to yield; must be non-null and resolved
     * @throws NullPointerException     if {@code address} is null
     * @throws IllegalArgumentException if {@code address} is unresolved
     */
    public static DnsServerAddresses singleton ( final InetSocketAddress address ) {
        if ( address == null ) {
            throw new NullPointerException ( "address" ) ;
        }
        // An unresolved address would fail on every query, so reject it here.
        if ( address . isUnresolved ( ) ) {
            throw new IllegalArgumentException ( "cannot use an unresolved DNS server address: " + address ) ;
        }
        return new SingletonDnsServerAddresses ( address ) ;
    }
}
public class ST_Rotate { /** * Rotates a geometry by a given angle ( in radians ) about the center
* of the geometry ' s envelope .
* @ param geom Geometry
* @ param theta Angle
* @ return The geometry rotated about the center of its envelope */
public static Geometry rotate ( Geometry geom , double theta ) { } } | if ( geom != null ) { Coordinate center = geom . getEnvelopeInternal ( ) . centre ( ) ; return rotate ( geom , theta , center . x , center . y ) ; } else { return null ; } |
public class PropertyListSerialization { /** * Serialize a Number as a real element .
* @ param real
* number to serialize .
* @ param handler
* destination of serialization events .
* @ throws SAXException
* if exception during serialization . */
private static void serializeReal ( final Number real , final ContentHandler handler ) throws SAXException { } } | serializeElement ( "real" , String . valueOf ( real . doubleValue ( ) ) , handler ) ; |
public class PaginatedResult {
    /**
     * Retrieves a List of objects from the result.
     *
     * @param <T>   the type defined in the List
     * @param clazz the type defined in the List
     * @return a Collection of objects from the result.
     * @since 1.0.0
     */
    @ SuppressWarnings ( "unchecked" ) public < T > List < T > getList ( Class < T > clazz ) {
        // clazz only drives the caller-side generic type; the backing list is
        // returned as-is, so the cast is unchecked by design.
        return ( List < T > ) objects ;
    }
}
public class Maze2D {
    /**
     * Find the maximum length of the rows of the maze.
     *
     * @param maze instance of maze
     * @return max length of all rows (0 for an empty maze)
     */
    private int findMaxRowLength ( String [ ] maze ) {
        int longest = 0 ;
        for ( final String row : maze ) {
            longest = Math . max ( longest , row . length ( ) ) ;
        }
        return longest ;
    }
}
public class FilterCriteriaReader14 {
    /** {@inheritDoc} */
    @ Override protected FieldType getFieldType ( byte [ ] block ) {
        // The raw field index is stored as a 32-bit int at offset 8 of the
        // criteria block; map it through the MPP14 field-type tables.
        int fieldIndex = MPPUtility . getInt ( block , 8 ) ;
        return FieldTypeHelper . mapTextFields ( FieldTypeHelper . getInstance14 ( fieldIndex ) ) ;
    }
}
public class SecurityRealm {
    /**
     * Handles the logout processing.
     * The default implementation erases the session and does a few other clean-ups, then
     * redirects the user to the URL specified by {@link #getPostLogOutUrl(StaplerRequest, Authentication)}.
     *
     * @since 1.314
     */
    public void doLogout ( StaplerRequest req , StaplerResponse rsp ) throws IOException , ServletException {
        // Invalidate the HTTP session if one exists (don't create one just to kill it).
        HttpSession session = req . getSession ( false ) ;
        if ( session != null )
            session . invalidate ( ) ;
        // Capture the authentication before clearing the security context,
        // since the post-logout URL may depend on who was logged in.
        Authentication auth = SecurityContextHolder . getContext ( ) . getAuthentication ( ) ;
        SecurityContextHolder . clearContext ( ) ;
        // reset remember-me cookie: expire it immediately, scoped to the app path.
        Cookie cookie = new Cookie ( ACEGI_SECURITY_HASHED_REMEMBER_ME_COOKIE_KEY , "" ) ;
        cookie . setMaxAge ( 0 ) ;
        cookie . setSecure ( req . isSecure ( ) ) ;
        cookie . setHttpOnly ( true ) ;
        cookie . setPath ( req . getContextPath ( ) . length ( ) > 0 ? req . getContextPath ( ) : "/" ) ;
        rsp . addCookie ( cookie ) ;
        rsp . sendRedirect2 ( getPostLogOutUrl ( req , auth ) ) ;
    }
}
public class VirtualFile { /** * Get the children
* @ param filter to filter the children
* @ return the children
* @ throws IOException for any problem accessing the virtual file system
* @ throws IllegalStateException if the file is closed or it is a leaf node */
public List < VirtualFile > getChildren ( VirtualFileFilter filter ) throws IOException { } } | // isDirectory does the read security check
if ( ! isDirectory ( ) ) { return Collections . emptyList ( ) ; } if ( filter == null ) { filter = MatchAllVirtualFileFilter . INSTANCE ; } FilterVirtualFileVisitor visitor = new FilterVirtualFileVisitor ( filter , null ) ; visit ( visitor ) ; return visitor . getMatched ( ) ; |
public class GreenMailUtil {
    /**
     * Counts the number of lines.
     *
     * @param str the input string
     * @return the number of '\n'-separated lines in the string; 0 for null or empty input
     */
    public static int getLineCount ( String str ) {
        if ( str == null || str . isEmpty ( ) ) {
            return 0 ;
        }
        // Count newline characters by scanning with indexOf, then add one for
        // the final (possibly unterminated) line.
        int newlines = 0 ;
        for ( int i = str . indexOf ( '\n' ) ; i >= 0 ; i = str . indexOf ( '\n' , i + 1 ) ) {
            newlines ++ ;
        }
        return newlines + 1 ;
    }
}
public class MediaApi { /** * Log out of all media channels
* Log out the current agent on all media channels . You can make a [ / media / { mediatype } / ready ] ( / reference / workspace / Media / index . html # readyAgentState ) or [ / media / { mediatype } / not - ready ] ( / reference / workspace / Media / index . html # notReadyAgentState ) request to log in to the media channel again .
* @ param mediaLogoutData Request parameters . ( optional )
* @ return ApiSuccessResponse
* @ throws ApiException If fail to call the API , e . g . server error or cannot deserialize the response body */
public ApiSuccessResponse logoutAgentState ( MediaLogoutData mediaLogoutData ) throws ApiException { } } | ApiResponse < ApiSuccessResponse > resp = logoutAgentStateWithHttpInfo ( mediaLogoutData ) ; return resp . getData ( ) ; |
public class RenderUiTemplateRequestMarshaller {
    /**
     * Marshall the given parameter object.
     *
     * @param renderUiTemplateRequest the request to marshal; must not be null
     * @param protocolMarshaller      destination marshaller for the bound members
     * @throws SdkClientException if the request is null or marshalling fails
     */
    public void marshall ( RenderUiTemplateRequest renderUiTemplateRequest , ProtocolMarshaller protocolMarshaller ) {
        if ( renderUiTemplateRequest == null ) {
            throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ;
        }
        try {
            // Bind each request member to its wire-level representation.
            protocolMarshaller . marshall ( renderUiTemplateRequest . getUiTemplate ( ) , UITEMPLATE_BINDING ) ;
            protocolMarshaller . marshall ( renderUiTemplateRequest . getTask ( ) , TASK_BINDING ) ;
            protocolMarshaller . marshall ( renderUiTemplateRequest . getRoleArn ( ) , ROLEARN_BINDING ) ;
        } catch ( Exception e ) {
            // Wrap any failure, preserving the cause for diagnostics.
            throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ;
        }
    }
}
public class Sets { /** * Duplicate set .
* This allow ' s the default Set implementation to be swapped
* easily .
* @ param set The set to dupplicate .
* @ return a new empty set . */
public static < T > Set < T > dup ( Collection < T > collection ) { } } | if ( isEmpty ( collection ) ) return new HashSet < T > ( ) ; return new HashSet < T > ( collection ) ; |
public class BTreeIndex { /** * Traverses the directory to find the leaf page corresponding to the lower
* bound of the specified key range . The method then position the page
* before the first record ( if any ) matching the that lower bound . The leaf
* page is kept open , for use by the methods { @ link # next } and
* { @ link # getDataRecordId } .
* @ see Index # beforeFirst */
@ Override public void beforeFirst ( SearchRange searchRange ) { } } | if ( ! searchRange . isValid ( ) ) return ; search ( searchRange , SearchPurpose . READ ) ; |
public class CacheInvocationContextFactory { /** * Create { @ link CacheInvocationContext } parametrized with { @ link CacheRemove } annotation .
* @ param metaHolder the meta holder , see { @ link com . netflix . hystrix . contrib . javanica . command . MetaHolder }
* @ return initialized and configured { @ link CacheInvocationContext } */
public static CacheInvocationContext < CacheRemove > createCacheRemoveInvocationContext ( MetaHolder metaHolder ) { } } | Method method = metaHolder . getMethod ( ) ; if ( method . isAnnotationPresent ( CacheRemove . class ) ) { CacheRemove cacheRemove = method . getAnnotation ( CacheRemove . class ) ; MethodExecutionAction cacheKeyMethod = createCacheKeyAction ( cacheRemove . cacheKeyMethod ( ) , metaHolder ) ; return new CacheInvocationContext < CacheRemove > ( cacheRemove , cacheKeyMethod , metaHolder . getObj ( ) , method , metaHolder . getArgs ( ) ) ; } return null ; |
public class FunctionType { /** * Returns interfaces implemented directly by a class or its superclass . */
public final ImmutableList < ObjectType > getImplementedInterfaces ( ) { } } | FunctionType superCtor = isConstructor ( ) ? getSuperClassConstructor ( ) : null ; if ( superCtor == null ) { return implementedInterfaces ; } ImmutableList . Builder < ObjectType > builder = ImmutableList . builder ( ) ; builder . addAll ( implementedInterfaces ) ; while ( superCtor != null ) { builder . addAll ( superCtor . implementedInterfaces ) ; superCtor = superCtor . getSuperClassConstructor ( ) ; } return builder . build ( ) ; |
public class AbstractBundleLinkRenderer {
    /**
     * Renders the links for the global bundles.
     *
     * @param ctx     the context
     * @param out     the writer
     * @param debugOn the debug flag
     * @throws IOException if an IOException occurs.
     */
    protected void renderGlobalBundleLinks ( BundleRendererContext ctx , Writer out , boolean debugOn ) throws IOException {
        // In debug mode, bracket the rendered links with marker comments.
        if ( debugOn ) {
            addComment ( "Start adding global members." , out ) ;
        }
        performGlobalBundleLinksRendering ( ctx , out , debugOn ) ;
        // Mark the context so global bundles are not rendered a second time.
        ctx . setGlobalBundleAdded ( true ) ;
        if ( debugOn ) {
            addComment ( "Finished adding global members." , out ) ;
        }
    }
}
public class SortedIntArrayList { /** * Performs a binary search for the provide value .
* It will return any matching element , not necessarily
* the first or the last . */
protected int binarySearch ( int value ) { } } | int left = 0 ; int right = size - 1 ; while ( left <= right ) { int mid = ( left + right ) >> 1 ; int midValue = elementData [ mid ] ; if ( value == midValue ) return mid ; if ( value < midValue ) right = mid - 1 ; else left = mid + 1 ; } return - ( left + 1 ) ; |
public class JavaOutputType {
    /**
     * Emits the {@code calculateCrispValue} method into the generated class:
     * a centroid (center-of-gravity) defuzzifier over a sampled fuzzy set.
     *
     * @param program the program being generated (supplies the epsilon constant)
     * @param clazz   the generated class to receive the method
     */
    private void addCalculateCrispValue ( Program program , Java . CLASS clazz ) {
        Java . METHOD calc = clazz . addMETHOD ( "private" , "Number" , "calculateCrispValue" ) ;
        calc . setComment ( "Calculate the crisp value" ) ;
        calc . setReturnComment ( "the crisp value" ) ;
        calc . addArg ( "Number" , "from" , "Start interval" ) ;
        calc . addArg ( "Number" , "to" , "End interval" ) ;
        calc . addArg ( "Number" , "step" , "Interval step" ) ;
        calc . addArg ( "double[]" , "fuzzy" , "Fuzzy value" ) ;
        // Accumulate total membership (area) and its first moment.
        calc . addS ( "double area = 0.0" ) ;
        calc . addS ( "double moment = 0.0" ) ;
        Java . FOR fout = calc . addFOR ( "int i = 0" , "i < fuzzy.length" , "i++" ) ;
        fout . addS ( "double normalized = from.doubleValue() + (step.doubleValue() * i)" ) ;
        fout . addS ( "area += fuzzy[i]" ) ;
        fout . addS ( "moment += fuzzy[i] * normalized" ) ;
        // Centroid = moment / area, guarded by epsilon against a degenerate
        // (near-zero-area) fuzzy set.
        calc . addS ( "double crisp = Math.abs(area) < " + program . getEpsilon ( ) + " ? " + "to.doubleValue() + step.doubleValue() : moment / area" ) ;
        calc . addRETURN ( "Math.abs(crisp) > " + program . getEpsilon ( ) + " ? crisp : 0.0" ) ;
    }
}
public class Replication {
    /**
     * Set the filter to be used by this replication.
     *
     * @param filterName the name of the filter to apply
     */
    @ InterfaceAudience . Public public void setFilter ( String filterName ) {
        // Record the filter in the public properties map and push it down to
        // the internal replication engine.
        properties . put ( ReplicationField . FILTER_NAME , filterName ) ;
        replicationInternal . setFilter ( filterName ) ;
    }
}
public class DITableInfo { /** * Retrieves , for [ TEMP ] TEXT tables , whether the table ' s data source
* descriptor requests descending read semantics . That is , when this
* value is true , it indicate that the text file is to be read from
* the bottom up . < p >
* @ return whether the table ' s data source
* descriptor requests descending
* read semantics */
Boolean isDataSourceDescending ( ) { } } | if ( table . isText ( ) ) { return ( ( TextTable ) table ) . isDescDataSource ( ) ? Boolean . TRUE : Boolean . FALSE ; } return Boolean . FALSE ; |
public class InjectionSimpleProcessor {
    /**
     * Calls {@link #createInjectionBinding(Annotation, Member)}. Simple
     * injection processors do not support JNDI names, so the jndiName
     * argument is deliberately dropped.
     */
    @ Override public final InjectionBinding < A > createInjectionBinding ( A annotation , Class < ? > instanceClass , Member member , String jndiName ) throws InjectionException {
        return createInjectionBinding ( annotation , instanceClass , member ) ;
    }
}
public class DebugWsDelegate {
    /**
     * Checks the DM is correctly connected with the messaging server.
     *
     * @param message a customized message content (optional; appended as a query parameter)
     * @return the content of the response
     * @throws DebugWsException if the web service replies with a non-2xx status
     */
    public String checkMessagingConnectionForTheDm ( String message ) throws DebugWsException {
        this . logger . finer ( "Checking messaging connection with the DM: message=" + message ) ;
        WebResource path = this . resource . path ( UrlConstants . DEBUG ) . path ( "check-dm" ) ;
        // The message is optional; only send the query parameter when present.
        if ( message != null )
            path = path . queryParam ( "message" , message ) ;
        ClientResponse response = this . wsClient . createBuilder ( path ) . get ( ClientResponse . class ) ;
        // Any non-successful (non-2xx) family is surfaced as a DebugWsException
        // carrying the status code and the response body.
        if ( Family . SUCCESSFUL != response . getStatusInfo ( ) . getFamily ( ) ) {
            String value = response . getEntity ( String . class ) ;
            this . logger . finer ( response . getStatusInfo ( ) + ": " + value ) ;
            throw new DebugWsException ( response . getStatusInfo ( ) . getStatusCode ( ) , value ) ;
        }
        this . logger . finer ( String . valueOf ( response . getStatusInfo ( ) ) ) ;
        return response . getEntity ( String . class ) ;
    }
}
public class AmazonRedshiftClient {
    /**
     * Modifies the parameters of a parameter group.
     * For more information about parameters and parameter groups, go to
     * <a href="https://docs.aws.amazon.com/redshift/latest/mgmt/working-with-parameter-groups.html">Amazon
     * Redshift Parameter Groups</a> in the <i>Amazon Redshift Cluster Management Guide</i>.
     *
     * @param request the ModifyClusterParameterGroup request
     * @return Result of the ModifyClusterParameterGroup operation returned by the service.
     * @throws ClusterParameterGroupNotFoundException
     *         The parameter group name does not refer to an existing parameter group.
     * @throws InvalidClusterParameterGroupStateException
     *         The cluster parameter group action can not be completed because another task is in progress that
     *         involves the parameter group. Wait a few moments and try the operation again.
     * @sample AmazonRedshift.ModifyClusterParameterGroup
     */
    @ Override public ModifyClusterParameterGroupResult modifyClusterParameterGroup ( ModifyClusterParameterGroupRequest request ) {
        // Run the client-side interceptors/validation, then dispatch.
        request = beforeClientExecution ( request ) ;
        return executeModifyClusterParameterGroup ( request ) ;
    }
}
public class Probe { /** * set the payload type for the response from the Responder . It only support
* XML and JSON .
* @ param respondToPayloadType a payload type string - XML or JSON
* @ throws UnsupportedPayloadType if you don ' t pass in XML or JSON */
public void setRespondToPayloadType ( String respondToPayloadType ) throws UnsupportedPayloadType { } } | // Sanity check on the payload type values . Should be XML or JSON
// If the probe goes out with a bad value here , then the Responder may have
// problems
if ( respondToPayloadType == null || respondToPayloadType . isEmpty ( ) || ( ! respondToPayloadType . equals ( ProbeWrapper . JSON ) && ! respondToPayloadType . equals ( ProbeWrapper . XML ) ) ) throw new UnsupportedPayloadType ( "Attempting to set payload type to: " + respondToPayloadType + ". Cannot be null or empty and must be " + ProbeWrapper . JSON + " or " + ProbeWrapper . XML ) ; _probe . setRespondToPayloadType ( respondToPayloadType ) ; |
public class FeatureListGrid { /** * This method is used only when selection is enabled ( see setSelectionEnabled ) . When a feature selection event is
* sent out from the MapModel , check if we have that row deselected and select it . */
public void onFeatureSelected ( FeatureSelectedEvent event ) { } } | Feature feature = event . getFeature ( ) ; // Only select if it is actually deselected :
boolean selected = false ; ListGridRecord [ ] selections = getSelection ( ) ; for ( ListGridRecord selection : selections ) { if ( selection . getAttribute ( FIELD_NAME_FEATURE_ID ) . equals ( feature . getId ( ) ) ) { selected = true ; break ; } } // If deselected , find the correct row and select :
if ( ! selected ) { ListGridRecord [ ] records = this . getRecords ( ) ; for ( int i = 0 ; i < records . length ; i ++ ) { if ( records [ i ] . getAttribute ( FIELD_NAME_FEATURE_ID ) . equals ( feature . getId ( ) ) ) { selectRecord ( i ) ; break ; } } } |
public class GenericTypeResolver {
    /**
     * Resolve the type arguments of the given generic interface against the given
     * target class which is assumed to implement the generic interface and possibly
     * declare concrete types for its type variables.
     *
     * @param clazz      the target class to check against
     * @param genericIfc the generic interface or superclass to resolve the type argument from
     * @return the resolved type of each argument, with the array size matching the
     *         number of actual type arguments, or {@code null} if not resolvable
     */
    public static Class < ? > [ ] resolveTypeArguments ( Class < ? > clazz , Class < ? > genericIfc ) {
        // View clazz "as" the generic interface/superclass, then resolve its generics.
        ResolvableType type = ResolvableType . forClass ( clazz ) . as ( genericIfc ) ;
        // Bail out when no generics are declared or none can be resolved at all.
        if ( ! type . hasGenerics ( ) || type . isEntirelyUnresolvable ( ) ) {
            return null ;
        }
        // Unresolvable individual variables fall back to Object.class.
        return type . resolveGenerics ( Object . class ) ;
    }
}
public class VariableReferenceCheck {
    /**
     * Reports an unused local variable assignment, except in cases that are
     * deliberately exempt (goog.scope bodies, typedefs, and module-level
     * aliases/imports).
     */
    private void checkForUnusedLocalVar ( Var v , Reference unusedAssignment ) {
        // Only locals are checked; globals may be used from other files.
        if ( ! v . isLocal ( ) ) {
            return ;
        }
        // Typedef declarations exist only for the type system; never warn.
        JSDocInfo jsDoc = NodeUtil . getBestJSDocInfo ( unusedAssignment . getNode ( ) ) ;
        if ( jsDoc != null && jsDoc . hasTypedefType ( ) ) {
            return ;
        }
        // Detect variables declared directly inside a goog.scope(function(){...}) body.
        boolean inGoogScope = false ;
        Scope s = v . getScope ( ) ;
        if ( s . isFunctionBlockScope ( ) ) {
            Node function = s . getRootNode ( ) . getParent ( ) ;
            Node callee = function . getPrevious ( ) ;
            inGoogScope = callee != null && callee . matchesQualifiedName ( "goog.scope" ) ;
        }
        if ( inGoogScope ) {
            // No warning.
            return ;
        }
        if ( s . isModuleScope ( ) ) {
            Node statement = NodeUtil . getEnclosingStatement ( v . getNode ( ) ) ;
            if ( NodeUtil . isNameDeclaration ( statement ) ) {
                Node lhs = statement . getFirstChild ( ) ;
                Node rhs = lhs . getFirstChild ( ) ;
                if ( rhs != null && ( NodeUtil . isCallTo ( rhs , "goog.forwardDeclare" ) || NodeUtil . isCallTo ( rhs , "goog.requireType" ) || NodeUtil . isCallTo ( rhs , "goog.require" ) || rhs . isQualifiedName ( ) ) ) {
                    // No warning. Module imports will be caught by the unused-require check, and if the
                    // right side is a qualified name then this is likely an alias used in type annotations.
                    return ;
                }
            }
        }
        compiler . report ( JSError . make ( unusedAssignment . getNode ( ) , UNUSED_LOCAL_ASSIGNMENT , v . name ) ) ;
    }
}
public class JmsJcaManagedConnection {
    /**
     * Associates the given session with this managed connection. Removes the
     * session from its previous managed connection, if any, and adds it to the
     * set for this connection. Sets this managed connection on the session.
     *
     * @param object the session to associate
     * @throws ResourceException generic exception (internal error if the object
     *         is not a {@code JmsJcaSessionImpl})
     */
    @ Override final public void associateConnection ( Object object ) throws ResourceException {
        if ( TraceComponent . isAnyTracingEnabled ( ) && TRACE . isEntryEnabled ( ) ) {
            SibTr . entry ( this , TRACE , "associateConnection" , object ) ;
        }
        if ( object instanceof JmsJcaSessionImpl ) {
            final JmsJcaSessionImpl session = ( JmsJcaSessionImpl ) object ;
            // associate() detaches the session from its old managed connection.
            session . associate ( this ) ;
            _sessions . add ( session ) ;
        } else {
            // Anything other than our session type is a resource-adapter error.
            throw new ResourceAdapterInternalException ( NLS . getFormattedMessage ( ( "INVALID_SESSION_CWSJR1104" ) , new Object [ ] { "associateConnection" , JmsJcaSessionImpl . class . getName ( ) , ( object == null ? "null" : object . getClass ( ) . getName ( ) ) } , null ) ) ;
        }
        if ( TraceComponent . isAnyTracingEnabled ( ) && TRACE . isEntryEnabled ( ) ) {
            SibTr . exit ( this , TRACE , "associateConnection" ) ;
        }
    }
}
public class HttpServerBuilder { /** * Sets the connectors .
* @ param connectors
* @ return */
public HttpServerBuilder connectors ( List < Connector > connectors ) { } } | endpoint . setConnectors ( connectors . toArray ( new Connector [ connectors . size ( ) ] ) ) ; return this ; |
public class Jackson { /** * Creates a new { @ link ObjectMapper } with a custom { @ link com . fasterxml . jackson . core . JsonFactory }
* with Guava , Logback , and Joda Time support , as well as support for { @ link JsonSnakeCase } .
* Also includes all { @ link Discoverable } interface implementations .
* @ param jsonFactory instance of { @ link com . fasterxml . jackson . core . JsonFactory } to use
* for the created { @ link com . fasterxml . jackson . databind . ObjectMapper } instance . */
public static ObjectMapper newObjectMapper ( @ Nullable JsonFactory jsonFactory ) { } } | final ObjectMapper mapper = new ObjectMapper ( jsonFactory ) ; return configure ( mapper ) ; |
public class FillOptions {
    /**
     * Creates FillOptions out of a Feature.
     *
     * @param feature feature to be converted; must carry a geometry
     * @return the options, or {@code null} when the geometry is not a Polygon
     * @throws RuntimeException if the feature has no geometry at all
     */
    @ Nullable static FillOptions fromFeature ( @ NonNull Feature feature ) {
        if ( feature . geometry ( ) == null ) {
            throw new RuntimeException ( "geometry field is required" ) ;
        }
        // Fills only apply to polygon geometries.
        if ( ! ( feature . geometry ( ) instanceof Polygon ) ) {
            return null ;
        }
        FillOptions options = new FillOptions ( ) ;
        options . geometry = ( Polygon ) feature . geometry ( ) ;
        // Copy each optional style property only when the feature defines it.
        if ( feature . hasProperty ( PROPERTY_FILL_OPACITY ) ) {
            options . fillOpacity = feature . getProperty ( PROPERTY_FILL_OPACITY ) . getAsFloat ( ) ;
        }
        if ( feature . hasProperty ( PROPERTY_FILL_COLOR ) ) {
            options . fillColor = feature . getProperty ( PROPERTY_FILL_COLOR ) . getAsString ( ) ;
        }
        if ( feature . hasProperty ( PROPERTY_FILL_OUTLINE_COLOR ) ) {
            options . fillOutlineColor = feature . getProperty ( PROPERTY_FILL_OUTLINE_COLOR ) . getAsString ( ) ;
        }
        if ( feature . hasProperty ( PROPERTY_FILL_PATTERN ) ) {
            options . fillPattern = feature . getProperty ( PROPERTY_FILL_PATTERN ) . getAsString ( ) ;
        }
        if ( feature . hasProperty ( PROPERTY_IS_DRAGGABLE ) ) {
            options . isDraggable = feature . getProperty ( PROPERTY_IS_DRAGGABLE ) . getAsBoolean ( ) ;
        }
        return options ;
    }
}
public class FactoryFinder { /** * Try to find provider using Jar Service Provider Mechanism
* @ return instance of provider class if found or null */
private static Object findJarServiceProvider ( final String factoryId ) throws ConfigurationError { } } | final String serviceId = "META-INF/services/" + factoryId ; InputStream is = null ; // First try the Context ClassLoader
ClassLoader cl = ss . getContextClassLoader ( ) ; if ( cl != null ) { is = ss . getResourceAsStream ( cl , serviceId ) ; // If no provider found then try the current ClassLoader
if ( is == null ) { cl = FactoryFinder . class . getClassLoader ( ) ; is = ss . getResourceAsStream ( cl , serviceId ) ; } } else { // No Context ClassLoader , try the current
// ClassLoader
cl = FactoryFinder . class . getClassLoader ( ) ; is = ss . getResourceAsStream ( cl , serviceId ) ; } if ( is == null ) { // No provider found
return null ; } dPrint ( "found jar resource=" + serviceId + " using ClassLoader: " + cl ) ; // Read the service provider name in UTF - 8 as specified in
// the jar spec . Unfortunately this fails in Microsoft
// VJ + + , which does not implement the UTF - 8
// encoding . Theoretically , we should simply let it fail in
// that case , since the JVM is obviously broken if it
// doesn ' t support such a basic standard . But since there
// are still some users attempting to use VJ + + for
// development , we have dropped in a fallback which makes a
// second attempt using the platform ' s default encoding . In
// VJ + + this is apparently ASCII , which is a subset of
// UTF - 8 . . . and since the strings we ' ll be reading here are
// also primarily limited to the 7 - bit ASCII range ( at
// least , in English versions ) , this should work well
// enough to keep us on the air until we ' re ready to
// officially decommit from VJ + + . [ Edited comment from
// jkesselm ]
BufferedReader rd ; try { rd = new BufferedReader ( new InputStreamReader ( is , "UTF-8" ) ) ; } catch ( final java . io . UnsupportedEncodingException e ) { rd = new BufferedReader ( new InputStreamReader ( is ) ) ; } String factoryClassName = null ; try { // XXX Does not handle all possible input as specified by the
// Jar Service Provider specification
factoryClassName = rd . readLine ( ) ; rd . close ( ) ; } catch ( final IOException x ) { // No provider found
return null ; } if ( factoryClassName != null && ! "" . equals ( factoryClassName ) ) { dPrint ( "found in resource, value=" + factoryClassName ) ; // Note : here we do not want to fall back to the current
// ClassLoader because we want to avoid the case where the
// resource file was found using one ClassLoader and the
// provider class was instantiated using a different one .
return newInstance ( factoryClassName , cl , false ) ; } // No provider found
return null ; |
public class SheetBase { /** * Update the sort direction
* @ param sortOrder */
public void setSortOrder ( final java . lang . String sortOrder ) { } } | // when updating , make sure we store off the original so it may be
// restored
final String orig = ( String ) getStateHelper ( ) . get ( PropertyKeys . origSortOrder ) ; if ( orig == null ) { // do not call getSortOrder as it defaults to ascending , we want
// null
// if this is the first call and there is no previous value .
getStateHelper ( ) . put ( PropertyKeys . origSortOrder , getStateHelper ( ) . eval ( PropertyKeys . sortOrder ) ) ; } getStateHelper ( ) . put ( PropertyKeys . sortOrder , sortOrder ) ; |
public class NodeIndexer { /** * Adds the property name to the lucene _ : PROPERTIES _ SET field .
* @ param doc the document .
* @ param name the name of the property .
* @ throws RepositoryException */
private void addPropertyName ( Document doc , InternalQName name ) throws RepositoryException { } } | String fieldName = name . getName ( ) ; try { fieldName = resolver . createJCRName ( name ) . getAsString ( ) ; } catch ( NamespaceException e ) { // will never happen
if ( LOG . isTraceEnabled ( ) ) { LOG . trace ( "An exception occurred: " + e . getMessage ( ) ) ; } } doc . add ( new Field ( FieldNames . PROPERTIES_SET , fieldName , Field . Store . NO , Field . Index . NOT_ANALYZED_NO_NORMS ) ) ; |
public class SubsequentRequestDispatcher {
    /**
     * Routes a subsequent request statelessly ("orphan" routing) to a proxy
     * application, to improve performance and memory usage.
     * See http://code.google.com/p/mobicents/issues/detail?id=2547
     *
     * @param sipProvider provider used to send the re-routed request
     * @param sipServletRequest the request being routed
     * @param applicationId app session id to stamp on the orphan request
     * @param sipContext context of the target proxy application
     * @throws DispatcherException wrapping any servlet/IO failure
     */
    private static void handleOrphanRequest(final SipProvider sipProvider, final SipServletRequestImpl sipServletRequest, String applicationId, final SipContext sipContext) throws DispatcherException {
        final String applicationName = sipContext.getApplicationName();
        final Request request = (Request) sipServletRequest.getMessage();
        // Nullify the session references so that under the race described in
        // http://code.google.com/p/mobicents/issues/detail?id=2937 callers see
        // null instead of an already-invalidated sip application session.
        sipServletRequest.setSipSession(null);
        sipServletRequest.setSipSessionKey(null);
        sipServletRequest.setOrphan(true);
        sipServletRequest.setAppSessionId(applicationId);
        sipServletRequest.setCurrentApplicationName(applicationName);
        try {
            MessageDispatcher.callServletForOrphanRequest(sipContext, sipServletRequest);
            try {
                String transport = JainSipUtils.findTransport(request);
                SipConnector connector = StaticServiceHolder.sipStandardService.findSipConnector(transport);
                String branch = ((ViaHeader) sipServletRequest.getMessage().getHeader(ViaHeader.NAME)).getBranch();
                // Build a new Via whose branch encodes "orphan" plus the app
                // hash, so responses can be routed back statelessly.
                ViaHeader via = JainSipUtils.createViaHeader(sipContext.getSipApplicationDispatcher().getSipNetworkInterfaceManager(), request, JainSipUtils.createBranch("orphan", sipContext.getSipApplicationDispatcher().getHashFromApplicationName(applicationName), Integer.toString(branch.hashCode()) + branch.substring(branch.length() / 2)), null);
                if (connector.isUseStaticAddress()) {
                    // Advertise the connector's static address instead of the
                    // locally bound one when so configured.
                    try {
                        via.setHost(connector.getStaticServerAddress());
                        via.setPort(connector.getStaticServerPort());
                    } catch (Exception e) {
                        throw new RuntimeException(e);
                    }
                }
                sipServletRequest.getMessage().addHeader(via);
                sipProvider.sendRequest((Request) sipServletRequest.getMessage());
                sipContext.getSipApplicationDispatcher().updateRequestsStatistics(request, false);
            } catch (SipException e) {
                // Send failure is logged, not propagated: the servlet has
                // already been invoked at this point.
                logger.error("Error routing orphaned request", e);
            }
            return;
        } catch (ServletException e) {
            throw new DispatcherException(Response.SERVER_INTERNAL_ERROR, "An unexpected servlet exception occured while processing the following subsequent request " + request, e);
        } catch (IOException e) {
            throw new DispatcherException(Response.SERVER_INTERNAL_ERROR, "An unexpected servlet exception occured while processing the following subsequent request " + request, e);
        }
    }
}
public class StringProducers {
    /**
     * Builds the global (singleton) producer using double-checked locking.
     *
     * NOTE(review): double-checked locking is only safe under the Java memory
     * model if the {@code instance} field is declared {@code volatile}; the
     * field declaration is outside this view — confirm it.
     *
     * @param zookeeperConfig connect config of zookeeper; ex: 127.0.0.1:2181/jafka
     * @return the global producer
     */
    public static StringProducers buildGlobal(String zookeeperConfig) {
        if (instance == null) {
            synchronized (StringProducers.class) {
                if (instance == null) {
                    final Properties props = new Properties();
                    props.put("zk.connect", zookeeperConfig);
                    props.put("serializer.class", StringEncoder.class.getName());
                    // Any "jafka.*" system property overrides the defaults,
                    // with the prefix stripped off.
                    final String JAFKA_PREFIX = "jafka.";
                    for (Map.Entry<Object, Object> e : System.getProperties().entrySet()) {
                        String name = (String) e.getKey();
                        if (name.startsWith(JAFKA_PREFIX)) {
                            props.put(name.substring(JAFKA_PREFIX.length()), (String) e.getValue());
                        }
                    }
                    instance = new StringProducers(props, true);
                }
            }
        }
        return instance;
    }
}
public class AdapterUtil { /** * Display the java . sql . ResultSet concurrency mode constant corresponding to the value
* supplied .
* @ param level a valid java . sql . ResultSet concurrency mode constant .
* @ return the name of the constant , or a string indicating the constant is unknown . */
public static String getConcurrencyModeString ( int concurrency ) { } } | switch ( concurrency ) { case ResultSet . CONCUR_READ_ONLY : return "CONCUR READ ONLY (" + concurrency + ')' ; case ResultSet . CONCUR_UPDATABLE : return "CONCUR UPDATABLE (" + concurrency + ')' ; case CONCUR_SS_SCROLL_LOCKS : return "CONCUR SS SCROLL LOCKS (" + concurrency + ')' ; case CONCUR_SS_OPTIMISTIC_CCVAL : return "CONCUR SS OPTIMISTIC CCVAL (" + concurrency + ')' ; } return "UNKNOWN RESULT SET CONCURRENCY (" + concurrency + ')' ; |
public class DocumentIndexer {
    /**
     * Adds lucene index entries for associated (to-one relation) columns that
     * are listed in the entity's {@code @IndexCollection} annotation: for each
     * such relation the related entity's id is stored under the canonical
     * property name.
     *
     * @param metadata metadata of the entity being indexed
     * @param entity the entity instance
     * @param document the lucene document to extend
     * @param metaModel metamodel used to resolve attributes and related metadata
     */
    protected void addAssociatedEntitiesToDocument(EntityMetadata metadata, Object entity, Document document, MetamodelImpl metaModel) {
        try {
            IndexCollection indexes = metadata.getEntityClazz().getAnnotation(IndexCollection.class);
            if (indexes != null) {
                // Collect the JPA column names declared as indexable.
                List<String> columnsNameToBeIndexed = new ArrayList<String>();
                for (com.impetus.kundera.index.Index indexedColumn : indexes.columns()) {
                    Attribute attrib = metaModel.getEntityAttribute(entity.getClass(), indexedColumn.name());
                    columnsNameToBeIndexed.add(((AbstractAttribute) attrib).getJPAColumnName());
                }
                String indexName = metadata.getIndexName();
                List<Relation> relations = metadata.getRelations();
                for (Relation relation : relations) {
                    // Only to-one relations have a single join column worth indexing.
                    if (relation.getType().equals(ForeignKey.MANY_TO_ONE) || relation.getType().equals(ForeignKey.ONE_TO_ONE)) {
                        String propertyName = relation.getJoinColumnName(null);
                        if (propertyName != null && columnsNameToBeIndexed.contains(propertyName)) {
                            java.lang.reflect.Field property = relation.getProperty();
                            Object obj = PropertyAccessorHelper.getObject(entity, property);
                            if (obj != null) {
                                // Index the related entity's id under the canonical name.
                                EntityMetadata relMetaData = metaModel.getEntityMetadata(obj.getClass());
                                Object id = PropertyAccessorHelper.getId(obj, relMetaData);
                                if (id != null) {
                                    Field luceneField = new Field(getCannonicalPropertyName(indexName, propertyName), id.toString(), Field.Store.YES, Field.Index.ANALYZED_NO_NORMS);
                                    document.add(luceneField);
                                } else {
                                    LOG.warn("value is null for field" + property.getName());
                                }
                            }
                        }
                    }
                }
            }
        } catch (PropertyAccessException e) {
            LOG.error("Error in accessing field, Caused by:" + e.getMessage());
            throw new LuceneIndexingException("Error in creating indexes on associated columns", e);
        }
    }
}
public class TimestampProcessor { /** * Method updates the timestamp field of the given message with the given time in the given timeUnit .
* @ param < M > the message type of the message which is updated
* @ param time the time which is put in the timestamp field
* @ param messageOrBuilder the message
* @ param timeUnit the unit of time
* @ return the updated message
* @ throws CouldNotPerformException is thrown in case the copy could not be performed e . g . because of a missing timestamp field . */
public static < M extends MessageOrBuilder > M updateTimestamp ( final long time , final M messageOrBuilder , final TimeUnit timeUnit ) throws CouldNotPerformException { } } | long milliseconds = TimeUnit . MILLISECONDS . convert ( time , timeUnit ) ; try { if ( messageOrBuilder == null ) { throw new NotAvailableException ( "messageOrBuilder" ) ; } try { // handle builder
if ( messageOrBuilder . getClass ( ) . getSimpleName ( ) . equals ( "Builder" ) ) { messageOrBuilder . getClass ( ) . getMethod ( SET + TIMESTAMP_NAME , Timestamp . class ) . invoke ( messageOrBuilder , TimestampJavaTimeTransform . transform ( milliseconds ) ) ; return messageOrBuilder ; } // handle message
final Object builder = messageOrBuilder . getClass ( ) . getMethod ( "toBuilder" ) . invoke ( messageOrBuilder ) ; builder . getClass ( ) . getMethod ( SET + TIMESTAMP_NAME , Timestamp . class ) . invoke ( builder , TimestampJavaTimeTransform . transform ( milliseconds ) ) ; return ( M ) builder . getClass ( ) . getMethod ( "build" ) . invoke ( builder ) ; } catch ( IllegalAccessException | IllegalArgumentException | InvocationTargetException | NoSuchMethodException ex ) { throw new NotSupportedException ( "Field[Timestamp]" , messageOrBuilder . getClass ( ) . getName ( ) , ex ) ; } } catch ( CouldNotPerformException ex ) { throw new CouldNotPerformException ( "Could not update timestemp! " , ex ) ; } |
public class Where { /** * Add a ' IS NOT NULL ' clause so the column must not be null . ' & lt ; & gt ; ' NULL does not work . */
public Where < T , ID > isNotNull ( String columnName ) throws SQLException { } } | addClause ( new IsNotNull ( columnName , findColumnFieldType ( columnName ) ) ) ; return this ; |
public class NavMesh { /** * Gets the polygon reference for the tile ' s base polygon .
* @ param tile
* The tile .
* @ return The polygon reference for the base polygon in the specified tile . */
public long getPolyRefBase ( MeshTile tile ) { } } | if ( tile == null ) { return 0 ; } int it = tile . index ; return encodePolyId ( tile . salt , it , 0 ) ; |
public class JDBCUtils {
    /**
     * Verifies that the statement's connection is still alive. Returns true if
     * it is, false if it is not. If the connection is broken we also make a
     * best-effort attempt to close the statement and connection, so that the
     * caller need only open a new connection.
     */
    public static boolean validate(Statement stmt) {
        try {
            Connection conn = stmt.getConnection();
            if (conn == null) {
                return false;
            }
            if (!conn.isClosed() && conn.isValid(10)) {
                return true;
            }
            // Connection is dead: clean up both resources before reporting it.
            stmt.close();
            conn.close();
        } catch (SQLException ignored) {
            // Cleanup may well fail; that doesn't matter — it is best-effort.
        }
        return false;
    }
}
public class ApiOvhTelephony { /** * Alter this object properties
* REST : PUT / telephony / { billingAccount } / voicemail / { serviceName }
* @ param body [ required ] New object properties
* @ param billingAccount [ required ] The name of your billingAccount
* @ param serviceName [ required ] */
public void billingAccount_voicemail_serviceName_PUT ( String billingAccount , String serviceName , OvhVoicemail body ) throws IOException { } } | String qPath = "/telephony/{billingAccount}/voicemail/{serviceName}" ; StringBuilder sb = path ( qPath , billingAccount , serviceName ) ; exec ( qPath , "PUT" , sb . toString ( ) , body ) ; |
public class Dataset {
    /**
     * Adds a datum item into the dataset.
     *
     * @param x the datum item to append
     * @return the same datum item that was added (allows call chaining)
     */
    public Datum<E> add(Datum<E> x) {
        data.add(x);
        return x;
    }
}
public class DataFramePrinter { /** * Returns the 2 - D array of data tokens from the frame specified
* @ param frame the DataFrame from which to create 2D array of formatted tokens
* @ return the array of data tokens */
private String [ ] [ ] getDataTokens ( Relation frame ) { } } | if ( frame . rowCount ( ) == 0 ) return new String [ 0 ] [ 0 ] ; final int rowCount = Math . min ( maxRows , frame . rowCount ( ) ) ; final boolean truncated = frame . rowCount ( ) > maxRows ; final int colCount = frame . columnCount ( ) ; final String [ ] [ ] data ; if ( truncated ) { data = new String [ rowCount + 1 ] [ colCount ] ; int i ; for ( i = 0 ; i < Math . ceil ( ( double ) rowCount / 2 ) ; i ++ ) { for ( int j = 0 ; j < colCount ; j ++ ) { data [ i ] [ j ] = frame . getString ( i , j ) ; } } for ( int j = 0 ; j < colCount ; j ++ ) { data [ i ] [ j ] = "..." ; } for ( ++ i ; i <= rowCount ; i ++ ) { for ( int j = 0 ; j < colCount ; j ++ ) { data [ i ] [ j ] = frame . getString ( frame . rowCount ( ) - maxRows + i - 1 , j ) ; } } } else { data = new String [ rowCount ] [ colCount ] ; for ( int i = 0 ; i < rowCount ; i ++ ) { for ( int j = 0 ; j < colCount ; j ++ ) { String value = frame . getString ( i , j ) ; data [ i ] [ j ] = value == null ? "" : value ; } } } return data ; |
public class InventoryNavigator { /** * Retrieve content recursively with multiple properties .
* the typeinfo array contains typename + properties to retrieve .
* @ param typeinfo 2D array of properties for each typename
* @ param recurse retrieve contents recursively from the root down
* @ return retrieved object contents
* @ throws RemoteException
* @ throws RuntimeFault
* @ throws InvalidProperty */
public ManagedEntity [ ] searchManagedEntities ( String [ ] [ ] typeinfo , boolean recurse ) throws InvalidProperty , RuntimeFault , RemoteException { } } | ObjectContent [ ] ocs = retrieveObjectContents ( typeinfo , recurse ) ; return createManagedEntities ( ocs ) ; |
public class AbstractTriangle3F {
    /**
     * Tests if the point {@code (px, py, pz)} lies inside the 3D triangle
     * given by {@code (ax, ay, az)}, {@code (bx, by, bz)} and
     * {@code (cx, cy, cz)}.
     *
     * <strong>Caution: tests are "epsiloned".</strong>
     *
     * The test uses barycentric coordinates: with v0 = C - A and v1 = B - A,
     * any point of the triangle's plane can be written
     * {@code P = A + u*v0 + v*v1}. Solving the two dot-product equations
     * <pre>
     *   u = ((v1.v1)(v2.v0) - (v1.v0)(v2.v1)) / ((v0.v0)(v1.v1) - (v0.v1)(v1.v0))
     *   v = ((v0.v0)(v2.v1) - (v0.v1)(v2.v0)) / ((v0.v0)(v1.v1) - (v0.v1)(v1.v0))
     * </pre>
     * the projection of P lies inside the triangle iff {@code u >= 0},
     * {@code v >= 0} and {@code u + v <= 1}.
     *
     * The <var>forceCoplanar</var> flag decides whether coplanarity is also
     * required: when it is {@code true} the method returns {@code true} only
     * if the point additionally lies (within epsilon) on the triangle's plane;
     * when it is {@code false} only the projection test above matters.
     *
     * @param ax the X coordinate of the first point of the triangle
     * @param ay the Y coordinate of the first point of the triangle
     * @param az the Z coordinate of the first point of the triangle
     * @param bx the X coordinate of the second point of the triangle
     * @param by the Y coordinate of the second point of the triangle
     * @param bz the Z coordinate of the second point of the triangle
     * @param cx the X coordinate of the third point of the triangle
     * @param cy the Y coordinate of the third point of the triangle
     * @param cz the Z coordinate of the third point of the triangle
     * @param px the X coordinate of the point
     * @param py the Y coordinate of the point
     * @param pz the Z coordinate of the point
     * @param forceCoplanar is <code>true</code> to force to test if the given
     *        point is coplanar to the triangle, <code>false</code> to not
     *        consider coplanarity of the point
     * @param epsilon the accuracy parameter (distance); must be in the same
     *        unit of measurement as the other parameters
     * @return <code>true</code> if the point is coplanar - or not, depending
     *         on <var>forceCoplanar</var> - to the triangle and lies inside
     *         it, otherwise <code>false</code>
     * @since 3.0
     */
    @Pure
    public static boolean containsTrianglePoint(double ax, double ay, double az, double bx, double by, double bz, double cx, double cy, double cz, double px, double py, double pz, boolean forceCoplanar, double epsilon) {
        // Compute the edge/point vectors relative to A.
        // v0 = C - A
        double v0x = cx - ax;
        double v0y = cy - ay;
        double v0z = cz - az;
        // v1 = B - A
        double v1x = bx - ax;
        double v1y = by - ay;
        double v1z = bz - az;
        // v2 = P - A
        double v2x = px - ax;
        double v2y = py - ay;
        double v2z = pz - az;
        // Dot products needed by the barycentric solution.
        // dot00 = dot(v0, v0)
        double dot00 = FunctionalVector3D.dotProduct(v0x, v0y, v0z, v0x, v0y, v0z);
        // dot01 = dot(v0, v1)
        double dot01 = FunctionalVector3D.dotProduct(v0x, v0y, v0z, v1x, v1y, v1z);
        // dot02 = dot(v0, v2)
        double dot02 = FunctionalVector3D.dotProduct(v0x, v0y, v0z, v2x, v2y, v2z);
        // dot11 = dot(v1, v1)
        double dot11 = FunctionalVector3D.dotProduct(v1x, v1y, v1z, v1x, v1y, v1z);
        // dot12 = dot(v1, v2)
        double dot12 = FunctionalVector3D.dotProduct(v1x, v1y, v1z, v2x, v2y, v2z);
        // Compute barycentric coordinates (u, v).
        double invDenom = 1. / (dot00 * dot11 - dot01 * dot01);
        double u = (dot11 * dot02 - dot01 * dot12) * invDenom;
        double v = (dot00 * dot12 - dot01 * dot02) * invDenom;
        // Projection is in the triangle when u >= 0, v >= 0 and u + v <= 1
        // (all comparisons epsilon-tolerant).
        if ((MathUtil.compareEpsilon(u, 0., epsilon) >= 0) && (MathUtil.compareEpsilon(v, 0., epsilon) >= 0) && (MathUtil.compareEpsilon(u + v, 1., epsilon) <= 0)) {
            if (forceCoplanar) {
                // Triangle's plane equation:
                // nx = ay*(bz - cz) + by*(cz - az) + cy*(az - bz)
                // ny = az*(bx - cx) + bz*(cx - ax) + cz*(ax - bx)
                // nz = ax*(by - cy) + bx*(cy - ay) + cx*(ay - by)
                // d  = -(nx*ax + ny*ay + nz*az)
                // The dot* variables are deliberately reused here to avoid
                // allocating additional locals.
                dot00 = ay * (bz - cz) + by * v0z - cy * v1z;
                dot01 = az * (bx - cx) + bz * v0x - cz * v1x;
                dot02 = ax * (by - cy) + bx * v0y - cx * v1y;
                dot11 = -(dot00 * ax + dot01 * ay + dot02 * az);
                dot12 = dot00 * px + dot01 * py + dot02 * pz + dot11;
                // Coplanar iff the plane-equation residual is (epsilon-)zero.
                return MathUtil.isEpsilonZero(dot12, epsilon);
            }
            return true;
        }
        return false;
    }
}
public class WxPayApi {
    /**
     * Corporate payment to a bank card (WeChat Pay "pay to bank" API).
     *
     * @param params the request parameters
     * @param certPath path of the client certificate file
     * @param certPassword password of the client certificate
     * @return {String} the response returned by doPostSSL (presumably the raw
     *         WeChat Pay response body — confirm against doPostSSL)
     */
    public static String payBank(Map<String, String> params, String certPath, String certPassword) {
        return WxPayApi.doPostSSL(PAY_BANK_URL, params, certPath, certPassword);
    }
}
public class SerializerIntrinsics {
    /**
     * Emits an x86 FISUB instruction, which subtracts an integer memory
     * operand from st(0) on the FPU stack.
     *
     * @param src the integer memory operand; must be 2 or 4 bytes wide,
     *            the only operand sizes FISUB accepts
     */
    public final void fisub(Mem src) {
        // FISUB only takes word (2-byte) or dword (4-byte) integer operands.
        assert (src.size() == 2 || src.size() == 4);
        emitX86(INST_FISUB, src);
    }
}
public class MessageStack {
    /**
     * Sends this message to the message queue and wakes the worker thread if
     * it is currently waiting for work.
     *
     * (Fixed javadoc: the previous doc described a {@code strCommand} String
     * parameter and a boolean return, but the method takes a {@link Message}
     * and returns nothing.)
     *
     * @param message the message to enqueue; a null message is silently ignored
     */
    public void sendMessage(Message message) {
        if (message == null)
            return; // Don't allow a null message to be sent.
        m_stack.add(message);
        if (m_bWaiting) {
            synchronized (m_thread) {
                if (m_bWaiting) // Re-check inside the sync block before waking
                    m_thread.notify();
            }
        }
    }
}
public class ConfigFactory { /** * Loads an application ' s configuration from the given classpath resource or
* classpath resource basename , sandwiches it between default reference
* config and default overrides , and then resolves it . The classpath
* resource is " raw " ( it should have no " / " prefix , and is not made relative
* to any package , so it ' s like { @ link ClassLoader # getResource } not
* { @ link Class # getResource } ) .
* Resources are loaded from the current thread ' s
* { @ link Thread # getContextClassLoader ( ) } . In general , a library needs its
* configuration to come from the class loader used to load that library , so
* the proper " reference . conf " are present .
* The loaded object will already be resolved ( substitutions have already
* been processed ) . As a result , if you add more fallbacks then they won ' t
* be seen by substitutions . Substitutions are the " $ { foo . bar } " syntax . If
* you want to parse additional files or something then you need to use
* { @ link # load ( Config ) } .
* To load a standalone resource ( without the default reference and default
* overrides ) , use { @ link # parseResourcesAnySyntax ( String ) } rather than this
* method . To load only the reference config use { @ link # defaultReference ( ) }
* and to load only the overrides use { @ link # defaultOverrides ( ) } .
* @ param resourceBasename
* name ( optionally without extension ) of a resource on classpath
* @ return configuration for an application relative to context class loader */
public static Config load ( String resourceBasename ) { } } | return load ( resourceBasename , ConfigParseOptions . defaults ( ) , ConfigResolveOptions . defaults ( ) ) ; |
public class IconResolver {
    /**
     * Finds the registered {@link IconSet} that maps the given font icon code
     * to a unicode character, searching all active icon sets in the
     * application (FontAwesome, Typicon and Material sets are skipped because
     * they have already been checked by the caller).
     *
     * (Fixed javadoc: the previous doc claimed this returns the unicode
     * character or null; it actually returns the matching IconSet and throws
     * when nothing matches.)
     *
     * @param iconCode the font icon code
     * @return the icon set that can resolve the code; never null
     * @throws IllegalArgumentException if no registered icon set maps the code
     */
    private static IconSet resolveIconSet(String iconCode) {
        CharSequence unicode;
        for (IconSet set : getRegisteredIconSets()) {
            if (set.fontPath().equals(FontAwesome.FONT_PATH) || set.fontPath().equals(Typicon.FONT_PATH) || set.fontPath().equals(MaterialIcons.FONT_PATH)) {
                continue; // already checked previously, ignore
            }
            unicode = set.unicodeForKey(iconCode);
            if (unicode != null) {
                return set;
            }
        }
        String message = String.format("Could not find FontIcon value for '%s', " + "please ensure that it is mapped to a valid font", iconCode);
        throw new IllegalArgumentException(message);
    }
}
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.