signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class JcrSession { /** * Returns whether the authenticated user has the given role . * @ param context the security context * @ param roleName the name of the role to check * @ param repositoryName the name of the repository * @ param workspaceName the workspace under which the user must have the role . This may be different from the current * workspace . * @ return true if the user has the role and is logged in ; false otherwise */ static boolean hasRole ( SecurityContext context , String roleName , String repositoryName , String workspaceName ) { } }
if ( context . hasRole ( roleName ) ) return true ; roleName = roleName + "." + repositoryName ; if ( context . hasRole ( roleName ) ) return true ; roleName = roleName + "." + workspaceName ; return context . hasRole ( roleName ) ;
public class Ifc4FactoryImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public String convertIfcBurnerTypeEnumToString ( EDataType eDataType , Object instanceValue ) { } }
return instanceValue == null ? null : instanceValue . toString ( ) ;
public class LanguageUtils { /** * Returns a map of all translations for a given language . * Defaults to the default language which must be set . * @ param appid appid name of the { @ link com . erudika . para . core . App } * @ param langCode the 2 - letter language code * @ return the language map */ public Map < String , String > readLanguage ( String appid , String langCode ) { } }
if ( StringUtils . isBlank ( langCode ) || langCode . equals ( getDefaultLanguageCode ( ) ) ) { return getDefaultLanguage ( appid ) ; } else if ( langCode . length ( ) > 2 && ! ALL_LOCALES . containsKey ( langCode ) ) { return readLanguage ( appid , langCode . substring ( 0 , 2 ) ) ; } else if ( LANG_CACHE . containsKey ( langCode ) ) { return LANG_CACHE . get ( langCode ) ; } // load language map from file Map < String , String > lang = readLanguageFromFile ( appid , langCode ) ; if ( lang == null || lang . isEmpty ( ) ) { // or try to load from DB lang = new TreeMap < String , String > ( getDefaultLanguage ( appid ) ) ; Sysprop s = dao . read ( appid , keyPrefix . concat ( langCode ) ) ; if ( s != null && ! s . getProperties ( ) . isEmpty ( ) ) { Map < String , Object > loaded = s . getProperties ( ) ; for ( Map . Entry < String , String > entry : lang . entrySet ( ) ) { if ( loaded . containsKey ( entry . getKey ( ) ) ) { lang . put ( entry . getKey ( ) , String . valueOf ( loaded . get ( entry . getKey ( ) ) ) ) ; } else { lang . put ( entry . getKey ( ) , entry . getValue ( ) ) ; } } } LANG_CACHE . put ( langCode , lang ) ; } return Collections . unmodifiableMap ( lang ) ;
public class FormattedString { /** * Creates a FormattedString for each of the given objects . */ public static List < FormattedString > of ( String format , List < ? > objects ) { } }
return objects . stream ( ) . map ( object -> of ( format , object ) ) . collect ( toList ( ) ) ;
public class FileSystem { /** * Filter files / directories in the given list of paths using user - supplied * path filter . * If one of the user supplied directory does not exist , the method silently * skips it and continues with the remaining directories . * @ param files * a list of paths * @ param filter * the user - supplied path filter * @ return a list of statuses for the files under the given paths after * applying the filter * @ exception IOException */ public FileStatus [ ] listStatus ( Path [ ] files , PathFilter filter ) throws IOException { } }
ArrayList < FileStatus > results = new ArrayList < FileStatus > ( ) ; for ( int i = 0 ; i < files . length ; i ++ ) { try { listStatus ( results , files [ i ] , filter ) ; } catch ( FileNotFoundException e ) { LOG . info ( "Parent path doesn't exist: " + e . getMessage ( ) ) ; } } return results . toArray ( new FileStatus [ results . size ( ) ] ) ;
public class BusItinerary { /** * Replies the list of the bus halts of the bus itinerary . * @ return a list of bus halts */ @ Pure public Iterator < BusItineraryHalt > busHaltIterator ( ) { } }
return Iterators . concat ( this . validHalts . iterator ( ) , this . invalidHalts . iterator ( ) ) ;
public class ValueHolder { /** * Factory method to construct an instance of the ValueHolder class with a Comparable value . The ValueHolder * implementation itself implements the Comparable interface . * @ param < T > the Class type of the Comparable value . * @ param value the Comparable value to hold . * @ return a ValueHolder implementation that holds Comparable values . * @ see java . lang . Comparable */ public static < T extends Comparable < T > > ComparableValueHolder < T > withComparableValue ( final T value ) { } }
return new ComparableValueHolder < > ( value ) ;
public class ExpressionUtils {
    /**
     * Returns whether the given character (Unicode code point) is a Unicode symbol,
     * i.e. belongs to one of the symbol general categories: Sm (math), Sc (currency),
     * Sk (modifier) or So (other).
     */
    static boolean isSymbolChar(int ch) {
        switch (Character.getType(ch)) {
            case Character.MATH_SYMBOL:
            case Character.CURRENCY_SYMBOL:
            case Character.MODIFIER_SYMBOL:
            case Character.OTHER_SYMBOL:
                return true;
            default:
                return false;
        }
    }
}
public class GermanSpellerRule { /** * non - native speakers and cannot be found by just looking for similar words . */ @ Nullable private String getParticipleSuggestion ( String word ) { } }
if ( word . startsWith ( "ge" ) && word . endsWith ( "t" ) ) { // strip leading " ge " and replace trailing " t " with " en " : String baseform = word . substring ( 2 , word . length ( ) - 1 ) + "en" ; try { String participle = getParticipleForBaseform ( baseform ) ; if ( participle != null ) { return participle ; } } catch ( IOException e ) { throw new RuntimeException ( e ) ; } } return null ;
public class MvpProcessor {
    /**
     * <p>Gets presenters {@link java.util.List} annotated with
     * {@link com.arellomobile.mvp.presenter.InjectPresenter} for view.</p>
     * <p>See full info about getting presenter instance in {@link #getMvpPresenter}</p>
     *
     * @param delegated class contains presenter
     * @param delegateTag unique tag generated by {@link MvpDelegate#generateTag()}
     * @param <Delegated> type of delegated
     * @return presenters list for specified presenters container
     */
    <Delegated> List<MvpPresenter<? super Delegated>> getMvpPresenters(Delegated delegated, String delegateTag) {
        // Without the generated MoxyReflector there is nothing to inject.
        if (!hasMoxyReflector()) {
            return Collections.emptyList();
        }
        @SuppressWarnings("unchecked")
        Class<? super Delegated> aClass = (Class<Delegated>) delegated.getClass();
        // Walk up the class hierarchy until a class with registered presenter binders is found.
        List<Object> presenterBinders = null;
        while (aClass != Object.class && presenterBinders == null) {
            presenterBinders = MoxyReflector.getPresenterBinders(aClass);
            aClass = aClass.getSuperclass();
        }
        if (presenterBinders == null || presenterBinders.isEmpty()) {
            return Collections.emptyList();
        }
        List<MvpPresenter<? super Delegated>> presenters = new ArrayList<>();
        PresentersCounter presentersCounter = MvpFacade.getInstance().getPresentersCounter();
        for (Object presenterBinderObject : presenterBinders) {
            // noinspection unchecked
            PresenterBinder<Delegated> presenterBinder = (PresenterBinder<Delegated>) presenterBinderObject;
            List<PresenterField<Delegated>> presenterFields = presenterBinder.getPresenterFields();
            for (PresenterField<Delegated> presenterField : presenterFields) {
                // Resolve (create or reuse) the presenter for this field.
                MvpPresenter<? super Delegated> presenter = getMvpPresenter(delegated, presenterField, delegateTag);
                if (presenter != null) {
                    // Track the injection and bind the presenter into the annotated field.
                    presentersCounter.injectPresenter(presenter, delegateTag);
                    presenters.add(presenter);
                    presenterField.bind(delegated, presenter);
                }
            }
        }
        return presenters;
    }
}
public class TrueTypeFontUnicode {
    /**
     * Outputs to the writer the font dictionaries and streams.
     *
     * @param writer the writer for this document
     * @param ref the font indirect reference
     * @param params several parameters that depend on the font type
     * @throws IOException on error
     * @throws DocumentException error in generating the object
     */
    void writeFont(PdfWriter writer, PdfIndirectReference ref, Object params[]) throws DocumentException, IOException {
        // params[0] is the map of used glyphs; sort its metrics by glyph id (this class
        // is the Comparator used by Arrays.sort).
        HashMap longTag = (HashMap) params[0];
        addRangeUni(longTag, true, subset);
        Object metrics[] = longTag.values().toArray();
        Arrays.sort(metrics, this);
        PdfIndirectReference ind_font = null;
        PdfObject pobj = null;
        PdfIndirectObject obj = null;
        PdfIndirectReference cidset = null;
        // PDF/A conformance requires a CIDSet stream: a bitmap with one bit per CID used.
        if (writer.getPDFXConformance() == PdfWriter.PDFA1A || writer.getPDFXConformance() == PdfWriter.PDFA1B) {
            PdfStream stream;
            if (metrics.length == 0) {
                stream = new PdfStream(new byte[]{(byte) 0x80});
            } else {
                // highest used CID determines the bitmap size
                int top = ((int[]) metrics[metrics.length - 1])[0];
                byte[] bt = new byte[top / 8 + 1];
                for (int k = 0; k < metrics.length; ++k) {
                    int v = ((int[]) metrics[k])[0];
                    bt[v / 8] |= rotbits[v % 8];
                }
                stream = new PdfStream(bt);
                stream.flateCompress(compressionLevel);
            }
            cidset = writer.addToBody(stream).getIndirectReference();
        }
        // sivan: cff
        if (cff) {
            // CFF-flavoured font: optionally subset, then embed as CIDFontType0C.
            byte b[] = readCffFont();
            if (subset || subsetRanges != null) {
                CFFFontSubset cff = new CFFFontSubset(new RandomAccessFileOrArray(b), longTag);
                b = cff.Process(cff.getNames()[0]);
            }
            pobj = new StreamFont(b, "CIDFontType0C", compressionLevel);
            obj = writer.addToBody(pobj);
            ind_font = obj.getIndirectReference();
        } else {
            // TrueType-flavoured font: optionally subset the glyph tables.
            byte[] b;
            if (subset || directoryOffset != 0) {
                TrueTypeFontSubSet sb = new TrueTypeFontSubSet(fileName, new RandomAccessFileOrArray(rf), longTag, directoryOffset, false, false);
                b = sb.process();
            } else {
                b = getFullFont();
            }
            int lengths[] = new int[]{b.length};
            pobj = new StreamFont(b, lengths, compressionLevel);
            obj = writer.addToBody(pobj);
            ind_font = obj.getIndirectReference();
        }
        // Subset fonts get a random 6-letter tag prefixed to their name.
        String subsetPrefix = "";
        if (subset)
            subsetPrefix = createSubsetPrefix();
        // Emit the chain: font descriptor -> CIDFontType2 -> (optional ToUnicode) -> base font.
        PdfDictionary dic = getFontDescriptor(ind_font, subsetPrefix, cidset);
        obj = writer.addToBody(dic);
        ind_font = obj.getIndirectReference();
        pobj = getCIDFontType2(ind_font, subsetPrefix, metrics);
        obj = writer.addToBody(pobj);
        ind_font = obj.getIndirectReference();
        pobj = getToUnicode(metrics);
        PdfIndirectReference toUnicodeRef = null;
        if (pobj != null) {
            obj = writer.addToBody(pobj);
            toUnicodeRef = obj.getIndirectReference();
        }
        pobj = getFontBaseType(ind_font, subsetPrefix, toUnicodeRef);
        // The top-level font dictionary is written at the caller-supplied reference.
        writer.addToBody(pobj, ref);
    }
}
public class LocalCachedMapOptions { /** * Sets time to live for each map entry in local cache . * If value equals to < code > 0 < / code > then timeout is not applied * @ param timeToLive - time to live * @ param timeUnit - time unit * @ return LocalCachedMapOptions instance */ public LocalCachedMapOptions < K , V > timeToLive ( long timeToLive , TimeUnit timeUnit ) { } }
return timeToLive ( timeUnit . toMillis ( timeToLive ) ) ;
public class Transaction { /** * Closes the transaction . If the transaction has not been committed , * it is rolled back . */ public void close ( ) { } }
try { state = "CLOSING" ; if ( connection != null ) { logger . warn ( "Connection not committed, rolling back." ) ; rollback ( ) ; } if ( ! events . empty ( ) ) { state = "CLOSING EVENTS" ; do { Execution exec = ( Execution ) events . pop ( ) ; try { Stack < Execution > stack ; exec . close ( ) ; stack = eventCache . get ( exec . getClass ( ) . getName ( ) ) ; if ( stack != null ) { stack . push ( exec ) ; } } catch ( Throwable t ) { logger . error ( t . getMessage ( ) , t ) ; } } while ( ! events . empty ( ) ) ; } state = "CLOSED" ; } finally { if ( tracking ) { transactions . remove ( new Integer ( transactionId ) ) ; } events . clear ( ) ; statements . clear ( ) ; stackTrace = null ; }
public class DescribeNetworkInterfacesRequest {
    /**
     * One or more network interface IDs.
     * Default: Describes all your network interfaces.
     *
     * @return One or more network interface IDs.</p>
     *         Default: Describes all your network interfaces.
     */
    public java.util.List<String> getNetworkInterfaceIds() {
        // Lazily initialize so callers always receive a mutable, non-null list.
        if (networkInterfaceIds == null) {
            networkInterfaceIds = new com.amazonaws.internal.SdkInternalList<String>();
        }
        return networkInterfaceIds;
    }
}
public class GBSInsertFringe {
    /**
     * Balance a fringe with a K factor of sixteen.
     *
     * For k = 16 the fringe is a right-leaning chain. With maxBal == 15 the
     * chain of 15 nodes is relinked into a balanced subtree of height 4 rooted
     * at the 8th node. With maxBal == 17 the 17-node chain is split into two
     * such subtrees and the height-balancer is invoked on the new root.
     *
     * @param stack The stack of nodes through which the insert operation passed.
     * @param bparent The parent of the fringe balance point.
     * @param fpoint The fringe balance point (the top of the fringe).
     * @param fpidx The index within the stack of fpoint.
     * @param maxBal Maximum allowed fringe imbalance.
     */
    private void balance16(NodeStack stack, GBSNode bparent, /* Parent of fringe balance point */
            GBSNode fpoint, /* Fringe balance point */
            int fpidx, /* Index within stack of fpoint */
            int maxBal) /* Maximum allowed fringe imbalance */ {
        if (maxBal == 15) {
            // Capture the 14 successors of the fringe top; a..n name the chain
            // a -> b -> ... -> n (o is the final leaf reached implicitly).
            GBSNode a = fpoint;
            GBSNode b = a.rightChild();
            GBSNode c = b.rightChild();
            GBSNode d = c.rightChild();
            GBSNode e = d.rightChild();
            GBSNode f = e.rightChild();
            GBSNode g = f.rightChild();
            GBSNode h = g.rightChild();
            GBSNode i = h.rightChild();
            GBSNode j = i.rightChild();
            GBSNode k = j.rightChild();
            GBSNode l = k.rightChild();
            GBSNode m = l.rightChild();
            GBSNode n = m.rightChild();
            // Relink: h becomes the subtree root with d and l as children,
            // each of which roots a balanced quartet.
            h.setLeftChild(d);
            h.setRightChild(l);
            d.setLeftChild(b);
            d.setRightChild(f);
            b.setLeftChild(a);
            a.clearRightChild();
            c.clearRightChild();
            f.setLeftChild(e);
            e.clearRightChild();
            g.clearRightChild();
            l.setLeftChild(j);
            l.setRightChild(n);
            j.setLeftChild(i);
            i.clearRightChild();
            k.clearRightChild();
            n.setLeftChild(m);
            m.clearRightChild();
            // Reattach the new subtree root under the fringe's parent on the
            // same side the old fringe top occupied.
            if (bparent.rightChild() == a)
                bparent.setRightChild(h);
            else
                bparent.setLeftChild(h);
        } else {
            if (maxBal != 17)
                error("fringeBalance16: maxBal != 17, maxBal = " + maxBal);
            // t0_top is the node above the previously-balanced subtree; h1 is
            // the old root of that subtree.
            GBSNode t0_top = stack.node(fpidx - 4);
            GBSNode h1 = stack.node(fpidx - 3);
            // a..o name the 15-node chain hanging off the fringe point.
            GBSNode a = fpoint.rightChild();
            GBSNode b = a.rightChild();
            GBSNode c = b.rightChild();
            GBSNode d = c.rightChild();
            GBSNode e = d.rightChild();
            GBSNode f = e.rightChild();
            GBSNode g = f.rightChild();
            GBSNode h = g.rightChild();
            GBSNode i = h.rightChild();
            GBSNode j = i.rightChild();
            GBSNode k = j.rightChild();
            GBSNode l = k.rightChild();
            GBSNode m = l.rightChild();
            GBSNode n = m.rightChild();
            GBSNode o = n.rightChild();
            // Promote a to replace h1 under t0_top, preserving the side.
            if (t0_top.rightChild() == h1)
                t0_top.setRightChild(a);
            else
                t0_top.setLeftChild(a);
            // a adopts the old subtree (h1) on the left and a freshly balanced
            // subtree rooted at i on the right.
            a.setLeftChild(h1);
            a.setRightChild(i);
            i.setLeftChild(e);
            i.setRightChild(m);
            e.setLeftChild(c);
            e.setRightChild(g);
            c.setLeftChild(b);
            b.clearRightChild();
            d.clearRightChild();
            g.setLeftChild(f);
            f.clearRightChild();
            h.clearRightChild();
            m.setLeftChild(k);
            m.setRightChild(o);
            k.setLeftChild(j);
            j.clearRightChild();
            l.clearRightChild();
            o.setLeftChild(n);
            n.clearRightChild();
            fpoint.clearRightChild();
            // Record the new subtree root in the stack and let the height
            // balancer propagate the change upward if needed.
            if ((fpidx > 4) && (stack.balancePointIndex() > -1)) {
                GBSNode qpoint = a;
                stack.setNode(fpidx - 3, qpoint);
                GBSInsertHeight.singleInstance().balance(stack, qpoint);
            }
        }
    }
}
public class MDLRXNReader {
    /**
     * Read a Reaction from a file in MDL RXN format.
     *
     * Parses the 4-line header, the counts line (number of reactants and
     * products), then each embedded molfile, and finally reconstructs
     * atom-atom mappings from the ATOM_ATOM_MAPPING properties set by the
     * molfile reader.
     *
     * @return The Reaction that was read from the MDL file, an empty reaction
     *         for an empty ($$$$) file, or null at end of input.
     */
    private IReaction readReaction(IChemObjectBuilder builder) throws CDKException {
        logger.debug("Reading new reaction");
        int linecount = 0;
        IReaction reaction = builder.newInstance(IReaction.class);
        try {
            input.readLine(); // first line should be $RXN
            input.readLine(); // second line
            input.readLine(); // third line
            input.readLine(); // fourth line
        } catch (IOException exception) {
            logger.debug(exception);
            throw new CDKException("Error while reading header of RXN file", exception);
        }
        int reactantCount = 0;
        int productCount = 0;
        try {
            String countsLine = input.readLine();
            linecount++;
            if (countsLine == null) {
                // end of input: no further reactions
                return null;
            }
            logger.debug("Line " + linecount + ": " + countsLine);
            if (countsLine.startsWith("$$$$")) {
                logger.debug("File is empty, returning empty reaction");
                return reaction;
            }
            /*
             * this line contains the number of reactants and products
             */
            StringTokenizer tokenizer = new StringTokenizer(countsLine);
            reactantCount = Integer.valueOf(tokenizer.nextToken()).intValue();
            logger.info("Expecting " + reactantCount + " reactants in file");
            productCount = Integer.valueOf(tokenizer.nextToken()).intValue();
            logger.info("Expecting " + productCount + " products in file");
        } catch (IOException | NumberFormatException exception) {
            logger.debug(exception);
            throw new CDKException("Error while counts line of RXN file", exception);
        }
        // now read the reactants
        try {
            for (int i = 1; i <= reactantCount; i++) {
                StringBuffer molFile = new StringBuffer();
                input.readLine(); // announceMDLFileLine
                String molFileLine = "";
                // Accumulate molfile lines up to and including the terminator line.
                // NOTE(review): the MDL CTfile terminator is "M  END" (two spaces);
                // confirm this literal matches the files being parsed.
                do {
                    molFileLine = input.readLine();
                    molFile.append(molFileLine);
                    molFile.append('\n');
                } while (!molFileLine.equals("M END"));
                // read MDL molfile content
                MDLReader reader = new MDLReader(new StringReader(molFile.toString()));
                IAtomContainer reactant = (IAtomContainer) reader.read(builder.newInstance(IAtomContainer.class));
                reader.close();
                // add reactant
                reaction.addReactant(reactant);
            }
        } catch (CDKException exception) {
            // rethrow exception from MDLReader
            throw exception;
        } catch (IOException | IllegalArgumentException exception) {
            logger.debug(exception);
            throw new CDKException("Error while reading reactant", exception);
        }
        // now read the products
        try {
            for (int i = 1; i <= productCount; i++) {
                StringBuffer molFile = new StringBuffer();
                input.readLine(); // String announceMDLFileLine =
                String molFileLine = "";
                do {
                    molFileLine = input.readLine();
                    molFile.append(molFileLine);
                    molFile.append('\n');
                } while (!molFileLine.equals("M END"));
                // read MDL molfile content
                MDLReader reader = new MDLReader(new StringReader(molFile.toString()), super.mode);
                IAtomContainer product = (IAtomContainer) reader.read(builder.newInstance(IAtomContainer.class));
                reader.close();
                // add product
                reaction.addProduct(product);
            }
        } catch (CDKException exception) {
            // rethrow exception from MDLReader
            throw exception;
        } catch (IOException | IllegalArgumentException exception) {
            logger.debug(exception);
            throw new CDKException("Error while reading products", exception);
        }
        // now try to map things, if wanted
        logger.info("Reading atom-atom mapping from file");
        // distribute all atoms over two AtomContainer's
        IAtomContainer reactingSide = builder.newInstance(IAtomContainer.class);
        Iterator<IAtomContainer> molecules = reaction.getReactants().atomContainers().iterator();
        while (molecules.hasNext()) {
            reactingSide.add(molecules.next());
        }
        IAtomContainer producedSide = builder.newInstance(IAtomContainer.class);
        molecules = reaction.getProducts().atomContainers().iterator();
        while (molecules.hasNext()) {
            producedSide.add(molecules.next());
        }
        // map the atoms: pair each reactant atom with the first product atom
        // that carries the same non-null ATOM_ATOM_MAPPING property
        int mappingCount = 0;
        for (int i = 0; i < reactingSide.getAtomCount(); i++) {
            for (int j = 0; j < producedSide.getAtomCount(); j++) {
                IAtom eductAtom = reactingSide.getAtom(i);
                IAtom productAtom = producedSide.getAtom(j);
                if (eductAtom.getProperty(CDKConstants.ATOM_ATOM_MAPPING) != null
                        && eductAtom.getProperty(CDKConstants.ATOM_ATOM_MAPPING).equals(productAtom.getProperty(CDKConstants.ATOM_ATOM_MAPPING))) {
                    reaction.addMapping(builder.newInstance(IMapping.class, eductAtom, productAtom));
                    mappingCount++;
                    break;
                }
            }
        }
        logger.info("Mapped atom pairs: " + mappingCount);
        return reaction;
    }
}
public class AbstractObjectStore {
    /**
     * Print a dump of the state.
     *
     * @param printWriter to be written to.
     */
    public void print(java.io.PrintWriter printWriter) {
        // Single header line describing the store's identity and configuration.
        printWriter.println("State Dump for:" + cclass.getName()
                + " sequenceNumber=" + sequenceNumber + "(int)"
                + " allocationAllowed=" + allocationAllowed + "(boolean)"
                + " storeName=" + storeName + "(String)"
                + "\n objectStoreIdentifier=" + objectStoreIdentifier + "(int)"
                + " storeStrategy=" + storeStrategy + "(int)" + strategyNames[storeStrategy] + "(String)"
                + " persistent=" + persistent + "(boolean)"
                + " objectManagerState=" + objectManagerState + "(ObjectManagerState)");
        printWriter.println();
        printWriter.println("inMemoryTokens...");
        // Tokens are held via weak/soft References; skip entries whose referent
        // has already been collected.
        for (java.util.Iterator tokenIterator = inMemoryTokens.values().iterator(); tokenIterator.hasNext();) {
            java.lang.ref.Reference reference = (java.lang.ref.Reference) tokenIterator.next();
            if (reference != null) {
                Token token = (Token) reference.get();
                if (token != null)
                    printWriter.println(token.toString());
            }
        } // for... inMemoryTokens.
    }
}
public class ConstructorInstrumenter { /** * Bytecode is rewritten to invoke this method ; it calls the sampler for the given class . Note * that , unless the javaagent command line argument " subclassesAlso " is specified , it won ' t do * anything if o is a subclass of the class that was supposed to be tracked . * @ param o the object passed to the samplers . */ @ SuppressWarnings ( "unchecked" ) public static void invokeSamplers ( Object o ) { } }
Class < ? > currentClass = o . getClass ( ) ; while ( currentClass != null ) { List < ConstructorCallback < ? > > samplers = samplerMap . get ( currentClass ) ; if ( samplers != null ) { // Leave in the @ SuppressWarnings , because we define - Werror , // and infrastructure sometimes runs with all warnings turned // on . This would be a great place for a typesafe // heterogeneous container , but that doesn ' t work with generic // types . for ( @ SuppressWarnings ( "rawtypes" ) ConstructorCallback sampler : samplers ) { sampler . sample ( o ) ; } // Return once the first list of registered samplers are found and // invoked . return ; } else { // When subclassesAlso is not specified ( default ) , return if no // samplers are registered with the type of the currently - constructed // object . Otherwise , traverse upward the class hierarchy . if ( ! subclassesAlso ) { return ; } currentClass = currentClass . getSuperclass ( ) ; } }
public class RemotingClient { /** * Decode response received from remoting server . * @ param data * Result data to decode * @ return Object deserialized from byte buffer data */ private Object decodeResult ( IoBuffer data ) { } }
log . debug ( "decodeResult - data limit: {}" , ( data != null ? data . limit ( ) : 0 ) ) ; processHeaders ( data ) ; int count = data . getUnsignedShort ( ) ; if ( count != 1 ) { throw new RuntimeException ( "Expected exactly one result but got " + count ) ; } Input input = new Input ( data ) ; String target = input . getString ( ) ; // expect " / onResult " log . debug ( "Target: {}" , target ) ; String nullString = input . getString ( ) ; // expect " null " log . debug ( "Null string: {}" , nullString ) ; // Read return value return Deserializer . deserialize ( input , Object . class ) ;
public class CheckBase { /** * Pops the top of the stack of active elements if the current position in the call stack corresponds to the one * that pushed the active elements . * < p > This method does not do any type checks , so take care to retrieve the elements with the same types used to push * to them onto the stack . * @ param < T > the type of the elements * @ return the active elements or null if the current call stack did not push any active elements onto the stack */ @ Nullable @ SuppressWarnings ( "unchecked" ) protected < T extends JavaElement > ActiveElements < T > popIfActive ( ) { } }
return ( ActiveElements < T > ) ( ! activations . isEmpty ( ) && activations . peek ( ) . depth == depth ? activations . pop ( ) : null ) ;
public class Session { /** * Stop the session inactivity timer . */ protected void stopInactivityTimer ( ) { } }
try ( Lock ignored = locker . lockIfNotHeld ( ) ) { if ( sessionInactivityTimer != null ) { sessionInactivityTimer . setIdleTimeout ( - 1 ) ; sessionInactivityTimer = null ; if ( log . isDebugEnabled ( ) ) { log . debug ( "Session inactivity timer stopped" ) ; } } }
public class RepositoryConfiguration { /** * Utility method to replace all system property variables found within the specified document . * @ param doc the document ; may not be null * @ return the modified document if system property variables were found , or the < code > doc < / code > instance if no such * variables were found */ protected static Document replaceSystemPropertyVariables ( Document doc ) { } }
if ( doc . isEmpty ( ) ) return doc ; Document modified = doc . withVariablesReplacedWithSystemProperties ( ) ; if ( modified == doc ) return doc ; // Otherwise , we changed some values . Note that the system properties can only be used in // string values , whereas the schema may expect non - string values . Therefore , we need to validate // the document against the schema and possibly perform some conversions of values . . . return SCHEMA_LIBRARY . convertValues ( modified , JSON_SCHEMA_URI ) ;
public class StreamSource { /** * Create a pushable { @ link PushableReactiveSeq } * < pre > * { @ code * PushableReactiveSeq < Integer > pushable = StreamSource . ofUnbounded ( ) * . reactiveSeq ( ) ; * pushable . getInput ( ) * . add ( 10 ) ; * on another thread * pushable . getStream ( ) * . collect ( CyclopsCollectors . toList ( ) ) / / [ 10] * < / pre > * @ return PushableStream that can accept data to push into a { @ see cyclops2 . stream . ReactiveSeq } * to push it to the Stream */ public < T > PushableReactiveSeq < T > reactiveSeq ( ) { } }
final Queue < T > q = createQueue ( ) ; return new PushableReactiveSeq < T > ( q , q . stream ( ) ) ;
public class EstimateSceneCalibrated {
    /**
     * Computes the actual angle between two viewing rays. The larger this angle is, the
     * better the triangulation of the feature's 3D location is in general.
     */
    double triangulationAngle(Point2D_F64 normA, Point2D_F64 normB, Se3_F64 a_to_b) {
        // the more parallel a line is worse the triangulation. Get rid of bad ideas early here
        arrowA.set(normA.x, normA.y, 1);
        arrowB.set(normB.x, normB.y, 1);
        // rotate ray A into the reference frame of view B before measuring the angle
        GeometryMath_F64.mult(a_to_b.R, arrowA, arrowA);
        // put them into the same reference frame
        return UtilVector3D_F64.acute(arrowA, arrowB);
    }
}
public class DetectEntitiesRequestMarshaller {
    /**
     * Marshall the given parameter object.
     */
    public void marshall(DetectEntitiesRequest detectEntitiesRequest, ProtocolMarshaller protocolMarshaller) {
        if (detectEntitiesRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Marshal each request field through its generated binding descriptor.
            protocolMarshaller.marshall(detectEntitiesRequest.getText(), TEXT_BINDING);
            protocolMarshaller.marshall(detectEntitiesRequest.getLanguageCode(), LANGUAGECODE_BINDING);
        } catch (Exception e) {
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class AjaxHelper { /** * Sets the current AJAX operation details . * @ param operation the current AJAX operation . * @ param trigger the current AJAX operation trigger and its context . */ public static void setCurrentOperationDetails ( final AjaxOperation operation , final ComponentWithContext trigger ) { } }
if ( operation == null ) { THREAD_LOCAL_OPERATION . remove ( ) ; } else { THREAD_LOCAL_OPERATION . set ( operation ) ; } if ( trigger == null ) { THREAD_LOCAL_COMPONENT_WITH_CONTEXT . remove ( ) ; } else { THREAD_LOCAL_COMPONENT_WITH_CONTEXT . set ( trigger ) ; }
public class FLACDecoder { /** * Read an array of metadata blocks . * @ return The array of metadata blocks * @ throws IOException On read error */ public Metadata [ ] readMetadata ( ) throws IOException { } }
readStreamSync ( ) ; ArrayList < Metadata > metadataList = new ArrayList < Metadata > ( ) ; Metadata metadata ; do { metadata = readNextMetadata ( ) ; metadataList . add ( metadata ) ; } while ( ! metadata . isLast ( ) ) ; return ( Metadata [ ] ) metadataList . toArray ( new Metadata [ 0 ] ) ;
public class TypeUtil { /** * Convert String value to instance . * @ param type The class of the instance , which may be a primitive TYPE * field . * @ param value The value as a string . * @ return The value as an Object . */ public static Object valueOf ( Class < ? > type , String value ) { } }
try { if ( type . equals ( java . lang . String . class ) ) { return value ; } Method m = class2Value . get ( type ) ; if ( m != null ) { return m . invoke ( null , value ) ; } if ( type . equals ( java . lang . Character . TYPE ) || type . equals ( java . lang . Character . class ) ) { return new Character ( value . charAt ( 0 ) ) ; } Constructor < ? > c = type . getConstructor ( java . lang . String . class ) ; return c . newInstance ( value ) ; } catch ( NoSuchMethodException | InstantiationException | IllegalAccessException e ) { // LogSupport . ignore ( log , e ) ; } catch ( InvocationTargetException e ) { if ( e . getTargetException ( ) instanceof Error ) { throw ( Error ) ( e . getTargetException ( ) ) ; } } return null ;
public class JobHistoryFileParserBase { /** * extract the string around Xmx in the java child opts " - Xmx1024m - verbose : gc " * @ param javaChildOptsStr * @ return string that represents the Xmx value */ static String extractXmxValueStr ( String javaChildOptsStr ) { } }
if ( StringUtils . isBlank ( javaChildOptsStr ) ) { LOG . info ( "Null/empty input argument to get xmxValue, returning " + Constants . DEFAULT_XMX_SETTING_STR ) ; return Constants . DEFAULT_XMX_SETTING_STR ; } // first split based on " - Xmx " in " - Xmx1024m - verbose : gc " final String JAVA_XMX_PREFIX = "-Xmx" ; String [ ] xmxStr = javaChildOptsStr . split ( JAVA_XMX_PREFIX ) ; if ( xmxStr . length >= 2 ) { // xmxStr [ 0 ] is ' ' // and XmxStr [ 1 ] is " 1024m - verbose : gc " String [ ] valuesStr = xmxStr [ 1 ] . split ( " " ) ; // split on whitespace if ( valuesStr . length >= 1 ) { // now valuesStr [ 0 ] is " 1024m " return valuesStr [ 0 ] ; } else { LOG . info ( "Strange Xmx setting, returning default " + javaChildOptsStr ) ; return Constants . DEFAULT_XMX_SETTING_STR ; } } else { // Xmx is not present in java child opts LOG . info ( "Xmx setting absent, returning default " + javaChildOptsStr ) ; return Constants . DEFAULT_XMX_SETTING_STR ; }
public class ReactiveMongoClientFactory { /** * Creates a { @ link MongoClient } using the given { @ code settings } . If the environment * contains a { @ code local . mongo . port } property , it is used to configure a client to * an embedded MongoDB instance . * @ param settings the settings * @ return the Mongo client */ public MongoClient createMongoClient ( MongoClientSettings settings ) { } }
Integer embeddedPort = getEmbeddedPort ( ) ; if ( embeddedPort != null ) { return createEmbeddedMongoClient ( settings , embeddedPort ) ; } return createNetworkMongoClient ( settings ) ;
public class FirestoreAdminClient { /** * Gets the metadata and configuration for a Field . * < p > Sample code : * < pre > < code > * try ( FirestoreAdminClient firestoreAdminClient = FirestoreAdminClient . create ( ) ) { * FieldName name = FieldName . of ( " [ PROJECT ] " , " [ DATABASE ] " , " [ COLLECTION _ ID ] " , " [ FIELD _ ID ] " ) ; * Field response = firestoreAdminClient . getField ( name . toString ( ) ) ; * < / code > < / pre > * @ param name A name of the form * ` projects / { project _ id } / databases / { database _ id } / collectionGroups / { collection _ id } / fields / { field _ id } ` * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ public final Field getField ( String name ) { } }
GetFieldRequest request = GetFieldRequest . newBuilder ( ) . setName ( name ) . build ( ) ; return getField ( request ) ;
public class CommonOps_DSCC { /** * Performs matrix addition : < br > * C = & alpha ; A + & beta ; B * @ param alpha scalar value multiplied against A * @ param A Matrix * @ param beta scalar value multiplied against B * @ param B Matrix * @ param C Output matrix . * @ param gw ( Optional ) Storage for internal workspace . Can be null . * @ param gx ( Optional ) Storage for internal workspace . Can be null . */ public static void add ( double alpha , DMatrixSparseCSC A , double beta , DMatrixSparseCSC B , DMatrixSparseCSC C , @ Nullable IGrowArray gw , @ Nullable DGrowArray gx ) { } }
if ( A . numRows != B . numRows || A . numCols != B . numCols ) throw new MatrixDimensionException ( "Inconsistent matrix shapes. " + stringShapes ( A , B ) ) ; C . reshape ( A . numRows , A . numCols ) ; ImplCommonOps_DSCC . add ( alpha , A , beta , B , C , gw , gx ) ;
public class ScanningQueryEngine { /** * Create a node sequence containing the results of the original query as defined by the supplied plan . * @ param originalQuery the original query command ; may not be null * @ param context the context in which the query is to be executed ; may not be null * @ param plan the optimized plan for the query ; may not be null * @ param columns the result column definition ; may not be null * @ param sources the query sources for the repository ; may not be null * @ return the sequence of results ; null only if the type of plan is not understood */ protected NodeSequence createNodeSequence ( QueryCommand originalQuery , ScanQueryContext context , PlanNode plan , Columns columns , QuerySources sources ) { } }
NodeSequence rows = null ; final String workspaceName = sources . getWorkspaceName ( ) ; final NodeCache cache = context . getNodeCache ( workspaceName ) ; final TypeSystem types = context . getTypeSystem ( ) ; final BufferManager bufferManager = context . getBufferManager ( ) ; switch ( plan . getType ( ) ) { case ACCESS : // If the ACCESS node is known to never have results . . . if ( plan . hasProperty ( Property . ACCESS_NO_RESULTS ) ) { rows = NodeSequence . emptySequence ( columns . getColumns ( ) . size ( ) ) ; } else { // Create the sequence for the plan node under the the ACCESS node . . . assert plan . getChildCount ( ) == 1 ; rows = createNodeSequence ( originalQuery , context , plan . getFirstChild ( ) , columns , sources ) ; } break ; case DEPENDENT_QUERY : assert plan . getChildCount ( ) == 2 ; // Create the independent query from the left . . . PlanNode indepPlan = plan . getFirstChild ( ) ; Columns indepColumns = context . columnsFor ( indepPlan ) ; String variableName = indepPlan . getProperty ( Property . VARIABLE_NAME , String . class ) ; NodeSequence independent = createNodeSequence ( originalQuery , context , indepPlan , indepColumns , sources ) ; // Create an extractor to get the value specified in the columns . . . Column column = indepColumns . getColumns ( ) . get ( 0 ) ; boolean allowMultiValued = false ; String typeName = indepColumns . getColumnTypeForProperty ( column . getSelectorName ( ) , column . getPropertyName ( ) ) ; TypeFactory < ? > type = context . getTypeSystem ( ) . getTypeFactory ( typeName ) ; ExtractFromRow indepExtractor = createExtractFromRow ( column . getSelectorName ( ) , column . getPropertyName ( ) , context , indepColumns , sources , type , allowMultiValued ) ; // Create the sequence for the dependent query . . . PlanNode depPlan = plan . getLastChild ( ) ; Columns depColumns = context . 
columnsFor ( depPlan ) ; NodeSequence dependent = createNodeSequence ( originalQuery , context , depPlan , depColumns , sources ) ; // now create the dependent query . . . rows = new DependentQuery ( independent , indepExtractor , type , dependent , variableName , context . getVariables ( ) ) ; break ; case DUP_REMOVE : assert plan . getChildCount ( ) == 1 ; if ( plan . getFirstChild ( ) . getType ( ) == Type . SORT ) { // There is a SORT below this DUP _ REMOVE , and we can do that in one fell swoop with the sort . . . rows = createNodeSequence ( originalQuery , context , plan . getFirstChild ( ) , columns , sources ) ; } else { // Create the sequence for the plan node under the DUP _ REMOVE . . . rows = createNodeSequence ( originalQuery , context , plan . getFirstChild ( ) , columns , sources ) ; if ( ! rows . isEmpty ( ) && ! ( rows instanceof DistinctSequence ) ) { // Wrap that with a sequence that removes duplicates . . . boolean useHeap = false ; rows = new DistinctSequence ( rows , context . getTypeSystem ( ) , context . getBufferManager ( ) , useHeap ) ; } } break ; case GROUP : throw new UnsupportedOperationException ( ) ; case JOIN : // Create the components under the JOIN . . . assert plan . getChildCount ( ) == 2 ; PlanNode leftPlan = plan . getFirstChild ( ) ; PlanNode rightPlan = plan . getLastChild ( ) ; // Define the columns for each side , taken from the supplied columns . . . Columns leftColumns = context . columnsFor ( leftPlan ) ; Columns rightColumns = context . columnsFor ( rightPlan ) ; // Query context for the join ( must remove isExists condition ) . ScanQueryContext joinQueryContext = context ; if ( context . getHints ( ) . isExistsQuery ) { // must not push down a LIMIT 1 condition to joins . PlanHints joinPlanHints = context . getHints ( ) . clone ( ) ; joinPlanHints . isExistsQuery = false ; joinQueryContext = context . 
with ( joinPlanHints ) ; } NodeSequence left = createNodeSequence ( originalQuery , joinQueryContext , leftPlan , leftColumns , sources ) ; NodeSequence right = createNodeSequence ( originalQuery , joinQueryContext , rightPlan , rightColumns , sources ) ; // Figure out the join algorithm . . . JoinAlgorithm algorithm = plan . getProperty ( Property . JOIN_ALGORITHM , JoinAlgorithm . class ) ; JoinType joinType = plan . getProperty ( Property . JOIN_TYPE , JoinType . class ) ; JoinCondition joinCondition = plan . getProperty ( Property . JOIN_CONDITION , JoinCondition . class ) ; boolean pack = false ; boolean useHeap = false ; if ( 0 >= right . getRowCount ( ) && right . getRowCount ( ) < 100 ) useHeap = true ; ExtractFromRow leftExtractor = null ; ExtractFromRow rightExtractor = null ; RangeProducer < ? > rangeProducer = null ; switch ( algorithm ) { case NESTED_LOOP : // rows = new NestedLoopJoinComponent ( context , left , right , joinCondition , joinType ) ; // break ; case MERGE : if ( joinCondition instanceof SameNodeJoinCondition ) { SameNodeJoinCondition condition = ( SameNodeJoinCondition ) joinCondition ; // check if the JOIN was not reversed by an optimization boolean joinReversed = ! leftColumns . getSelectorNames ( ) . contains ( condition . getSelector1Name ( ) ) ; int leftIndex ; int rightIndex ; if ( joinReversed ) { // figure out the row indexes for the different selectors . . . leftIndex = leftColumns . getSelectorIndex ( condition . getSelector2Name ( ) ) ; rightIndex = rightColumns . getSelectorIndex ( condition . getSelector1Name ( ) ) ; } else { leftIndex = leftColumns . getSelectorIndex ( condition . getSelector1Name ( ) ) ; rightIndex = rightColumns . getSelectorIndex ( condition . getSelector2Name ( ) ) ; } String relativePath = condition . getSelector2Path ( ) ; if ( relativePath != null ) { // Get extractors that will get the path of the nodes . . . PathFactory pathFactory = context . getExecutionContext ( ) . getValueFactories ( ) . 
getPathFactory ( ) ; Path relPath = pathFactory . create ( relativePath ) ; if ( joinReversed ) { leftExtractor = RowExtractors . extractRelativePath ( leftIndex , relPath , cache , types ) ; rightExtractor = RowExtractors . extractPath ( rightIndex , cache , types ) ; } else { leftExtractor = RowExtractors . extractPath ( leftIndex , cache , types ) ; rightExtractor = RowExtractors . extractRelativePath ( rightIndex , relPath , cache , types ) ; } } else { // The nodes must be the same node . . . leftExtractor = RowExtractors . extractNodeKey ( leftIndex , cache , types ) ; rightExtractor = RowExtractors . extractNodeKey ( rightIndex , cache , types ) ; } } else if ( joinCondition instanceof ChildNodeJoinCondition ) { ChildNodeJoinCondition condition = ( ChildNodeJoinCondition ) joinCondition ; // check if the JOIN was not reversed by an optimization boolean joinReversed = ! leftColumns . getSelectorNames ( ) . contains ( condition . getParentSelectorName ( ) ) ; if ( joinReversed ) { int leftIndex = leftColumns . getSelectorIndex ( condition . getChildSelectorName ( ) ) ; int rightIndex = rightColumns . getSelectorIndex ( condition . getParentSelectorName ( ) ) ; leftExtractor = RowExtractors . extractParentNodeKey ( leftIndex , cache , types ) ; rightExtractor = RowExtractors . extractNodeKey ( rightIndex , cache , types ) ; } else { int leftIndex = leftColumns . getSelectorIndex ( condition . getParentSelectorName ( ) ) ; int rightIndex = rightColumns . getSelectorIndex ( condition . getChildSelectorName ( ) ) ; leftExtractor = RowExtractors . extractNodeKey ( leftIndex , cache , types ) ; rightExtractor = RowExtractors . extractParentNodeKey ( rightIndex , cache , types ) ; } } else if ( joinCondition instanceof EquiJoinCondition ) { EquiJoinCondition condition = ( EquiJoinCondition ) joinCondition ; // check if the JOIN was not reversed by an optimization boolean joinReversed = ! leftColumns . getSelectorNames ( ) . contains ( condition . 
getSelector1Name ( ) ) ; String sel1 = condition . getSelector1Name ( ) ; String sel2 = condition . getSelector2Name ( ) ; String prop1 = condition . getProperty1Name ( ) ; String prop2 = condition . getProperty2Name ( ) ; if ( joinReversed ) { leftExtractor = createExtractFromRow ( sel2 , prop2 , joinQueryContext , leftColumns , sources , null , true ) ; rightExtractor = createExtractFromRow ( sel1 , prop1 , joinQueryContext , rightColumns , sources , null , true ) ; } else { leftExtractor = createExtractFromRow ( sel1 , prop1 , joinQueryContext , leftColumns , sources , null , true ) ; rightExtractor = createExtractFromRow ( sel2 , prop2 , joinQueryContext , rightColumns , sources , null , true ) ; } } else if ( joinCondition instanceof DescendantNodeJoinCondition ) { DescendantNodeJoinCondition condition = ( DescendantNodeJoinCondition ) joinCondition ; // For this to work , we want the ancestors to be on the left , so that the descendants can quickly // be found given a path of each ancestor . . . assert leftColumns . getSelectorNames ( ) . contains ( condition . getAncestorSelectorName ( ) ) ; String ancestorSelector = condition . getAncestorSelectorName ( ) ; String descendantSelector = condition . getDescendantSelectorName ( ) ; int ancestorSelectorIndex = leftColumns . getSelectorIndex ( ancestorSelector ) ; int descendantSelectorIndex = rightColumns . getSelectorIndex ( descendantSelector ) ; leftExtractor = RowExtractors . extractPath ( ancestorSelectorIndex , cache , types ) ; rightExtractor = RowExtractors . extractPath ( descendantSelectorIndex , cache , types ) ; // This is the only time we need a RangeProducer . . . final PathFactory paths = context . getExecutionContext ( ) . getValueFactories ( ) . getPathFactory ( ) ; rangeProducer = new RangeProducer < Path > ( ) { @ Override public Range < Path > getRange ( Path leftPath ) { if ( leftPath . 
isRoot ( ) ) { // All paths are descendants of the root return new Range < > ( leftPath , false , null , true ) ; } // Given the path of the node on the left side of the join , find the range of all paths // that might be considered descendants of the left path . . . . boolean includeLower = false ; // we don ' t want to include the left node ; only descendants // The upper bound path is the same as the left path , just with an incremented SNS . . . Path . Segment lastSegment = leftPath . getLastSegment ( ) ; Path . Segment upperSegment = paths . createSegment ( lastSegment . getName ( ) , lastSegment . getIndex ( ) + 1 ) ; Path upperBoundPath = paths . create ( leftPath . getParent ( ) , upperSegment ) ; return new Range < > ( leftPath , includeLower , upperBoundPath , false ) ; } } ; } else { assert false : "Unable to use merge algorithm with join conditions: " + joinCondition ; throw new UnsupportedOperationException ( ) ; } break ; } // Perform conversion if required . . . assert leftExtractor != null ; assert rightExtractor != null ; TypeFactory < ? > leftType = leftExtractor . getType ( ) ; TypeFactory < ? > rightType = rightExtractor . getType ( ) ; if ( ! leftType . equals ( rightType ) ) { // wrap the right extractor with a converting extractor . . . final TypeFactory < ? > commonType = context . getTypeSystem ( ) . getCompatibleType ( leftType , rightType ) ; if ( ! leftType . equals ( commonType ) ) leftExtractor = RowExtractors . convert ( leftExtractor , commonType ) ; if ( ! rightType . equals ( commonType ) ) rightExtractor = RowExtractors . convert ( rightExtractor , commonType ) ; } rows = new HashJoinSequence ( workspaceName , left , right , leftExtractor , rightExtractor , joinType , context . getBufferManager ( ) , cache , rangeProducer , pack , useHeap ) ; // For each Constraint object applied to the JOIN , simply create a SelectComponent on top . . . RowFilter filter = null ; List < Constraint > constraints = plan . 
getPropertyAsList ( Property . JOIN_CONSTRAINTS , Constraint . class ) ; if ( constraints != null ) { for ( Constraint constraint : constraints ) { RowFilter constraintFilter = createRowFilter ( constraint , context , columns , sources ) ; filter = NodeSequence . requireBoth ( filter , constraintFilter ) ; } } rows = NodeSequence . filter ( rows , filter ) ; // even if filter is null break ; case LIMIT : // Create the sequence for the plan node under the LIMIT . . . assert plan . getChildCount ( ) == 1 ; rows = createNodeSequence ( originalQuery , context , plan . getFirstChild ( ) , columns , sources ) ; // Calculate the limit . . . Integer rowLimit = plan . getProperty ( Property . LIMIT_COUNT , Integer . class ) ; Integer offset = plan . getProperty ( Property . LIMIT_OFFSET , Integer . class ) ; Limit limit = Limit . NONE ; if ( rowLimit != null ) limit = limit . withRowLimit ( rowLimit . intValue ( ) ) ; if ( offset != null ) limit = limit . withOffset ( offset . intValue ( ) ) ; // Then create the limited sequence . . . if ( ! limit . isUnlimited ( ) ) { rows = NodeSequence . limit ( rows , limit ) ; } break ; case NULL : // No results . . . rows = NodeSequence . emptySequence ( columns . getColumns ( ) . size ( ) ) ; break ; case PROJECT : // Nothing to do , since the projected columns will be accessed as needed when the results are processed . Instead , // just process the PROJECT node ' s only child . . . PlanNode child = plan . getFirstChild ( ) ; columns = context . columnsFor ( child ) ; rows = createNodeSequence ( originalQuery , context , child , columns , sources ) ; break ; case SELECT : // Create the sequence for the plan node under the SELECT . . . assert plan . getChildCount ( ) == 1 ; rows = createNodeSequence ( originalQuery , context , plan . getFirstChild ( ) , columns , sources ) ; Constraint constraint = plan . getProperty ( Property . SELECT_CRITERIA , Constraint . 
class ) ; filter = createRowFilter ( constraint , context , columns , sources ) ; rows = NodeSequence . filter ( rows , filter ) ; break ; case SET_OPERATION : Operation operation = plan . getProperty ( Property . SET_OPERATION , Operation . class ) ; boolean all = plan . getProperty ( Property . SET_USE_ALL , Boolean . class ) ; PlanNode firstPlan = plan . getFirstChild ( ) ; PlanNode secondPlan = plan . getLastChild ( ) ; Columns firstColumns = context . columnsFor ( firstPlan ) ; Columns secondColumns = context . columnsFor ( secondPlan ) ; NodeSequence first = createNodeSequence ( originalQuery , context , firstPlan , firstColumns , sources ) ; NodeSequence second = createNodeSequence ( originalQuery , context , secondPlan , secondColumns , sources ) ; useHeap = 0 >= second . getRowCount ( ) && second . getRowCount ( ) < 100 ; if ( first . width ( ) != second . width ( ) ) { // A set operation requires that the ' first ' and ' second ' sequences have the same width , but this is // not necessarily the case ( e . g . , when one side involves a JOIN but the other does not ) . The columns // will dictate which subset of selector indexes in the sequences should be used . first = NodeSequence . slice ( first , firstColumns ) ; second = NodeSequence . slice ( second , secondColumns ) ; assert first . width ( ) == second . width ( ) ; } pack = false ; switch ( operation ) { case UNION : { // If one of them is empty , return the other . . . if ( first . isEmpty ( ) ) return second ; if ( second . isEmpty ( ) ) return first ; // This is really just a sequence with the two parts . . . rows = NodeSequence . append ( first , second ) ; break ; } case INTERSECT : { // If one of them is empty , there are no results . . . if ( first . isEmpty ( ) ) return first ; if ( second . 
isEmpty ( ) ) return second ; rows = new IntersectSequence ( workspaceName , first , second , types , bufferManager , cache , pack , useHeap ) ; break ; } case EXCEPT : { // If the second is empty , there ' s nothing to exclude . . . if ( second . isEmpty ( ) ) return first ; rows = new ExceptSequence ( workspaceName , first , second , types , bufferManager , cache , pack , useHeap ) ; break ; } } if ( ! all ) { useHeap = false ; rows = new DistinctSequence ( rows , context . getTypeSystem ( ) , context . getBufferManager ( ) , useHeap ) ; } break ; case SORT : assert plan . getChildCount ( ) == 1 ; PlanNode delegate = plan . getFirstChild ( ) ; boolean allowDuplicates = true ; if ( delegate . getType ( ) == Type . DUP_REMOVE ) { // This SORT already removes duplicates , so we can skip the first child . . . delegate = delegate . getFirstChild ( ) ; allowDuplicates = false ; } PlanNode parent = plan . getParent ( ) ; if ( parent != null && parent . getType ( ) == Type . DUP_REMOVE ) { // The parent is a DUP _ REMOVE ( shouldn ' t really happen in an optimized plan ) , we should disallow duplicates allowDuplicates = false ; } // Create the sequence for the delegate plan node . . . rows = createNodeSequence ( originalQuery , context , delegate , columns , sources ) ; if ( ! rows . isEmpty ( ) ) { // Prepare to wrap this delegate sequence based upon the SORT _ ORDER _ BY . . . List < Object > orderBys = plan . getPropertyAsList ( Property . SORT_ORDER_BY , Object . class ) ; if ( ! orderBys . isEmpty ( ) ) { // Create an extractor from the orderings that we ' ll use for the sorting . . . ExtractFromRow sortExtractor = null ; pack = false ; useHeap = false ; NullOrder nullOrder = null ; if ( orderBys . get ( 0 ) instanceof Ordering ) { List < Ordering > orderings = new ArrayList < Ordering > ( orderBys . size ( ) ) ; for ( Object orderBy : orderBys ) { orderings . 
add ( ( Ordering ) orderBy ) ; } // Determine the alias - to - name mappings for the selectors in the orderings . . . Map < SelectorName , SelectorName > sourceNamesByAlias = new HashMap < SelectorName , SelectorName > ( ) ; for ( PlanNode source : plan . findAllAtOrBelow ( Type . SOURCE ) ) { SelectorName name = source . getProperty ( Property . SOURCE_NAME , SelectorName . class ) ; SelectorName alias = source . getProperty ( Property . SOURCE_ALIAS , SelectorName . class ) ; if ( alias != null ) sourceNamesByAlias . put ( alias , name ) ; } // If there are multiple orderings , then we ' ll never have nulls . But if there is just one ordering , // we have to handle nulls . . . if ( orderings . size ( ) == 1 ) { nullOrder = orderings . get ( 0 ) . nullOrder ( ) ; } // Now create the single sorting extractor . . . sortExtractor = createSortingExtractor ( orderings , sourceNamesByAlias , context , columns , sources ) ; } else { // Order by the location ( s ) because it ' s before a merge - join . . . final TypeFactory < ? > keyType = context . getTypeSystem ( ) . getReferenceFactory ( ) ; List < ExtractFromRow > extractors = new ArrayList < > ( ) ; for ( Object ordering : orderBys ) { SelectorName selectorName = ( SelectorName ) ordering ; final int index = columns . getSelectorIndex ( selectorName . name ( ) ) ; extractors . add ( new ExtractFromRow ( ) { @ Override public TypeFactory < ? > getType ( ) { return keyType ; } @ Override public Object getValueInRow ( RowAccessor row ) { CachedNode node = row . getNode ( index ) ; return node != null ? node . getKey ( ) : null ; } } ) ; } // This is jsut for a merge join , so use standard null ordering . . . nullOrder = NullOrder . NULLS_LAST ; // Now create the single sorting extractor . . . sortExtractor = RowExtractors . extractorWith ( extractors ) ; } // Now create the sorting sequence . . . 
if ( sortExtractor != null ) { rows = new SortingSequence ( workspaceName , rows , sortExtractor , bufferManager , cache , pack , useHeap , allowDuplicates , nullOrder ) ; } } } break ; case SOURCE : // Otherwise , just grab all of the nodes . . . rows = createNodeSequenceForSource ( originalQuery , context , plan , columns , sources ) ; break ; default : break ; } return rows ;
public class ExtendedBufferedReader { /** * Reads all characters up to ( but not including ) the given character . * @ param c the character to read up to * @ return the string up to the character < code > c < / code > * @ throws IOException */ public String readUntil ( char c ) throws IOException { } }
if ( lookaheadChar == UNDEFINED ) { lookaheadChar = super . read ( ) ; } line . clear ( ) ; // reuse while ( lookaheadChar != c && lookaheadChar != END_OF_STREAM ) { line . append ( ( char ) lookaheadChar ) ; if ( lookaheadChar == '\n' ) { lineCounter ++ ; } lastChar = lookaheadChar ; lookaheadChar = super . read ( ) ; } return line . toString ( ) ;
public class Pql { /** * Creates a { @ link Value } from the value i . e . a { @ link TextValue } for a value of type { @ code * String } , { @ link BooleanValue } for type { @ code Boolean } , { @ link NumberValue } for type { @ code * Double } , { @ code Long } , or { @ code Integer } , { @ link DateTimeValue } for type { @ link DateTime } , and * { @ link DateValue } for type { @ link Date } . If the value is a { @ code Value } , the value is * returned . If the value is { @ code null } , an empty { @ link TextValue } is returned . * @ param value the value to convert * @ return the constructed value of the appropriate type * @ throws IllegalArgumentException if value cannot be converted */ public static Value createValue ( Object value ) { } }
if ( value instanceof Value ) { return ( Value ) value ; } else if ( value == null ) { return new TextValue ( ) ; } else { if ( value instanceof Boolean ) { BooleanValue booleanValue = new BooleanValue ( ) ; booleanValue . setValue ( ( Boolean ) value ) ; return booleanValue ; } else if ( value instanceof Double || value instanceof Long || value instanceof Integer ) { NumberValue numberValue = new NumberValue ( ) ; numberValue . setValue ( value . toString ( ) ) ; return numberValue ; } else if ( value instanceof String ) { TextValue textValue = new TextValue ( ) ; textValue . setValue ( ( String ) value ) ; return textValue ; } else if ( value instanceof DateTime ) { DateTimeValue dateTimeValue = new DateTimeValue ( ) ; dateTimeValue . setValue ( ( DateTime ) value ) ; return dateTimeValue ; } else if ( value instanceof Date ) { DateValue dateValue = new DateValue ( ) ; dateValue . setValue ( ( Date ) value ) ; return dateValue ; } else if ( value instanceof Targeting ) { TargetingValue targetingValue = new TargetingValue ( ) ; targetingValue . setValue ( ( Targeting ) value ) ; return targetingValue ; } else if ( value instanceof Set < ? > ) { SetValue setValue = new SetValue ( ) ; Set < Value > values = new LinkedHashSet < Value > ( ) ; for ( Object entry : ( Set < ? > ) value ) { validateSetValueEntryForSet ( createValue ( entry ) , values ) ; values . add ( createValue ( entry ) ) ; } setValue . setValues ( values . toArray ( new Value [ ] { } ) ) ; return setValue ; } else { throw new IllegalArgumentException ( "Unsupported Value type [" + value . getClass ( ) + "]" ) ; } }
public class ULocale { /** * < strong > [ icu ] < / strong > Based on a HTTP formatted list of acceptable locales , determine an available * locale for the user . NullPointerException is thrown if acceptLanguageList or * availableLocales is null . If fallback is non - null , it will contain true if a * fallback locale ( one not in the acceptLanguageList ) was returned . The value on * entry is ignored . ULocale will be one of the locales in availableLocales , or the * ROOT ULocale if if a ROOT locale was used as a fallback ( because nothing else in * availableLocales matched ) . No ULocale array element should be null ; behavior is * undefined if this is the case . * @ param acceptLanguageList list in HTTP " Accept - Language : " format of acceptable locales * @ param availableLocales list of available locales . One of these will be returned . * @ param fallback if non - null , a 1 - element array containing a boolean to be set with * the fallback status * @ return one of the locales from the availableLocales list , or null if none match */ public static ULocale acceptLanguage ( String acceptLanguageList , ULocale [ ] availableLocales , boolean [ ] fallback ) { } }
if ( acceptLanguageList == null ) { throw new NullPointerException ( ) ; } ULocale acceptList [ ] = null ; try { acceptList = parseAcceptLanguage ( acceptLanguageList , true ) ; } catch ( ParseException pe ) { acceptList = null ; } if ( acceptList == null ) { return null ; } return acceptLanguage ( acceptList , availableLocales , fallback ) ;
public class FrameBufferHA { /** * This is called from ( Android ) < code > MapView . onDraw ( ) < / code > . */ @ Override public void draw ( GraphicContext graphicContext ) { } }
graphicContext . fillColor ( this . displayModel . getBackgroundColor ( ) ) ; // Swap bitmaps before the Canvas . drawBitmap to prevent flickering as much as possible swapBitmaps ( ) ; if ( this . odBitmap != null ) { graphicContext . drawBitmap ( this . odBitmap , this . matrix ) ; }
public class AbstractLayoutManager {
    /**
     * Sets up the final JRStyle of the column element (for the detail band) combining the column's
     * conditional styles with the report's odd-row background striping.
     *
     * Order matters: conditional styles are added first and the generic odd/even "basic" pair is
     * appended last — JasperReports evaluates conditional styles in insertion order, so the
     * condition-specific entries take precedence over the plain striping.
     *
     * @param jrstyle the JasperReports design style to which conditional styles are added
     * @param column  the report column supplying the conditional styles
     */
    private void setUpConditionStyles(JRDesignStyle jrstyle, AbstractColumn column) {
        // Case 1: odd-row striping requested and the column defines no conditional styles of its
        // own — add a single opaque odd-row background style and stop.
        if (getReport().getOptions().isPrintBackgroundOnOddRows() && Utils.isEmpty(column.getConditionalStyles())) {
            JRDesignExpression expression = new JRDesignExpression();
            expression.setValueClass(Boolean.class);
            expression.setText(EXPRESSION_TRUE_WHEN_ODD);
            Style oddRowBackgroundStyle = getReport().getOptions().getOddRowBackgroundStyle();
            JRDesignConditionalStyle condStyle = new JRDesignConditionalStyle();
            condStyle.setBackcolor(oddRowBackgroundStyle.getBackgroundColor());
            condStyle.setMode(ModeEnum.OPAQUE);
            condStyle.setConditionExpression(expression);
            jrstyle.addConditionalStyle(condStyle);
            return;
        }
        // Case 2: no striping requested and no conditional styles — nothing to do.
        if (Utils.isEmpty(column.getConditionalStyles()))
            return;
        // Case 3: the column has conditional styles; each may need to be split into odd/even
        // variants when striping is active.
        for (ConditionalStyle condition : column.getConditionalStyles()) {
            if (getReport().getOptions().isPrintBackgroundOnOddRows()
                && Transparency.TRANSPARENT == condition.getStyle().getTransparency()) {
                // condition style + odd row (only if conditional style's background is transparent)
                JRDesignExpression expressionForConditionalStyle = ExpressionUtils.getExpressionForConditionalStyle(condition, column.getTextForExpression());
                String expStr = JRExpressionUtil.getExpressionText(expressionForConditionalStyle);
                // ODD rows: the user condition AND the odd-row test, with the striping backcolor.
                JRDesignExpression expressionOdd = new JRDesignExpression();
                expressionOdd.setValueClass(Boolean.class);
                expressionOdd.setText("new java.lang.Boolean(" + EXPRESSION_TRUE_WHEN_ODD + ".booleanValue() && ((java.lang.Boolean)" + expStr + ").booleanValue() )");
                Style oddRowBackgroundStyle = getReport().getOptions().getOddRowBackgroundStyle();
                JRDesignConditionalStyle condStyleOdd = makeConditionalStyle(condition.getStyle());
                // Utils.copyProperties(condStyleOdd, condition.getStyle().transform());
                condStyleOdd.setBackcolor(oddRowBackgroundStyle.getBackgroundColor());
                condStyleOdd.setMode(ModeEnum.OPAQUE);
                condStyleOdd.setConditionExpression(expressionOdd);
                jrstyle.addConditionalStyle(condStyleOdd);
                // EVEN rows: the user condition AND the even-row test, keeping the style's own look.
                JRDesignExpression expressionEven = new JRDesignExpression();
                expressionEven.setValueClass(Boolean.class);
                expressionEven.setText("new java.lang.Boolean(" + EXPRESSION_TRUE_WHEN_EVEN + ".booleanValue() && ((java.lang.Boolean)" + expStr + ").booleanValue() )");
                JRDesignConditionalStyle condStyleEven = makeConditionalStyle(condition.getStyle());
                condStyleEven.setConditionExpression(expressionEven);
                jrstyle.addConditionalStyle(condStyleEven);
            } else {
                // No odd row, just the conditional style
                JRDesignExpression expression = ExpressionUtils.getExpressionForConditionalStyle(condition, column.getTextForExpression());
                JRDesignConditionalStyle condStyle = makeConditionalStyle(condition.getStyle());
                condStyle.setConditionExpression(expression);
                jrstyle.addConditionalStyle(condStyle);
            }
        }
        // The last condition is the basic one: plain odd/even striping appended after all
        // condition-specific entries so it only applies when none of them matched.
        // ODD
        if (getReport().getOptions().isPrintBackgroundOnOddRows()) {
            JRDesignExpression expressionOdd = new JRDesignExpression();
            expressionOdd.setValueClass(Boolean.class);
            expressionOdd.setText(EXPRESSION_TRUE_WHEN_ODD);
            Style oddRowBackgroundStyle = getReport().getOptions().getOddRowBackgroundStyle();
            JRDesignConditionalStyle condStyleOdd = new JRDesignConditionalStyle();
            condStyleOdd.setBackcolor(oddRowBackgroundStyle.getBackgroundColor());
            condStyleOdd.setMode(ModeEnum.OPAQUE);
            condStyleOdd.setConditionExpression(expressionOdd);
            jrstyle.addConditionalStyle(condStyleOdd);
            // EVEN rows reuse the base style's own backcolor/mode so even rows look unchanged.
            JRDesignExpression expressionEven = new JRDesignExpression();
            expressionEven.setValueClass(Boolean.class);
            expressionEven.setText(EXPRESSION_TRUE_WHEN_EVEN);
            JRDesignConditionalStyle condStyleEven = new JRDesignConditionalStyle();
            condStyleEven.setBackcolor(jrstyle.getBackcolor());
            condStyleEven.setMode(jrstyle.getModeValue());
            condStyleEven.setConditionExpression(expressionEven);
            jrstyle.addConditionalStyle(condStyleEven);
        }
    }
}
public class FloatArrays {

    /**
     * Counts how many entries of {@code array} are equal to {@code val}.
     *
     * <p>Comparison uses the primitive {@code ==} operator, so {@code NaN}
     * values are never counted ({@code NaN != NaN}) and {@code 0.0f} matches
     * {@code -0.0f}.
     *
     * @param array the array to scan
     * @param val   the value to look for
     * @return the number of occurrences of {@code val} in {@code array}
     */
    public static int count(float[] array, float val) {
        int occurrences = 0;
        for (float element : array) {
            if (element == val) {
                occurrences++;
            }
        }
        return occurrences;
    }
}
public class IOUtils {

    /**
     * Generates a unique, absolute filesystem path with the given name prefix and
     * extension, without leaving a file behind.
     *
     * <p>Implementation note: a temporary file is created to reserve a unique name
     * and is deleted immediately, so there is an inherent check-then-use race:
     * another process may create the same path before the caller uses it. Callers
     * that need a race-free temporary file should keep the created file instead.
     *
     * @param name the file-name prefix (at least three characters, per
     *             {@link File#createTempFile(String, String)})
     * @param ext  the file-name suffix, e.g. {@code ".tmp"}
     * @return the absolute path of a unique, currently non-existent file
     * @throws IOException if the temporary file cannot be created
     */
    public static String uniquePath(String name, String ext) throws IOException {
        File file = File.createTempFile(name, ext);
        String path = file.getAbsolutePath();
        // Fix: the original ignored delete()'s return value, silently leaking the
        // temp file whenever immediate deletion fails (e.g. on Windows while a
        // scanner holds the handle). Fall back to JVM-exit cleanup in that case.
        if (!file.delete()) {
            file.deleteOnExit();
        }
        return path;
    }
}
public class Config {

    /**
     * Initialize the Config object. This method should only be called once.
     * Loads application properties (from an explicit file if the system property
     * is set, otherwise from the classpath), then Alpine and application version
     * metadata from the classpath. Failures are logged, never thrown.
     */
    private void init() {
        // Idempotence guard: a second call is a no-op once properties are loaded.
        if (properties != null) {
            return;
        }
        LOGGER.info("Initializing Configuration");
        properties = new Properties();
        // An explicit properties file may be pointed to via the system property;
        // PathUtil.resolve presumably expands path placeholders — TODO confirm.
        final String alpineAppProp = PathUtil.resolve(System.getProperty(ALPINE_APP_PROP));
        if (StringUtils.isNotBlank(alpineAppProp)) {
            LOGGER.info("Loading application properties from " + alpineAppProp);
            try (InputStream fileInputStream = Files.newInputStream((new File(alpineAppProp)).toPath())) {
                properties.load(fileInputStream);
            } catch (FileNotFoundException e) {
                LOGGER.error("Could not find property file " + alpineAppProp);
            } catch (IOException e) {
                LOGGER.error("Unable to load " + alpineAppProp);
            }
        } else {
            // Fall back to the bundled properties file on the classpath.
            LOGGER.info("System property " + ALPINE_APP_PROP + " not specified");
            LOGGER.info("Loading " + PROP_FILE + " from classpath");
            try (InputStream in = Thread.currentThread().getContextClassLoader().getResourceAsStream(PROP_FILE)) {
                properties.load(in);
            } catch (IOException e) {
                LOGGER.error("Unable to load " + PROP_FILE);
            }
        }
        if (properties.size() == 0) {
            // Logged as fatal, but execution continues; callers see an empty config.
            LOGGER.error("A fatal error occurred loading application properties. Please correct the issue and restart the application.");
        }
        // Framework (Alpine) version metadata, always read from the classpath.
        alpineVersionProperties = new Properties();
        try (InputStream in = Thread.currentThread().getContextClassLoader().getResourceAsStream(ALPINE_VERSION_PROP_FILE)) {
            alpineVersionProperties.load(in);
        } catch (IOException e) {
            LOGGER.error("Unable to load " + ALPINE_VERSION_PROP_FILE);
        }
        if (alpineVersionProperties.size() == 0) {
            LOGGER.error("A fatal error occurred loading Alpine version information. Please correct the issue and restart the application.");
        }
        // Application version metadata, always read from the classpath.
        applicationVersionProperties = new Properties();
        try (InputStream in = Thread.currentThread().getContextClassLoader().getResourceAsStream(APPLICATION_VERSION_PROP_FILE)) {
            applicationVersionProperties.load(in);
        } catch (IOException e) {
            LOGGER.error("Unable to load " + APPLICATION_VERSION_PROP_FILE);
        }
        if (applicationVersionProperties.size() == 0) {
            LOGGER.error("A fatal error occurred loading application version information. Please correct the issue and restart the application.");
        }
    }
}
public class SqlTableAlert {

    /**
     * Updates an existing alert record identified by {@code alertId}.
     *
     * <p>The prepared statement {@code "alert.ps.update"} binds the alert fields at
     * positions 1-14 and the primary key {@code alertId} last (position 15); the
     * binding order below must match the statement's SQL exactly.
     *
     * @throws DatabaseException wrapping any {@link SQLException} from the driver
     * @see org.parosproxy.paros.db.paros.TableAlert#update(int, java.lang.String, int, int,
     *      java.lang.String, java.lang.String, java.lang.String, java.lang.String,
     *      java.lang.String, java.lang.String, java.lang.String, java.lang.String, int, int, int)
     */
    @Override
    public synchronized void update(int alertId, String alert, int risk, int confidence,
            String description, String uri, String param, String attack, String otherInfo,
            String solution, String reference, String evidence, int cweId, int wascId,
            int sourceHistoryId) throws DatabaseException {
        SqlPreparedStatementWrapper psUpdate = null;
        try {
            psUpdate = DbSQL.getSingleton().getPreparedStatement("alert.ps.update");
            // Bind columns in the order declared by the "alert.ps.update" statement.
            psUpdate.getPs().setString(1, alert);
            psUpdate.getPs().setInt(2, risk);
            psUpdate.getPs().setInt(3, confidence);
            psUpdate.getPs().setString(4, description);
            psUpdate.getPs().setString(5, uri);
            psUpdate.getPs().setString(6, param);
            psUpdate.getPs().setString(7, attack);
            psUpdate.getPs().setString(8, otherInfo);
            psUpdate.getPs().setString(9, solution);
            psUpdate.getPs().setString(10, reference);
            psUpdate.getPs().setString(11, evidence);
            psUpdate.getPs().setInt(12, cweId);
            psUpdate.getPs().setInt(13, wascId);
            psUpdate.getPs().setInt(14, sourceHistoryId);
            // WHERE clause parameter: the record's primary key.
            psUpdate.getPs().setInt(15, alertId);
            psUpdate.getPs().executeUpdate();
        } catch (SQLException e) {
            throw new DatabaseException(e);
        } finally {
            // Always return the statement to the pool, even on failure.
            DbSQL.getSingleton().releasePreparedStatement(psUpdate);
        }
    }
}
public class Pdf417 { /** * Adds the Macro PDF417 control block codewords ( if any ) . */ private void addMacroCodewords ( ) { } }
// if the structured append series size is 1 , this isn ' t // actually part of a structured append series if ( structuredAppendTotal == 1 ) { return ; } // add the Macro marker codeword codeWords [ codeWordCount ++ ] = 928 ; // add the segment index , padded with leading zeros to five digits // use numeric compaction , but no latch int segmentIndex = structuredAppendPosition - 1 ; int [ ] data = new int [ 5 ] ; for ( int x = data . length - 1 ; x >= 0 ; x -- ) { data [ x ] = '0' + ( segmentIndex % 10 ) ; segmentIndex /= 10 ; } processNumbers ( data , 0 , data . length , true ) ; // add the file ID ( base 900 , which is easy since we limit // file ID values to the range 0 to 899) codeWords [ codeWordCount ++ ] = structuredAppendFileId ; // NOTE : we could add the optional segment count field here , but // it doesn ' t appear to be necessary . . . if we do eventually decide // to add it , it will probably be [ 923 , 001 , count1 , count2] // add the terminator to the last symbol of the series boolean last = ( structuredAppendPosition == structuredAppendTotal ) ; if ( last ) { codeWords [ codeWordCount ++ ] = 922 ; }
public class TableUtils { /** * Issue the database statements to drop the table associated with a dao . * @ param dao * Associated dao . * @ return The number of statements executed to do so . */ public static < T , ID > int dropTable ( Dao < T , ID > dao , boolean ignoreErrors ) throws SQLException { } }
ConnectionSource connectionSource = dao . getConnectionSource ( ) ; Class < T > dataClass = dao . getDataClass ( ) ; DatabaseType databaseType = connectionSource . getDatabaseType ( ) ; if ( dao instanceof BaseDaoImpl < ? , ? > ) { return doDropTable ( databaseType , connectionSource , ( ( BaseDaoImpl < ? , ? > ) dao ) . getTableInfo ( ) , ignoreErrors ) ; } else { TableInfo < T , ID > tableInfo = new TableInfo < T , ID > ( databaseType , dataClass ) ; return doDropTable ( databaseType , connectionSource , tableInfo , ignoreErrors ) ; }
public class AbstractArithmeticTransform {

    /**
     * Performs an arithmetic reduction across the given metrics, producing a single
     * result metric whose datapoints are computed timestamp-by-timestamp.
     *
     * <p>Timestamps are driven by the first metric's datapoints; a timestamp missing
     * from any other metric (signalled by {@code getOperands} throwing
     * {@link MissingDataException}) is silently skipped in the result.
     *
     * @param context the query context (not used here; available to subclasses)
     * @param metrics the metrics to combine; must not be null, may be empty
     * @return a single-element list holding the combined metric, or the empty input list
     * @throws MissingDataException if {@code metrics} is null
     */
    @Override
    public List<Metric> transform(QueryContext context, List<Metric> metrics) {
        if (metrics == null) {
            throw new MissingDataException("The metrics list cannot be null or empty while performing arithmetic transformations.");
        }
        if (metrics.isEmpty()) {
            return metrics;
        }
        Metric result = new Metric(getResultScopeName(), RESULT_METRIC_NAME);
        Map<Long, Double> resultDatapoints = new HashMap<>();
        // Iterate the first metric's timestamps; each needs an operand from every metric.
        Iterator<Entry<Long, Double>> it = metrics.get(0).getDatapoints().entrySet().iterator();
        while (it.hasNext()) {
            Entry<Long, Double> entry = it.next();
            List<Double> operands = null;
            try {
                operands = getOperands(entry.getKey(), metrics);
            } catch (MissingDataException mde) {
                // Timestamp absent in at least one metric: skip rather than fail.
                continue;
            }
            resultDatapoints.put(entry.getKey(), performOperation(operands));
        }
        result.setDatapoints(resultDatapoints);
        // Copy attributes shared by all input metrics onto the result.
        MetricDistiller.setCommonAttributes(metrics, result);
        List<Metric> resultMetrics = new ArrayList<>();
        Collections.addAll(resultMetrics, result);
        return resultMetrics;
    }
}
public class Operator {

    /**
     * Equivalence: returns {@code true} if both operands are {@code true} or both are
     * {@code false}. The EQV operator is the opposite of the XOR operator. For example,
     * {@code true EQV true} is {@code true}, but {@code true EQV false} is {@code false}.
     *
     * @param left  value to check
     * @param right value to check
     * @return result of the operation
     */
    public static boolean eqv(boolean left, boolean right) {
        // Logical equivalence is simply boolean equality; the original spelled it
        // out as (left == true && right == true) || (left == false && right == false).
        return left == right;
    }
}
public class DatabaseAccountsInner { /** * Online the specified region for the specified Azure Cosmos DB database account . * @ param resourceGroupName Name of an Azure resource group . * @ param accountName Cosmos DB database account name . * @ param region Cosmos DB region , with spaces between words and each word capitalized . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the { @ link ServiceResponse } object if successful . */ public Observable < Void > beginOnlineRegionAsync ( String resourceGroupName , String accountName , String region ) { } }
return beginOnlineRegionWithServiceResponseAsync ( resourceGroupName , accountName , region ) . map ( new Func1 < ServiceResponse < Void > , Void > ( ) { @ Override public Void call ( ServiceResponse < Void > response ) { return response . body ( ) ; } } ) ;
public class NetworkUtils {

    /**
     * Rebuild the updater state after a learning rate change.
     * With updaters like Adam, they have 2 components... m and v array, for a total
     * updater state size of 2 * numParams. Because we combine across parameters and
     * layers where possible (smaller number of larger operations -> more efficient)
     * we can sometimes need to rearrange the updater state array.
     * For example, if the original updater state for Adam is organized like
     * [mParam1, mParam2, vParam1, vParam2] in one block and we change the learning
     * rate for one of the layers, param1 and param2 now belong to different updater
     * blocks. Consequently, we need to rearrange the updater state to be like
     * [mParam1][vParam1] in block 1, [mParam2][vParam2] in block 2.
     *
     * @param origUpdaterState Original updater state view array
     * @param orig Original updater blocks
     * @param newUpdater New updater blocks
     * @return New state view array
     */
    protected static INDArray rebuildUpdaterStateArray(INDArray origUpdaterState, List<UpdaterBlock> orig, List<UpdaterBlock> newUpdater) {
        // No updater state (e.g. SGD without momentum): nothing to rebuild.
        if (origUpdaterState == null)
            return origUpdaterState;

        // First: check if there has been any change in the updater blocks to warrant
        // rearranging the updater state view array.
        if (orig.size() == newUpdater.size()) {
            boolean allEq = true;
            for (int i = 0; i < orig.size(); i++) {
                UpdaterBlock ub1 = orig.get(i);
                UpdaterBlock ub2 = newUpdater.get(i);
                if (!ub1.getLayersAndVariablesInBlock().equals(ub2.getLayersAndVariablesInBlock())) {
                    allEq = false;
                    break;
                }
            }
            if (allEq) {
                // Block structure unchanged: the existing state can be reused as-is.
                return origUpdaterState;
            }
        }

        // Step 1: slice the old state view into per-parameter, per-component pieces,
        // keyed by "<layerId>_<paramName>". Each key maps to one sub-view per state
        // component (e.g. index 0 = m, index 1 = v for Adam).
        Map<String, List<INDArray>> stateViewsPerParam = new HashMap<>();
        for (UpdaterBlock ub : orig) {
            List<UpdaterBlock.ParamState> params = ub.getLayersAndVariablesInBlock();
            int blockPStart = ub.getParamOffsetStart();
            int blockPEnd = ub.getParamOffsetEnd();
            int blockUStart = ub.getUpdaterViewOffsetStart();
            int blockUEnd = ub.getUpdaterViewOffsetEnd();
            // Updater state length should be exactly 0, 1, 2 or 3x number of params
            int paramsMultiplier = (blockUEnd - blockUStart) / (blockPEnd - blockPStart);
            INDArray updaterView = ub.getUpdaterView();
            long nParamsInBlock = blockPEnd - blockPStart;

            long soFar = 0;
            for (int sub = 0; sub < paramsMultiplier; sub++) {
                // subsetUpdaterView: [m0, m1, m2] etc — one state component for the whole block
                INDArray subsetUpdaterView = updaterView.get(NDArrayIndex.interval(0, 0, true), NDArrayIndex.interval(soFar, soFar + nParamsInBlock));

                long offsetWithinSub = 0;
                for (UpdaterBlock.ParamState ps : params) {
                    int idx = getId(ps.getLayer());
                    String paramName = idx + "_" + ps.getParamName();
                    INDArray pv = ps.getParamView();
                    long nParamsThisParam = pv.length();

                    INDArray currSplit = subsetUpdaterView.get(NDArrayIndex.interval(0, 0, true), NDArrayIndex.interval(offsetWithinSub, offsetWithinSub + nParamsThisParam));
                    if (!stateViewsPerParam.containsKey(paramName))
                        stateViewsPerParam.put(paramName, new ArrayList<INDArray>());
                    stateViewsPerParam.get(paramName).add(currSplit);
                    offsetWithinSub += nParamsThisParam;
                }
                soFar += nParamsInBlock;
            }
        }

        // Now that we've got updater state per param, we need to reconstruct it in an
        // order suitable for the new updater blocks...
        List<INDArray> toConcat = new ArrayList<>();
        for (UpdaterBlock ub : newUpdater) {
            List<UpdaterBlock.ParamState> ps = ub.getLayersAndVariablesInBlock();
            int idx = getId(ps.get(0).getLayer());
            String firstParam = idx + "_" + ps.get(0).getParamName();
            int size = stateViewsPerParam.get(firstParam).size();
            // For multiple params in the one block, we want to order like [a0, b0, c0][a1, b1, c1]
            for (int i = 0; i < size; i++) {
                for (UpdaterBlock.ParamState p : ps) {
                    idx = getId(p.getLayer());
                    String paramName = idx + "_" + p.getParamName();
                    INDArray arr = stateViewsPerParam.get(paramName).get(i);
                    toConcat.add(arr);
                }
            }
        }
        INDArray newUpdaterState = Nd4j.hstack(toConcat);
        // Sanity checks: shape class and total length must survive the rearrangement.
        Preconditions.checkState(newUpdaterState.rank() == 2, "Expected rank 2");
        Preconditions.checkState(origUpdaterState.length() == newUpdaterState.length(), "Updater state array lengths should be equal: got %s s. %s", origUpdaterState.length(), newUpdaterState.length());
        return newUpdaterState;
    }
}
public class CommerceAddressRestrictionPersistenceImpl {

    /**
     * Returns the commerce address restrictions before and after the current commerce
     * address restriction in the ordered set where commerceCountryId = &#63;.
     *
     * @param commerceAddressRestrictionId the primary key of the current commerce address restriction
     * @param commerceCountryId the commerce country ID
     * @param orderByComparator the comparator to order the set by (optionally <code>null</code>)
     * @return a 3-element array: previous, current, and next commerce address restriction
     * @throws NoSuchAddressRestrictionException if a commerce address restriction with the primary key could not be found
     */
    @Override
    public CommerceAddressRestriction[] findByCommerceCountryId_PrevAndNext(long commerceAddressRestrictionId, long commerceCountryId, OrderByComparator<CommerceAddressRestriction> orderByComparator) throws NoSuchAddressRestrictionException {
        // Throws NoSuchAddressRestrictionException when the primary key does not exist.
        CommerceAddressRestriction commerceAddressRestriction = findByPrimaryKey(commerceAddressRestrictionId);

        Session session = null;

        try {
            session = openSession();

            CommerceAddressRestriction[] array = new CommerceAddressRestrictionImpl[3];

            // Final boolean argument selects direction: true => previous, false => next.
            array[0] = getByCommerceCountryId_PrevAndNext(session, commerceAddressRestriction, commerceCountryId, orderByComparator, true);

            array[1] = commerceAddressRestriction;

            array[2] = getByCommerceCountryId_PrevAndNext(session, commerceAddressRestriction, commerceCountryId, orderByComparator, false);

            return array;
        } catch (Exception e) {
            throw processException(e);
        } finally {
            // Session is always released, even when lookups fail.
            closeSession(session);
        }
    }
}
public class PropertyChangeUtils { /** * Returns whether the given class seems to maintain PropertyChangeListener * instances . That is , whether the given class has the public instance * methods * < code > addPropertyChangeListener ( PropertyChangeListener ) < / code > * and * < code > removePropertyChangeListener ( PropertyChangeListener ) < / code > * @ param c The class * @ return Whether the given class maintains PropertyChangeListeners * @ throws NullPointerException If the given class is < code > null < / code > */ static boolean maintainsPropertyChangeListeners ( Class < ? > c ) { } }
Objects . requireNonNull ( c , "The class may not be null" ) ; Method addMethod = Methods . getMethodOptional ( c , "addPropertyChangeListener" , PropertyChangeListener . class ) ; if ( addMethod == null || ! isPublicInstanceMethod ( addMethod ) ) { return false ; } Method removeMethod = Methods . getMethodOptional ( c , "removePropertyChangeListener" , PropertyChangeListener . class ) ; if ( removeMethod == null || ! isPublicInstanceMethod ( removeMethod ) ) { return false ; } return true ;
public class OpenAPIModelFilterAdapter {

    /**
     * {@inheritDoc}
     *
     * <p>Delegates to the wrapped visitor for its side effects and returns the
     * model element unchanged (this adapter performs no filtering of its own).
     */
    @Override
    public OAuthFlows visitOAuthFlows(Context context, OAuthFlows authFlows) {
        visitor.visitOAuthFlows(context, authFlows);
        return authFlows;
    }
}
public class PerformanceMetrics {

    /**
     * Creates a new instance of performance metrics. Stores the key and correlation id
     * of the parent metrics instance and assigns the next level.
     *
     * <p>Convenience overload: delegates to the three-argument
     * {@code createNext(nextAction, nextDescriptor, null)}.
     *
     * @param nextAction a short name of the measured operation, typically the first prefix of the descriptor
     * @param nextDescriptor a full description of the measured operation
     * @return a new instance of performance metrics with the parent's key and
     *         correlationId and the next level assigned
     */
    public PerformanceMetrics createNext(String nextAction, String nextDescriptor) {
        return createNext(nextAction, nextDescriptor, null);
    }
}
public class RESTResponseHelper {

    /**
     * Creates the TreeTank response XML element: a {@code jaxrx:result} element
     * in the {@code http://jaxrx.org/} namespace.
     *
     * @param document the {@link Document} instance for the response
     * @return the created XML {@link Element}
     */
    private static Element createResultElement(final Document document) {
        final Element result = document.createElementNS("http://jaxrx.org/", "result");
        result.setPrefix("jaxrx");
        return result;
    }
}
public class StreamTransformation { /** * Sets an user provided hash for this operator . This will be used AS IS the create the * JobVertexID . * < p > The user provided hash is an alternative to the generated hashes , that is considered when * identifying an operator through the default hash mechanics fails ( e . g . because of changes * between Flink versions ) . * < p > < strong > Important < / strong > : this should be used as a workaround or for trouble shooting . * The provided hash needs to be unique per transformation and job . Otherwise , job submission * will fail . Furthermore , you cannot assign user - specified hash to intermediate nodes in an * operator chain and trying so will let your job fail . * < p > A use case for this is in migration between Flink versions or changing the jobs in a way * that changes the automatically generated hashes . In this case , providing the previous hashes * directly through this method ( e . g . obtained from old logs ) can help to reestablish a lost * mapping from states to their target operator . * @ param uidHash The user provided hash for this operator . This will become the JobVertexID , which is shown in the * logs and web ui . */ public void setUidHash ( String uidHash ) { } }
Preconditions . checkNotNull ( uidHash ) ; Preconditions . checkArgument ( uidHash . matches ( "^[0-9A-Fa-f]{32}$" ) , "Node hash must be a 32 character String that describes a hex code. Found: " + uidHash ) ; this . userProvidedNodeHash = uidHash ;
public class Order {

    /**
     * Return all or part of an order. The order must have a status of <code>paid</code> or
     * <code>fulfilled</code> before it can be returned. Once all items have been returned,
     * the order will become <code>canceled</code> or <code>returned</code> depending on
     * which status the order started in.
     *
     * @param params request parameters describing which items to return
     * @return the created order return
     * @throws StripeException if the API request fails
     */
    public OrderReturn returnOrder(Map<String, Object> params) throws StripeException {
        // Delegate to the overload with explicit (null => default) request options.
        return returnOrder(params, (RequestOptions) null);
    }
}
public class SrvShoppingCart {

    /**
     * <p>Handles a cart delivering-method or line change event, and redoes the
     * forced service line if needed.</p>
     *
     * @param pRqVs request scoped vars
     * @param pCart cart
     * @param pTxRules Tax Rules
     * @throws Exception - an exception (including when the cart's delivering
     *         method cannot be resolved from the request-scoped list)
     */
    @Override
    public final void hndCartChan(final Map<String, Object> pRqVs, final Cart pCart, final TaxDestination pTxRules) throws Exception {
        // Resolve the cart's current delivering method from the request-scoped list.
        @SuppressWarnings("unchecked")
        List<Deliv> dlvMts = (List<Deliv>) pRqVs.get("dlvMts");
        Deliv cdl = null;
        for (Deliv dl : dlvMts) {
            if (dl.getItsId().equals(pCart.getDeliv())) {
                cdl = dl;
                break;
            }
        }
        if (cdl == null) {
            throw new Exception("wrong delivering!");
        }
        // it must be at least one item to add forced service:
        boolean crtEmpty = true;
        CartLn clFrc = null; // presumably the enabled forced-service line, if any
        CartLn clEm = null;  // presumably a disabled line available for reuse
        for (CartLn cl : pCart.getItems()) {
            if (cl.getDisab()) {
                clEm = cl;
            } else if (!cl.getDisab() && cl.getForc()) {
                clFrc = cl;
            } else if (!cl.getDisab() && !cl.getForc()) {
                crtEmpty = false;
            }
        }
        // NOTE(review): && binds tighter than ||, so this reads as
        // (no forced line AND method has no forced service) OR no applicable amount —
        // confirm that grouping is intended.
        if (clFrc == null && cdl.getFrcSr() == null || cdl.getApMt() == null) {
            return;
        }
        if (crtEmpty) {
            // No ordinary items remain: drop a leftover forced-service line.
            if (clFrc != null) {
                delLine(pRqVs, clFrc, pTxRules);
            }
            return;
        }
        int cartTot;
        AccSettings as = (AccSettings) pRqVs.get("accSet");
        TradingSettings ts = (TradingSettings) pRqVs.get("tradSet");
        // Cart total excluding the forced-service line's own charge.
        BigDecimal ct = pCart.getTot();
        if (clFrc != null && clFrc.getTot().compareTo(BigDecimal.ZERO) == 1) {
            ct = ct.subtract(clFrc.getTot());
        }
        // Convert to the comparison currency when an exchange rate applies.
        if (pCart.getExcRt().compareTo(BigDecimal.ONE) == 0) {
            cartTot = ct.intValue();
        } else {
            cartTot = ct.divide(pCart.getExcRt(), as.getPricePrecision(), as.getRoundingMode()).intValue();
        }
        if (cartTot >= cdl.getApMt()) {
            // Threshold reached: zero out the forced-service charge, if it was priced.
            if (clFrc != null && clFrc.getTot().compareTo(BigDecimal.ZERO) == 1) {
                clFrc.setPrice(BigDecimal.ZERO);
                clFrc.setTot(BigDecimal.ZERO);
                clFrc.setTotTx(BigDecimal.ZERO);
                clFrc.setSubt(BigDecimal.ZERO);
                clFrc.setTxDsc(null);
                clFrc.setTxCat(null);
                this.srvOrm.updateEntity(pRqVs, clFrc);
                makeCartTotals(pRqVs, ts, clFrc, as, pTxRules);
            }
        } else {
            // Below the threshold: ensure a priced forced-service line exists.
            if (clFrc == null) {
                if (clEm == null) {
                    clFrc = new CartLn();
                    clFrc.setIsNew(true);
                    clFrc.setItsOwner(pCart);
                    pCart.getItems().add(clFrc);
                } else {
                    // Reuse a disabled line instead of inserting a new one.
                    clFrc = clEm;
                }
                clFrc.setSel(null);
                clFrc.setForc(true);
                clFrc.setDisab(false);
                clFrc.setItTyp(EShopItemType.SERVICE);
                clFrc.setItId(cdl.getFrcSr().getItsId());
                clFrc.setItsName(cdl.getFrcSr().getItsName());
                clFrc.setUom(cdl.getFrcSr().getDefUnitOfMeasure());
                clFrc.setAvQuan(BigDecimal.ONE);
                clFrc.setQuant(BigDecimal.ONE);
                clFrc.setUnStep(BigDecimal.ONE);
                makeCartLine(pRqVs, clFrc, as, ts, pTxRules, true, true);
                makeCartTotals(pRqVs, ts, clFrc, as, pTxRules);
            } else if (clFrc.getTot().compareTo(BigDecimal.ZERO) == 0) {
                // Line exists but was zeroed (previously free): reprice it.
                makeCartLine(pRqVs, clFrc, as, ts, pTxRules, true, true);
                makeCartTotals(pRqVs, ts, clFrc, as, pTxRules);
            }
        }
    }
}
public class LTPAKeyInfoManager { /** * Given the path to the LTPA key file return the WsResource for the file * if the file exists . * @ param ltpaKeyFile * @ return WsResource if the file exist , null if it does not . */ final WsResource getLTPAKeyFileResource ( WsLocationAdmin locService , String ltpaKeyFile ) { } }
WsResource ltpaFile = locService . resolveResource ( ltpaKeyFile ) ; if ( ltpaFile != null && ltpaFile . exists ( ) ) { return ltpaFile ; } else { // The file does not exist so return null return null ; }
public class DescribeResourcePermissionsRequestMarshaller {

    /**
     * Marshall the given parameter object.
     *
     * <p>Each request field is written through its pre-declared binding constant
     * (which encodes the wire location of the field). Any failure is wrapped in an
     * {@link SdkClientException}, following the AWS SDK marshaller convention.
     *
     * @param describeResourcePermissionsRequest the request to marshall; must not be null
     * @param protocolMarshaller the protocol marshaller that receives the fields
     */
    public void marshall(DescribeResourcePermissionsRequest describeResourcePermissionsRequest, ProtocolMarshaller protocolMarshaller) {
        if (describeResourcePermissionsRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }

        try {
            protocolMarshaller.marshall(describeResourcePermissionsRequest.getAuthenticationToken(), AUTHENTICATIONTOKEN_BINDING);
            protocolMarshaller.marshall(describeResourcePermissionsRequest.getResourceId(), RESOURCEID_BINDING);
            protocolMarshaller.marshall(describeResourcePermissionsRequest.getPrincipalId(), PRINCIPALID_BINDING);
            protocolMarshaller.marshall(describeResourcePermissionsRequest.getLimit(), LIMIT_BINDING);
            protocolMarshaller.marshall(describeResourcePermissionsRequest.getMarker(), MARKER_BINDING);
        } catch (Exception e) {
            // Broad catch is deliberate here: every marshalling failure is surfaced
            // uniformly as an SdkClientException with the cause preserved.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class ShardedThriftClientPool {

    /**
     * Set new services for this pool (TODO may throw
     * {@link IndexOutOfBoundsException} at {@link #getShardedPool(Object)}
     * in heavy load environment).
     *
     * @param services the new service list to rebuild the pools from
     * @return the previous pool map; the caller must release each pool (for example,
     *         wait some seconds until there is no more access to the pool)
     */
    public Map<Integer, ThriftClientPool<T>> setServices(List<ServiceInfo> services) {
        // Synchronize on the map itself so concurrent re-initializations serialize.
        synchronized (poolMap) {
            logger.info("reinit pool using new serviceList: {}", services);
            // Keep a handle on the old pools so the caller can drain/close them.
            Map<Integer, ThriftClientPool<T>> previousMap = poolMap;
            // init(...) presumably replaces poolMap with pools for the new services — TODO confirm.
            init(services);
            return previousMap;
        }
    }
}
public class SubjectUtils { /** * Makes a String representation of { @ code items } with collapsed duplicates and additional class * info . * < p > Example : { @ code countDuplicatesAndAddTypeInfo ( [ 1 , 2 , 2 , 3 ] ) = = " [ 1 , 2 [ 3 copies ] ] * ( java . lang . Integer ) " } and { @ code countDuplicatesAndAddTypeInfo ( [ 1 , 2L ] ) = = " [ 1 * ( java . lang . Integer ) , 2 ( java . lang . Long ) ] " } . */ static String countDuplicatesAndAddTypeInfo ( Iterable < ? > itemsIterable ) { } }
Collection < ? > items = iterableToCollection ( itemsIterable ) ; Optional < String > homogeneousTypeName = getHomogeneousTypeName ( items ) ; return homogeneousTypeName . isPresent ( ) ? lenientFormat ( "%s (%s)" , countDuplicates ( items ) , homogeneousTypeName . get ( ) ) : countDuplicates ( addTypeInfoToEveryItem ( items ) ) ;
public class Util { /** * Joins the items in array with a delimiter , and appends the result to StringBuilder . * @ param sb StringBuilder to append result to * @ param array array of items to join . * @ param delimiter delimiter to insert between elements of array . */ public static void join ( StringBuilder sb , Object [ ] array , String delimiter ) { } }
if ( empty ( array ) ) { return ; } sb . append ( array [ 0 ] ) ; for ( int i = 1 ; i < array . length ; i ++ ) { sb . append ( delimiter ) ; sb . append ( array [ i ] ) ; }
public class Compatibility {

    /**
     * Checks the name and descriptor for known compatibility issues and throws an
     * exception if an incompatibility is found.
     *
     * <p>If the column names are not compatible across components or if any
     * partition name duplicates its source field name, this will cause an error.
     *
     * @param namespace a String namespace
     * @param name a String dataset name
     * @param descriptor a {@link DatasetDescriptor}
     */
    public static void check(String namespace, String name, DatasetDescriptor descriptor) {
        // Validates the namespace/name pair, then the descriptor itself.
        checkDatasetName(namespace, name);
        checkDescriptor(descriptor);
    }
}
public class AbstractScriptParser { /** * 获取真实的缓存时间值 * @ param expire 缓存时间 * @ param expireExpression 缓存时间表达式 * @ param arguments 方法参数 * @ param result 方法执行返回结果 * @ return real expire * @ throws Exception 异常 */ public int getRealExpire ( int expire , String expireExpression , Object [ ] arguments , Object result ) throws Exception { } }
Integer tmpExpire = null ; if ( null != expireExpression && expireExpression . length ( ) > 0 ) { tmpExpire = this . getElValue ( expireExpression , null , arguments , result , true , Integer . class ) ; if ( null != tmpExpire && tmpExpire . intValue ( ) >= 0 ) { // 返回缓存时间表达式计算的时间 return tmpExpire . intValue ( ) ; } } return expire ;
public class FineUploader5Resume {

    /**
     * Set the number of days before a persistent resume record will expire.
     *
     * @param nRecordsExpireIn
     *        New value. Must be &ge; 0.
     * @return this for chaining
     */
    @Nonnull
    public FineUploader5Resume setRecordsExpireIn(@Nonnegative final int nRecordsExpireIn) {
        // ValueEnforcer rejects negative values before the field is touched.
        ValueEnforcer.isGE0(nRecordsExpireIn, "RecordsExpireIn");
        m_nResumeRecordsExpireIn = nRecordsExpireIn;
        return this;
    }
}
public class FormLayoutFormBuilder {

    /**
     * Add a binding to a column and a row. Equals to
     * {@code builder.addBinding(binding, column, row, 1, 1)}.
     *
     * @param binding The binding to add
     * @param column The column at which the binding must be added
     * @param row The row at which the binding must be added
     * @return The component produced by the binding
     * @see #addBinding(Binding, int, int, int, int)
     */
    public JComponent addBinding(Binding binding, int column, int row) {
        // Delegate with a default 1x1 (column-span x row-span) cell.
        return this.addBinding(binding, column, row, 1, 1);
    }
}
public class ScenarioExecutor { /** * Starts the scenario with the given method and arguments . * Derives the description from the method name . * @ param method the method that started the scenario * @ param arguments the test arguments with their parameter names */ public void startScenario ( Class < ? > testClass , Method method , List < NamedArgument > arguments ) { } }
listener . scenarioStarted ( testClass , method , arguments ) ; if ( method . isAnnotationPresent ( Pending . class ) ) { Pending annotation = method . getAnnotation ( Pending . class ) ; if ( annotation . failIfPass ( ) ) { failIfPass ( ) ; } else if ( ! annotation . executeSteps ( ) ) { methodInterceptor . disableMethodExecution ( ) ; executeLifeCycleMethods = false ; } suppressExceptions = true ; } else if ( method . isAnnotationPresent ( NotImplementedYet . class ) ) { NotImplementedYet annotation = method . getAnnotation ( NotImplementedYet . class ) ; if ( annotation . failIfPass ( ) ) { failIfPass ( ) ; } else if ( ! annotation . executeSteps ( ) ) { methodInterceptor . disableMethodExecution ( ) ; executeLifeCycleMethods = false ; } suppressExceptions = true ; }
public class ReaderGroupProperty {

    /**
     * Convert the given object to string, indenting every line of the result
     * except the first.
     */
    private String toIndentedString(java.lang.Object o) {
        // NOTE(review): the surrounding docs mention a 4-space indent, but the
        // literal here inserts a single space after each newline; preserved as-is.
        if (o == null) {
            return "null";
        }
        return String.valueOf(o).replace("\n", "\n ");
    }
}
public class NatsTransporter {

    /**
     * --- CONNECT ---
     *
     * Builds NATS client options from this transporter's configured fields,
     * (re)connects to the configured server URLs, and registers this instance
     * as both connection listener and dispatcher target. Connection failures
     * are logged as warnings, not rethrown.
     */
    @Override
    public void connect() {
        try {
            // Create NATS client options
            Options.Builder builder = new Options.Builder();
            if (secure) {
                builder.secure();
            }
            if (username != null && password != null && !username.isEmpty() && !password.isEmpty()) {
                builder.userInfo(username, password);
            }
            if (sslContext != null) {
                builder.sslContext(sslContext);
            }
            if (noRandomize) {
                builder.noRandomize();
            }
            builder.maxPingsOut(maxPingsOut);
            builder.pingInterval(Duration.ofMillis(pingInterval));
            builder.connectionTimeout(Duration.ofMillis(connectionTimeout));
            if (verbose) {
                builder.verbose();
            }
            builder.bufferSize(bufferSize);
            builder.authHandler(authHandler);
            if (noEcho) {
                builder.noEcho();
            }
            if (opentls) {
                builder.opentls();
            }
            if (pedantic) {
                builder.pedantic();
            }
            if (advancedStats) {
                builder.turnOnAdvancedStats();
            }
            if (utf8Support) {
                builder.supportUTF8Subjects();
            }
            if (oldRequestStyle) {
                builder.oldRequestStyle();
            }
            builder.connectionListener(this);
            // The client itself must not retry; reconnection is handled by this transporter.
            builder.noReconnect();

            // Set server URLs, defaulting the port to 4222 and the scheme to nats://
            for (String url : urls) {
                if (url.indexOf(':') == -1) {
                    url = url + ":4222";
                }
                if (url.indexOf("://") == -1) {
                    url = "nats://" + url;
                }
                builder.server(url);
            }

            // Connect to NATS server, dropping any previous connection first.
            disconnect();
            started.set(true);
            Options options = builder.build();
            client = Nats.connect(options);
            dispatcher = client.createDispatcher(this);
            // NOTE(review): "estabilished" is a typo for "established" in this log line.
            logger.info("NATS pub-sub connection estabilished.");
            connected();
        } catch (Exception cause) {
            // Normalize the warning message; the error is logged, never rethrown.
            String msg = cause.getMessage();
            if (msg == null || msg.isEmpty()) {
                msg = "Unable to connect to NATS server!";
            } else if (!msg.endsWith("!") && !msg.endsWith(".")) {
                msg += "!";
            }
            logger.warn(msg);
        }
    }
}
public class HFSUtils {

    /**
     * Converts the memory size to number of sectors.
     *
     * Estimates the HFS+ metadata zone (allocation bitmap, extents overflow
     * file, journal, catalog, hot files, and quota files) on top of the
     * requested payload size, then converts the total to sectors, rounding
     * up by one sector.
     *
     * Fixes: removed a leftover debug {@code System.out.println} and
     * replaced boxed {@code Double} locals with primitive {@code double}.
     *
     * @param requestSize requested filesystem size in bytes
     * @param sectorSize  the size of each sector in bytes
     * @return total sectors of HFS+ including estimated metadata zone size
     * @throws NumberFormatException if either argument is not a parseable number
     */
    public static long getNumSector(String requestSize, String sectorSize) {
        double memSize = Double.parseDouble(requestSize);
        double sectorBytes = Double.parseDouble(sectorSize);
        double nSectors = memSize / sectorBytes;
        double memSizeKB = memSize / 1024;
        double memSizeGB = memSize / (1024 * 1024 * 1024);
        double memSize100GB = memSizeGB / 100;
        // allocation bitmap file: one bit per sector
        double allocBitmapSize = nSectors / 8;
        // extents overflow file: 4MB per 100GB
        // NOTE(review): the original comment claimed "4MB, plus 4MB per
        // 100GB" but the code only adds the per-100GB share — confirm intent.
        double extOverflowFileSize = memSize100GB * 1024 * 1024 * 4;
        // journal file: 8MB per 100GB (same base-size caveat as above)
        double journalFileSize = memSize100GB * 1024 * 1024 * 8;
        // catalog file: 10 bytes per KB
        double catalogFileSize = memSizeKB * 10;
        // hot files: 5 bytes per KB
        double hotFileSize = memSizeKB * 5;
        // quota users file and quota groups file
        double quotaUsersFileSize = (memSizeGB * 256 + 1) * 64;
        double quotaGroupsFileSize = (memSizeGB * 32 + 1) * 64;
        double metadataSize = allocBitmapSize + extOverflowFileSize + journalFileSize
                + catalogFileSize + hotFileSize + quotaUsersFileSize + quotaGroupsFileSize;
        double allocSize = memSize + metadataSize;
        double numSectors = allocSize / sectorBytes;
        // round up
        return (long) numSectors + 1;
    }
}
public class ADStarNodeExpander {

    /**
     * Updating the priority of a node is required when changing the value of Epsilon.
     *
     * Recomputes the node's key in place from its current g and v values, a
     * freshly estimated heuristic for its state, the current epsilon, and the
     * configured add/scale operations.
     *
     * @param node the node whose priority key is refreshed
     */
    public void updateKey(N node) {
        node.getKey().update(node.getG(), node.getV(), heuristicFunction.estimate(node.state()), epsilon, add, scale);
    }
}
public class KeyVaultClientCustomImpl { /** * Updates the specified certificate issuer . * @ param updateCertificateIssuerRequest * the grouped properties for updating a certificate issuer request * @ param serviceCallback * the async ServiceCallback to handle successful and failed * responses . * @ return the { @ link ServiceFuture } object * @ throws IllegalArgumentException * thrown if callback is null */ public ServiceFuture < IssuerBundle > updateCertificateIssuerAsync ( UpdateCertificateIssuerRequest updateCertificateIssuerRequest , final ServiceCallback < IssuerBundle > serviceCallback ) { } }
return updateCertificateIssuerAsync ( updateCertificateIssuerRequest . vaultBaseUrl ( ) , updateCertificateIssuerRequest . issuerName ( ) , updateCertificateIssuerRequest . provider ( ) , updateCertificateIssuerRequest . credentials ( ) , updateCertificateIssuerRequest . organizationDetails ( ) , updateCertificateIssuerRequest . attributes ( ) , serviceCallback ) ;
public class ControlBean {

    /**
     * The postInvoke method is called after all operations on the control. It is the basic
     * hook for logging, context initialization, resource management, and other common
     * services.
     *
     * @param m      the control method that was invoked
     * @param args   the arguments the method was invoked with
     * @param retval the value returned by the invocation
     * @param t      the throwable raised during the invocation, if any
     */
    protected void postInvoke(Method m, Object[] args, Object retval, Throwable t) {
        // Delegate to the six-argument overload, passing null for the two
        // additional parameters.
        postInvoke(m, args, retval, t, null, null);
    }
}
public class MetadataFinder { /** * Finishes the process of attaching a metadata cache file once it has been opened and validated . * @ param slot the slot to which the cache should be attached * @ param cache the opened , validated metadata cache file */ void attachMetadataCacheInternal ( SlotReference slot , MetadataCache cache ) { } }
MetadataCache oldCache = metadataCacheFiles . put ( slot , cache ) ; if ( oldCache != null ) { try { oldCache . close ( ) ; } catch ( IOException e ) { logger . error ( "Problem closing previous metadata cache" , e ) ; } } deliverCacheUpdate ( slot , cache ) ;
public class LogOutputFactory { /** * Creates a pre - populated StringBuilder instance . The output contains the following : * " Time = < now > RequestTime = < duration > URL = < request URL > Method = < http method > Format = < requested format e . g . ' json ' > Resource = < route pattern > Machine = < machineName > CorrelationId = < request correlation id > Status = < http status > User - Agent = < user agent string > Referrer = < referrer http header > " * However , if jvmId , User - Agent , or Referer are null , they are not included in the output . The CorrelationId * value is the RestExpress Request correlation id , which is unique within a single JVM . * @ param request a RestExpress Request instance . * @ param response a RestExpress Response instance . * @ param duration the duration of the request , in milliseconds . * @ return a pre - populated StringBuilder . */ public StringBuilder createStringBuilder ( Request request , Response response , Long duration ) { } }
StringBuilder builder = new StringBuilder ( ) ; builder . append ( "Time=" + ( ( DateFormat ) DATE_FORMAT . clone ( ) ) . format ( new Date ( ) ) ) ; builder . append ( " RequestTime=" + duration ) ; builder . append ( " URL=" + request . getUrl ( ) ) ; builder . append ( " Method=" + request . getHttpMethod ( ) . name ( ) ) ; builder . append ( " Format=" + request . getFormat ( ) ) ; if ( request . getResolvedRoute ( ) != null ) { builder . append ( " Resource=" + request . getResolvedRoute ( ) . getFullPattern ( ) ) ; } if ( machineName != null ) { builder . append ( " Machine=" + machineName ) ; } builder . append ( " CorrelationId=" + request . getCorrelationId ( ) ) ; builder . append ( " Status=" + response . getResponseStatus ( ) . code ( ) ) ; if ( request . getHeader ( "User-Agent" ) != null ) { builder . append ( " UserAgent=" + request . getHeader ( "User-Agent" ) ) ; } if ( request . getHeader ( "Referer" ) != null ) { builder . append ( " UrlReferer=" + request . getHeader ( "Referer" ) ) ; } return builder ;
public class ServerSideEncryption { /** * Create a new server - side - encryption object for encryption with customer * provided keys ( a . k . a . SSE - C ) . * @ param key The secret AES - 256 key . * @ return An instance of ServerSideEncryption implementing SSE - C . * @ throws InvalidKeyException if the provided secret key is not a 256 bit AES key . * @ throws NoSuchAlgorithmException if the crypto provider does not implement MD5. */ public static ServerSideEncryption copyWithCustomerKey ( SecretKey key ) throws InvalidKeyException , NoSuchAlgorithmException { } }
if ( ! isCustomerKeyValid ( key ) ) { throw new InvalidKeyException ( "The secret key is not a 256 bit AES key" ) ; } return new ServerSideEncryptionCopyWithCustomerKey ( key , MessageDigest . getInstance ( ( "MD5" ) ) ) ;
public class SearchFavouritesServiceInMemoryImpl { /** * If creator is not set , only shared favourites will be checked ( if * shared ) ! */ public void deleteSearchFavourite ( SearchFavourite sf ) throws IOException { } }
synchronized ( lock ) { if ( sf == null || sf . getId ( ) == null ) { throw new IllegalArgumentException ( "null, or id not set!" ) ; } else { allFavourites . remove ( sf . getId ( ) ) ; if ( sf . isShared ( ) ) { sharedFavourites . remove ( sf . getId ( ) ) ; } else { if ( sf . getCreator ( ) != null ) { Map < Long , SearchFavourite > favs = privateFavourites . get ( sf . getCreator ( ) ) ; if ( favs != null ) { favs . remove ( sf . getId ( ) ) ; } } else { log . warn ( "Creator is not set! I'm not checking all users so I'm giving up." ) ; } } } }
public class ApplicationSecurityGroupsInner { /** * Creates or updates an application security group . * @ param resourceGroupName The name of the resource group . * @ param applicationSecurityGroupName The name of the application security group . * @ param parameters Parameters supplied to the create or update ApplicationSecurityGroup operation . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable for the request */ public Observable < ApplicationSecurityGroupInner > createOrUpdateAsync ( String resourceGroupName , String applicationSecurityGroupName , ApplicationSecurityGroupInner parameters ) { } }
return createOrUpdateWithServiceResponseAsync ( resourceGroupName , applicationSecurityGroupName , parameters ) . map ( new Func1 < ServiceResponse < ApplicationSecurityGroupInner > , ApplicationSecurityGroupInner > ( ) { @ Override public ApplicationSecurityGroupInner call ( ServiceResponse < ApplicationSecurityGroupInner > response ) { return response . body ( ) ; } } ) ;
public class World {

    /**
     * Sets the {@code PublicRootActor} instance as a {@code Stoppable}. (INTERNAL ONLY)
     *
     * @param publicRoot the {@code Stoppable} protocol backed by the {@code PublicRootActor}
     * @throws IllegalStateException if a non-null public root is already set
     */
    synchronized void setPublicRoot(final Stoppable publicRoot) {
        // Refuse to overwrite an existing root; assigning null is always
        // allowed (it clears the reference).
        if (publicRoot != null && this.publicRoot != null) {
            throw new IllegalStateException("The public root already exists.");
        }
        this.publicRoot = publicRoot;
    }
}
public class ValueAnimator {

    /**
     * Called internally to start an animation by adding it to the active animations list. Must be
     * called on the UI thread.
     */
    private void startAnimation() {
        initAnimation();
        // Register with the (thread-local) active-animations list.
        sAnimations.get().add(this);
        if (mStartDelay > 0 && mListeners != null) {
            // Listeners were already notified in start() if startDelay is 0; this is
            // just for delayed animations
            // Iterate over a clone so listener callbacks that modify the
            // listener list do not affect this iteration.
            ArrayList<AnimatorListener> tmpListeners = (ArrayList<AnimatorListener>) mListeners.clone();
            int numListeners = tmpListeners.size();
            for (int i = 0; i < numListeners; ++i) {
                tmpListeners.get(i).onAnimationStart(this);
            }
        }
    }
}
public class AmazonCloudFormationClient { /** * Returns the summary information for stacks whose status matches the specified StackStatusFilter . Summary * information for stacks that have been deleted is kept for 90 days after the stack is deleted . If no * StackStatusFilter is specified , summary information for all stacks is returned ( including existing stacks and * stacks that have been deleted ) . * @ param listStacksRequest * The input for < a > ListStacks < / a > action . * @ return Result of the ListStacks operation returned by the service . * @ sample AmazonCloudFormation . ListStacks * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / cloudformation - 2010-05-15 / ListStacks " target = " _ top " > AWS API * Documentation < / a > */ @ Override public ListStacksResult listStacks ( ListStacksRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeListStacks ( request ) ;
public class ZipUtil { /** * Changes an existing ZIP file : replaces a given entry in it . * @ param zip * an existing ZIP file . * @ param entry * new ZIP entry . * @ return < code > true < / code > if the entry was replaced . */ public static boolean replaceEntry ( final File zip , final ZipEntrySource entry ) { } }
return operateInPlace ( zip , new InPlaceAction ( ) { public boolean act ( File tmpFile ) { return replaceEntry ( zip , entry , tmpFile ) ; } } ) ;
public class CmsCroppingParamBean { /** * Parses an image scale parameter and returns the parsed data . < p > * @ param param the image path including the scale parameter * @ return the cropping data */ public static CmsCroppingParamBean parseScaleParam ( String param ) { } }
CmsCroppingParamBean result = new CmsCroppingParamBean ( ) ; if ( CmsStringUtil . isEmptyOrWhitespaceOnly ( param ) ) { return result ; } String [ ] parameters = param . split ( SCALE_PARAM_DELIMITER ) ; for ( int i = 0 ; i < parameters . length ; i ++ ) { String scaleParam = parameters [ i ] . trim ( ) ; if ( scaleParam . startsWith ( SCALE_PARAM_TARGETHEIGHT + SCALE_PARAM_COLON ) ) { result . setTargetHeight ( parseValue ( SCALE_PARAM_TARGETHEIGHT , scaleParam ) ) ; continue ; } if ( scaleParam . startsWith ( SCALE_PARAM_TARGETWIDTH + SCALE_PARAM_COLON ) ) { result . setTargetWidth ( parseValue ( SCALE_PARAM_TARGETWIDTH , scaleParam ) ) ; continue ; } if ( scaleParam . startsWith ( SCALE_PARAM_CROP_X + SCALE_PARAM_COLON ) ) { result . setCropX ( parseValue ( SCALE_PARAM_CROP_X , scaleParam ) ) ; continue ; } if ( scaleParam . startsWith ( SCALE_PARAM_CROP_Y + SCALE_PARAM_COLON ) ) { result . setCropY ( parseValue ( SCALE_PARAM_CROP_Y , scaleParam ) ) ; continue ; } if ( scaleParam . startsWith ( SCALE_PARAM_CROP_HEIGHT + SCALE_PARAM_COLON ) ) { result . setCropHeight ( parseValue ( SCALE_PARAM_CROP_HEIGHT , scaleParam ) ) ; continue ; } if ( scaleParam . startsWith ( SCALE_PARAM_CROP_WIDTH + SCALE_PARAM_COLON ) ) { result . setCropWidth ( parseValue ( SCALE_PARAM_CROP_WIDTH , scaleParam ) ) ; continue ; } } return result ;
public class UnsafeMappedBytes { /** * Allocates a mapped buffer . * Memory will be mapped by opening and expanding the given { @ link java . io . File } to the desired { @ code count } and mapping the * file contents into memory via { @ link java . nio . channels . FileChannel # map ( java . nio . channels . FileChannel . MapMode , long , long ) } . * @ param file The file to map into memory . If the file doesn ' t exist it will be automatically created . * @ param mode The mode with which to map the file . * @ param size The count of the buffer to allocate ( in bytes ) . * @ return The mapped buffer . * @ throws NullPointerException If { @ code file } is { @ code null } * @ throws IllegalArgumentException If { @ code count } is greater than { @ link MappedMemory # MAX _ SIZE } * @ see UnsafeMappedBytes # allocate ( java . io . File , long ) */ public static UnsafeMappedBytes allocate ( File file , FileChannel . MapMode mode , long size ) { } }
if ( file == null ) throw new NullPointerException ( "file cannot be null" ) ; if ( mode == null ) mode = MappedMemoryAllocator . DEFAULT_MAP_MODE ; if ( size > MappedMemory . MAX_SIZE ) throw new IllegalArgumentException ( "size for MappedBytes cannot be greater than " + MappedMemory . MAX_SIZE ) ; return new UnsafeMappedBytes ( file , MappedMemory . allocate ( file , mode , size ) ) ;
public class CassandraStorage {

    /**
     * read wide row
     *
     * Assembles one Pig tuple per row key by pulling consecutive key/value
     * pairs from the underlying reader until the key changes or input ends.
     * Because the reader can only be detected as "past the row" after a read,
     * the overshoot (lastKey/lastRow fields) is stashed and prepended on the
     * next call.
     *
     * @return the next wide-row tuple (key fields followed by a bag of
     *         column tuples), or null when no more rows are available
     * @throws IOException if reading is interrupted
     */
    public Tuple getNextWide() throws IOException {
        CfInfo cfInfo = getCfInfo(loadSignature);
        CfDef cfDef = cfInfo.cfDef;
        ByteBuffer key = null;
        Tuple tuple = null;
        DefaultDataBag bag = new DefaultDataBag();
        try {
            // Accumulate values into `bag` until the row boundary is found.
            while (true) {
                hasNext = reader.nextKeyValue();
                if (!hasNext) {
                    // Input exhausted: flush whatever was carried over.
                    if (tuple == null)
                        tuple = TupleFactory.getInstance().newTuple();
                    if (lastRow != null) {
                        if (tuple.size() == 0) // lastRow is a new one
                        {
                            key = (ByteBuffer) reader.getCurrentKey();
                            tuple = keyToTuple(key, cfDef, parseType(cfDef.getKey_validation_class()));
                        }
                        for (Map.Entry<ByteBuffer, Cell> entry : lastRow.entrySet()) {
                            bag.add(columnToTuple(entry.getValue(), cfInfo, parseType(cfDef.getComparator_type())));
                        }
                        lastKey = null;
                        lastRow = null;
                        tuple.append(bag);
                        return tuple;
                    } else {
                        if (tuple.size() == 1) // rare case of just one wide row, key already set
                        {
                            tuple.append(bag);
                            return tuple;
                        } else
                            return null;
                    }
                }
                if (key != null && !((ByteBuffer) reader.getCurrentKey()).equals(key)) // key changed
                {
                    // read too much, hold on to it for next time
                    lastKey = (ByteBuffer) reader.getCurrentKey();
                    lastRow = (SortedMap<ByteBuffer, Cell>) reader.getCurrentValue();
                    // but return what we have so far
                    tuple.append(bag);
                    return tuple;
                }
                if (key == null) // only set the key on the first iteration
                {
                    key = (ByteBuffer) reader.getCurrentKey();
                    if (lastKey != null && !(key.equals(lastKey))) // last key only had one value
                    {
                        // Emit the single-value carried-over row immediately.
                        if (tuple == null)
                            tuple = keyToTuple(lastKey, cfDef, parseType(cfDef.getKey_validation_class()));
                        else
                            addKeyToTuple(tuple, lastKey, cfDef, parseType(cfDef.getKey_validation_class()));
                        for (Map.Entry<ByteBuffer, Cell> entry : lastRow.entrySet()) {
                            bag.add(columnToTuple(entry.getValue(), cfInfo, parseType(cfDef.getComparator_type())));
                        }
                        tuple.append(bag);
                        lastKey = key;
                        lastRow = (SortedMap<ByteBuffer, Cell>) reader.getCurrentValue();
                        return tuple;
                    }
                    if (tuple == null)
                        tuple = keyToTuple(key, cfDef, parseType(cfDef.getKey_validation_class()));
                    else
                        addKeyToTuple(tuple, lastKey, cfDef, parseType(cfDef.getKey_validation_class()));
                }
                SortedMap<ByteBuffer, Cell> row = (SortedMap<ByteBuffer, Cell>) reader.getCurrentValue();
                if (lastRow != null) // prepend what was read last time
                {
                    for (Map.Entry<ByteBuffer, Cell> entry : lastRow.entrySet()) {
                        bag.add(columnToTuple(entry.getValue(), cfInfo, parseType(cfDef.getComparator_type())));
                    }
                    lastKey = null;
                    lastRow = null;
                }
                for (Map.Entry<ByteBuffer, Cell> entry : row.entrySet()) {
                    bag.add(columnToTuple(entry.getValue(), cfInfo, parseType(cfDef.getComparator_type())));
                }
            }
        } catch (InterruptedException e) {
            // NOTE(review): the cause is dropped and the interrupt flag is
            // not restored — consider new IOException(e) and
            // Thread.currentThread().interrupt().
            throw new IOException(e.getMessage());
        }
    }
}
public class LRImporter { /** * Obtain the path used for an obtain request * @ param requestID the " request _ ID " parameter for the request * @ param byResourceID the " by _ resource _ ID " parameter for the request * @ param byDocID the " by _ doc _ ID " parameter for the request * @ param idsOnly the " ids _ only " parameter for the request * @ param resumptionToken the " resumption _ token " parameter for the request * @ return the string of the path for an obtain request */ private String getObtainRequestPath ( String requestID , Boolean byResourceID , Boolean byDocID , Boolean idsOnly , String resumptionToken ) { } }
String path = obtainPath ; if ( resumptionToken != null ) { path += "?" + resumptionTokenParam + "=" + resumptionToken ; return path ; } if ( requestID != null ) { path += "?" + requestIDParam + "=" + requestID ; } else { // error return null ; } if ( byResourceID ) { path += "&" + byResourceIDParam + "=" + booleanTrueString ; } else { path += "&" + byResourceIDParam + "=" + booleanFalseString ; } if ( byDocID ) { path += "&" + byDocIDParam + "=" + booleanTrueString ; } else { path += "&" + byDocIDParam + "=" + booleanFalseString ; } if ( idsOnly ) { path += "&" + idsOnlyParam + "=" + booleanTrueString ; } else { path += "&" + idsOnlyParam + "=" + booleanFalseString ; } return path ;
public class DesignDocumentManager {

    /**
     * Performs a query to retrieve all the design documents defined in the database.
     *
     * @return a list of the design documents from the database
     * @throws IOException if there was an error communicating with the server
     * @since 2.5.0
     */
    public List<DesignDocument> list() throws IOException {
        // Design documents all have ids starting with "_design/"; querying
        // the key range ["_design/", "_design0") with the end key excluded
        // selects exactly that prefix.
        return db.getAllDocsRequestBuilder()
                .startKey("_design/")
                .endKey("_design0")
                .inclusiveEnd(false)
                .includeDocs(true)
                .build()
                .getResponse()
                .getDocsAs(DesignDocument.class);
    }
}
public class MediaManagementApi { /** * Complete a bulk of interactions * Complete a bulk of interactions * @ param mgtCancel ( required ) * @ return ApiSuccessResponse * @ throws ApiException If fail to call the API , e . g . server error or cannot deserialize the response body */ public ApiSuccessResponse mgtComplete ( MgtCancel1 mgtCancel ) throws ApiException { } }
ApiResponse < ApiSuccessResponse > resp = mgtCompleteWithHttpInfo ( mgtCancel ) ; return resp . getData ( ) ;
public class GraphReaderAdapter { /** * { @ inheritDoc } */ public Graph < Edge > readUndirected ( File f , Indexer < String > vertexLabels ) throws IOException { } }
throw new UnsupportedOperationException ( ) ;
public class RequestHttpBase { /** * Sets a footer , replacing an already - existing footer * @ param key the header key to set . * @ param value the header value to set . */ public void setFooter ( String key , String value ) { } }
Objects . requireNonNull ( value ) ; int i = 0 ; boolean hasFooter = false ; for ( i = _footerKeys . size ( ) - 1 ; i >= 0 ; i -- ) { String oldKey = _footerKeys . get ( i ) ; if ( oldKey . equalsIgnoreCase ( key ) ) { if ( hasFooter ) { _footerKeys . remove ( i ) ; _footerValues . remove ( i ) ; } else { hasFooter = true ; _footerValues . set ( i , value ) ; } } } if ( ! hasFooter ) { _footerKeys . add ( key ) ; _footerValues . add ( value ) ; }
public class CachedFwAssistantDirector {

    /**
     * Returns the framework core direction, creating and preparing it on
     * first use and caching it for all subsequent calls.
     *
     * @return the cached {@code FwCoreDirection} instance
     */
    public FwCoreDirection assistCoreDirection() {
        // Fast path: already initialized.
        if (coreDirection != null) {
            return coreDirection;
        }
        synchronized (this) {
            // Re-check under the lock in case another thread won the race.
            if (coreDirection != null) {
                return coreDirection;
            }
            // NOTE(review): this double-checked locking is only safe if the
            // coreDirection field is declared volatile — confirm at the field
            // declaration (not visible here).
            final FwCoreDirection direction = createCoreDirection();
            prepareCoreDirection(direction);
            coreDirection = direction;
        }
        return coreDirection;
    }
}
import java.util.regex.*;

public class ValidateString {

    // Compiled once instead of per call: matches a vowel (either case) at
    // the start of the input.
    private static final Pattern STARTS_WITH_VOWEL = Pattern.compile("^[aeiouAEIOU]");

    /**
     * Function to verify if a given string begins with a vowel, using RegEx.
     *
     * Args:
     *     inputString: A string to be validated.
     * Returns:
     *     "Valid" if the string starts with a vowel, "Invalid" otherwise.
     * Examples:
     *     validateString("apple")  -> "Valid"
     *     validateString("grape")  -> "Invalid"
     *     validateString("Orange") -> "Valid"
     */
    public static String validateString(String inputString) {
        return STARTS_WITH_VOWEL.matcher(inputString).find() ? "Valid" : "Invalid";
    }
}
public class AVMixPushManager {

    /**
     * Cancels the mixed (vendor) push registration.
     *
     * After a successful cancellation, messages are delivered through the
     * LeanCloud websocket instead.
     */
    public static void unRegisterMixPush() {
        AVInstallation installation = AVInstallation.getCurrentInstallation();
        String vendor = installation.getString(AVInstallation.VENDOR);
        // Only act when a vendor push channel is currently registered.
        if (!StringUtil.isEmpty(vendor)) {
            // Reset the vendor field to "lc" and persist asynchronously;
            // success/failure is reported through the callback below.
            installation.put(AVInstallation.VENDOR, "lc");
            installation.saveInBackground().subscribe(ObserverBuilder.buildSingleObserver(new SaveCallback() {
                @Override
                public void done(AVException e) {
                    if (null != e) {
                        printErrorLog("unRegisterMixPush error!");
                    } else {
                        LOGGER.d("Registration canceled successfully!");
                    }
                }
            }));
        }
    }
}
public class druidGParser {

    /**
     * druidG.g:355:1: pairNums returns [Pair<Integer,Integer> pair] : ( LSQUARE ( WS )? i= LONG ( WS )? ',' ( WS )? j= LONG ( WS )? RSQUARE ) ;
     *
     * ANTLR-generated rule: parses a bracketed pair of longs such as
     * "[ 1 , 2 ]" (whitespace optional) into a Pair of Integers. On a
     * recognition error the error is reported, the parser recovers, and
     * null is returned. Do not hand-edit the match logic.
     */
    public final Pair<Integer, Integer> pairNums() throws RecognitionException {
        Pair<Integer, Integer> pair = null;
        Token i = null;
        Token j = null;
        try {
            // druidG.g:356:2: ( ( LSQUARE ( WS )? i= LONG ( WS )? ',' ( WS )? j= LONG ( WS )? RSQUARE ) )
            // druidG.g:356:4: ( LSQUARE ( WS )? i= LONG ( WS )? ',' ( WS )? j= LONG ( WS )? RSQUARE )
            {
                // druidG.g:356:4: ( LSQUARE ( WS )? i= LONG ( WS )? ',' ( WS )? j= LONG ( WS )? RSQUARE )
                // druidG.g:356:5: LSQUARE ( WS )? i= LONG ( WS )? ',' ( WS )? j= LONG ( WS )? RSQUARE
                {
                    match(input, LSQUARE, FOLLOW_LSQUARE_in_pairNums2489);
                    // druidG.g:356:13: ( WS )? — optional whitespace after '['
                    int alt169 = 2;
                    int LA169_0 = input.LA(1);
                    if ((LA169_0 == WS)) {
                        alt169 = 1;
                    }
                    switch (alt169) {
                        case 1:
                            // druidG.g:356:13: WS
                            {
                                match(input, WS, FOLLOW_WS_in_pairNums2491);
                            }
                            break;
                    }
                    // first number
                    i = (Token) match(input, LONG, FOLLOW_LONG_in_pairNums2496);
                    // druidG.g:356:25: ( WS )? — optional whitespace before ','
                    int alt170 = 2;
                    int LA170_0 = input.LA(1);
                    if ((LA170_0 == WS)) {
                        alt170 = 1;
                    }
                    switch (alt170) {
                        case 1:
                            // druidG.g:356:25: WS
                            {
                                match(input, WS, FOLLOW_WS_in_pairNums2499);
                            }
                            break;
                    }
                    // token 91 is the ',' literal in this generated parser
                    match(input, 91, FOLLOW_91_in_pairNums2502);
                    // druidG.g:356:33: ( WS )? — optional whitespace after ','
                    int alt171 = 2;
                    int LA171_0 = input.LA(1);
                    if ((LA171_0 == WS)) {
                        alt171 = 1;
                    }
                    switch (alt171) {
                        case 1:
                            // druidG.g:356:33: WS
                            {
                                match(input, WS, FOLLOW_WS_in_pairNums2504);
                            }
                            break;
                    }
                    // second number
                    j = (Token) match(input, LONG, FOLLOW_LONG_in_pairNums2509);
                    // druidG.g:356:44: ( WS )? — optional whitespace before ']'
                    int alt172 = 2;
                    int LA172_0 = input.LA(1);
                    if ((LA172_0 == WS)) {
                        alt172 = 1;
                    }
                    switch (alt172) {
                        case 1:
                            // druidG.g:356:44: WS
                            {
                                match(input, WS, FOLLOW_WS_in_pairNums2511);
                            }
                            break;
                    }
                    match(input, RSQUARE, FOLLOW_RSQUARE_in_pairNums2514);
                }
                // Convert both matched LONG tokens into the result pair.
                pair = new Pair<>(Integer.parseInt((i != null ? i.getText() : null)), Integer.parseInt((j != null ? j.getText() : null)));
            }
        } catch (RecognitionException re) {
            reportError(re);
            recover(input, re);
        } finally {
            // do for sure before leaving
        }
        return pair;
    }
}
public class Indexer {

    /**
     * Flushes lists of added/removed nodes to SearchManagers, starting indexing.
     *
     * Each manager is updated independently: a failure in the primary
     * search manager does not prevent the parent manager's update. On an
     * IOException the affected changes are written to an error log so they
     * can be recovered later.
     *
     * @param addedNodes          nodes added since the last flush
     * @param removedNodes        nodes removed since the last flush
     * @param parentAddedNodes    added nodes for the parent search manager
     * @param parentRemovedNodes  removed nodes for the parent search manager
     */
    public void updateIndex(Set<String> addedNodes, Set<String> removedNodes, Set<String> parentAddedNodes, Set<String> parentRemovedNodes) {
        // pass lists to search manager
        if (searchManager != null && (addedNodes.size() > 0 || removedNodes.size() > 0)) {
            try {
                searchManager.updateIndex(removedNodes, addedNodes);
            } catch (RepositoryException e) {
                log.error("Error indexing changes " + e, e);
            } catch (IOException e) {
                log.error("Error indexing changes " + e, e);
                // Persist the failed change set so it can be replayed.
                try {
                    handler.logErrorChanges(removedNodes, addedNodes);
                } catch (IOException ioe) {
                    log.warn("Exception occure when errorLog writed. Error log is not complete. " + ioe, ioe);
                }
            }
        }
        // pass lists to parent search manager
        if (parentSearchManager != null && (parentAddedNodes.size() > 0 || parentRemovedNodes.size() > 0)) {
            try {
                parentSearchManager.updateIndex(parentRemovedNodes, parentAddedNodes);
            } catch (RepositoryException e) {
                log.error("Error indexing changes " + e, e);
            } catch (IOException e) {
                log.error("Error indexing changes " + e, e);
                // Persist the failed change set so it can be replayed.
                try {
                    parentHandler.logErrorChanges(parentRemovedNodes, parentAddedNodes);
                } catch (IOException ioe) {
                    log.warn("Exception occure when errorLog writed. Error log is not complete. " + ioe, ioe);
                }
            }
        }
    }
}
public class Expressions { /** * Create a new TimeExpression * @ param expr the time Expression * @ return new TimeExpression */ public static < T extends Comparable < ? > > TimeExpression < T > asTime ( Expression < T > expr ) { } }
Expression < T > underlyingMixin = ExpressionUtils . extract ( expr ) ; if ( underlyingMixin instanceof PathImpl ) { return new TimePath < T > ( ( PathImpl < T > ) underlyingMixin ) ; } else if ( underlyingMixin instanceof OperationImpl ) { return new TimeOperation < T > ( ( OperationImpl < T > ) underlyingMixin ) ; } else if ( underlyingMixin instanceof TemplateExpressionImpl ) { return new TimeTemplate < T > ( ( TemplateExpressionImpl < T > ) underlyingMixin ) ; } else { return new TimeExpression < T > ( underlyingMixin ) { private static final long serialVersionUID = - 2402288239000668173L ; @ Override public < R , C > R accept ( Visitor < R , C > v , C context ) { return this . mixin . accept ( v , context ) ; } } ; }
public class WebSiteManagementClientImpl {

    /**
     * Validate whether a resource can be moved.
     *
     * @param resourceGroupName Name of the resource group to which the resource belongs.
     * @param moveResourceEnvelope Object that represents the resource to move.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<Void> validateMoveAsync(String resourceGroupName, CsmMoveResourceEnvelope moveResourceEnvelope, final ServiceCallback<Void> serviceCallback) {
        // Adapt the Observable-based implementation to a ServiceFuture,
        // wiring the callback for success/failure notification.
        return ServiceFuture.fromResponse(validateMoveWithServiceResponseAsync(resourceGroupName, moveResourceEnvelope), serviceCallback);
    }
}
public class GitlabAPI { /** * Gets a build for a project * @ param projectId the project id * @ param jobId the build id * @ return A list of project jobs * @ throws IOException on gitlab api call error */ public GitlabJob getProjectJob ( Integer projectId , Integer jobId ) throws IOException { } }
String tailUrl = GitlabProject . URL + "/" + sanitizeProjectId ( projectId ) + GitlabJob . URL + "/" + jobId ; return retrieve ( ) . to ( tailUrl , GitlabJob . class ) ;
public class AipBodyAnalysis { /** * 人体关键点识别接口 * 对于输入的一张图片 ( 可正常解码 , 且长宽比适宜 ) , * * 检测图片中的所有人体 , 输出每个人体的14个主要关键点 , 包含四肢 、 脖颈 、 鼻子等部位 , 以及人体的坐标信息和数量 * * 。 * @ param image - 二进制图像数据 * @ param options - 可选参数对象 , key : value都为string类型 * options - options列表 : * @ return JSONObject */ public JSONObject bodyAnalysis ( byte [ ] image , HashMap < String , String > options ) { } }
AipRequest request = new AipRequest ( ) ; preOperation ( request ) ; String base64Content = Base64Util . encode ( image ) ; request . addBody ( "image" , base64Content ) ; if ( options != null ) { request . addBody ( options ) ; } request . setUri ( BodyAnalysisConsts . BODY_ANALYSIS ) ; postOperation ( request ) ; return requestServer ( request ) ;
public class SmapGenerator { /** * Methods for serializing the logical SMAP */ public synchronized String getString ( ) { } }
// check state and initialize buffer if ( outputFileName == null ) throw new IllegalStateException ( ) ; StringBuffer out = new StringBuffer ( ) ; // start the SMAP out . append ( "SMAP\n" ) ; out . append ( outputFileName + '\n' ) ; out . append ( defaultStratum + '\n' ) ; // include embedded SMAPs if ( doEmbedded ) { int nEmbedded = embedded . size ( ) ; for ( int i = 0 ; i < nEmbedded ; i ++ ) { out . append ( embedded . get ( i ) ) ; } } // print our StratumSections , FileSections , and LineSections int nStrata = strata . size ( ) ; for ( int i = 0 ; i < nStrata ; i ++ ) { SmapStratum s = strata . get ( i ) ; out . append ( s . getString ( ) ) ; } // end the SMAP out . append ( "*E\n" ) ; return out . toString ( ) ;
public class PSBroker {

    /**
     * Forces the given subscriber to unsubscribe from the given type of
     * messages.
     *
     * @param <T> The type to unsubscribe from.
     * @param s The subscriber that will be forced to unsubscribe.
     * @param messageType The type of message.
     * @return Returns true if the subscriber was forced to unsubscribe from the
     *         given type of messages.
     */
    public <T> boolean unsubscribe(Subscriber<T> s, Class<T> messageType) {
        // Delegate to the configured strategy, which operates on this
        // broker's subscription mapping.
        return subscribeStrategy.unsubscribe(mapping, s, messageType);
    }
}