signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class JournalSegment {
    /**
     * Writes a partial journal entry. The entry will only be completed
     * after completeWrite() has been called.
     *
     * The write will return false if the journal segment is full. The caller
     * needs to retry the write with a new segment.
     *
     * @param buffer source bytes to append
     * @param offset start offset into buffer
     * @param length number of bytes to write; a zero length is a no-op success
     * @return true if the bytes were written, false if the segment is full
     */
    public boolean write (byte [] buffer, int offset, int length) {
        if (length == 0) {
            return true; // throw new IllegalStateException ( ) ;
        }
        // Number of BLOCK_SIZE chunks the payload will be split into
        // (each chunk is preceded by a 2-byte length header, hence 2 * count below).
        int count = length / BLOCK_SIZE + 1;
        // Reject the write when payload + per-chunk headers + PAD would not fit
        // between the current write index and the tail address.
        if (_tailAddress <= _index + length + PAD + 2 * count) {
            return false;
        }
        // Fold the payload into the running CRC before chunking.
        _crc.update (buffer, offset, length);
        byte [] headerBuffer = _headerBuffer;
        while (length > 0) {
            int sublen = Math.min (length, BLOCK_SIZE);
            // 16-bit chunk-length header, then the chunk itself.
            BitsUtil.writeInt16 (headerBuffer, 0, sublen);
            writeImpl (headerBuffer, 0, 2);
            writeImpl (buffer, offset, sublen);
            length -= sublen;
            offset += sublen;
        }
        return true;
    }
}
public class ConstantWriter {
    /**
     * If name requires escapes, put them in, so it can be a string body.
     * Backslash, double quote, newline and tab are rewritten as \\, \", \n, \t.
     *
     * @param name the raw string
     * @return the escaped string, or the original instance when no escaping was needed
     */
    private static String addEscapes (String name) {
        final String specials = "\\\"\n\t";
        final String mapped = "\\\"nt";
        StringBuilder out = null;   // created lazily, only when an escape is found
        int copied = 0;             // index up to which name has been copied into out
        final int len = name.length ();
        for (int i = 0; i < len; i++) {
            final int idx = specials.indexOf (name.charAt (i));
            if (idx < 0) {
                continue;
            }
            if (out == null) {
                out = new StringBuilder (len * 2);
            }
            out.append (name, copied, i).append ('\\').append (mapped.charAt (idx));
            copied = i + 1;
        }
        if (out == null) {
            // No special characters: return the original instance unchanged.
            return name;
        }
        return out.append (name, copied, len).toString ();
    }
}
public class GivenColumn { /** * { @ inheritDoc } */
public Statistics doCell ( Example cell ) throws Exception { } } | Statistics stats = new Statistics ( ) ; Call call = new Call ( send ) ; call . will ( Annotate . exception ( cell ) ) . when ( ResultIs . exception ( ) ) ; call . will ( Compile . statistics ( stats ) ) . when ( ResultIs . exception ( ) ) ; call . execute ( cell . getContent ( ) ) ; return stats ; |
public class URI { /** * Compare this URI to another object .
* @ param obj the object to be compared .
* @ return 0 , if it ' s same ,
* - 1 , if failed , first being compared with in the authority component
* @ throws ClassCastException not URI argument */
@ Override public int compareTo ( Object obj ) throws ClassCastException { } } | URI another = ( URI ) obj ; if ( ! equals ( _authority , another . getRawAuthority ( ) ) ) { return - 1 ; } return toString ( ) . compareTo ( another . toString ( ) ) ; |
public class HibernateTemplate { /** * Prepare the given Query object , applying cache settings and / or a
* transaction timeout .
* @ param queryObject
* the Query object to prepare
* @ see # setCacheQueries
* @ see # setQueryCacheRegion
* @ see SessionProviderHolder # applyTransactionTimeout */
protected void prepareQuery ( Query queryObject ) { } } | if ( isCacheQueries ( ) ) { queryObject . setCacheable ( true ) ; if ( getQueryCacheRegion ( ) != null ) { queryObject . setCacheRegion ( getQueryCacheRegion ( ) ) ; } } if ( getFetchSize ( ) > 0 ) { queryObject . setFetchSize ( getFetchSize ( ) ) ; } if ( getMaxResults ( ) > 0 ) { queryObject . setMaxResults ( getMaxResults ( ) ) ; } if ( getFirstResult ( ) > 0 ) { queryObject . setFirstResult ( getFirstResult ( ) ) ; } |
public class ServerStateMachine {
    /**
     * Applies all commits up to the given index.
     *
     * Calls to this method are assumed not to expect a result. This allows some
     * optimizations to be made internally since linearizable events don't have
     * to be waited to complete the command.
     *
     * @param index The index up to which to apply commits.
     */
    public void applyAll (long index) {
        // Nothing to do once the log has been closed.
        if (!log.isOpen ()) return;
        // If the effective commit index is greater than the last index applied
        // to the state machine then apply remaining entries.
        // Never apply past the end of the log.
        long lastIndex = Math.min (index, log.lastIndex ());
        if (lastIndex > lastApplied) {
            for (long i = lastApplied + 1; i <= lastIndex; i++) {
                Entry entry = log.get (i);
                // A null entry (e.g. compacted) is still counted as applied.
                if (entry != null) {
                    // Release the entry only after the (possibly async) apply completes.
                    apply (entry).whenComplete ((result, error) -> entry.release ());
                }
                setLastApplied (i);
            }
        }
    }
}
public class EntityTypesClient { /** * Creates an entity type in the specified agent .
* < p > Sample code :
* < pre > < code >
* try ( EntityTypesClient entityTypesClient = EntityTypesClient . create ( ) ) {
* ProjectAgentName parent = ProjectAgentName . of ( " [ PROJECT ] " ) ;
* EntityType entityType = EntityType . newBuilder ( ) . build ( ) ;
* String languageCode = " " ;
* EntityType response = entityTypesClient . createEntityType ( parent . toString ( ) , entityType , languageCode ) ;
* < / code > < / pre >
* @ param parent Required . The agent to create a entity type for . Format : ` projects / & lt ; Project
* ID & gt ; / agent ` .
* @ param entityType Required . The entity type to create .
* @ param languageCode Optional . The language of entity synonyms defined in ` entity _ type ` . If not
* specified , the agent ' s default language is used . [ Many
* languages ] ( https : / / cloud . google . com / dialogflow - enterprise / docs / reference / language ) are
* supported . Note : languages must be enabled in the agent before they can be used .
* @ throws com . google . api . gax . rpc . ApiException if the remote call fails */
public final EntityType createEntityType ( String parent , EntityType entityType , String languageCode ) { } } | CreateEntityTypeRequest request = CreateEntityTypeRequest . newBuilder ( ) . setParent ( parent ) . setEntityType ( entityType ) . setLanguageCode ( languageCode ) . build ( ) ; return createEntityType ( request ) ; |
public class Mail { /** * Sets the org . Jodd . Email instance that the email is based on
* @ param email The Email instance
* @ return A mail object instance */
public Mail withBuilder ( Email email ) { } } | Objects . requireNonNull ( email , Required . EMAIL . toString ( ) ) ; this . email = email ; return this ; |
public class Encodes { /** * Base62(0_9A _ Za _ z ) 编码数字 , long - & gt ; String 。
* @ param num
* 数字
* @ return 编码结果 */
public static String encodeBase62 ( final long num ) { } } | long index = Math . abs ( num ) ; int base = BASE62 . length ( ) ; StringBuilder sb = new StringBuilder ( ) ; for ( ; index > 0 ; index /= base ) { sb . append ( BASE62 . charAt ( ( int ) ( index % base ) ) ) ; } return sb . toString ( ) ; |
public class BaseXmlImporter {
    /**
     * Check if item with uuid = identifier exists. If no item exists return the same
     * identifier. If an item with the same uuid exists, depending on uuidBehavior do:
     * <ol>
     * <li>IMPORT_UUID_CREATE_NEW - return null. Caller will create a new identifier.</li>
     * <li>IMPORT_UUID_COLLISION_REMOVE_EXISTING - Remove the same-uuid item and its
     * subtree. Also if the item is MIX_VERSIONABLE, remove its version history.</li>
     * <li>IMPORT_UUID_COLLISION_REPLACE_EXISTING - Remove the same-uuid item and its
     * subtree. Also if the item is MIX_VERSIONABLE, remove its version history.</li>
     * <li>IMPORT_UUID_COLLISION_THROW - throw an ItemExistsException.</li>
     * </ol>
     *
     * @param identifier the candidate identifier; may be null
     * @return the identifier to use, or null when a new one must be generated
     * @throws RepositoryException on collision with IMPORT_UUID_COLLISION_THROW,
     *     or from the underlying data access
     */
    protected String validateUuidCollision (final String identifier) throws RepositoryException {
        String newIdentifer = identifier;
        if (identifier != null) {
            try {
                NodeData sameUuidItem = (NodeData) dataConsumer.getItemData (identifier);
                ItemState lastState = getLastItemState (identifier);
                // A collision only counts if the existing item is not already
                // deleted in the pending change set.
                if (sameUuidItem != null && (lastState == null || !lastState.isDeleted ())) {
                    boolean isMixVersionable = nodeTypeDataManager.isNodeType (Constants.MIX_VERSIONABLE, sameUuidItem.getMixinTypeNames ());
                    switch (uuidBehavior) {
                        case ImportUUIDBehavior.IMPORT_UUID_CREATE_NEW :
                            // Incoming referenceable nodes are assigned newly created UUIDs
                            // upon addition to the workspace. As a result UUID collisions
                            // never occur.
                            // Reset UUID and it will be autocreated in session.
                            newIdentifer = null;
                            break;
                        case ImportUUIDBehavior.IMPORT_UUID_COLLISION_REMOVE_EXISTING :
                            // Remove version history before removing item.
                            if (isMixVersionable) {
                                removeVersionHistory (sameUuidItem);
                            }
                            removeExisted (sameUuidItem);
                            break;
                        case ImportUUIDBehavior.IMPORT_UUID_COLLISION_REPLACE_EXISTING :
                            // Remove version history before removing item.
                            if (isMixVersionable) {
                                removeVersionHistory (sameUuidItem);
                            }
                            removeExisted (sameUuidItem);
                            // Push a temporary copy of the removed item's parent so the
                            // replacement is imported under the same parent node.
                            ItemData parentOfsameUuidItem = dataConsumer.getItemData (sameUuidItem.getParentIdentifier ());
                            ImportNodeData temporaryParent = ImportNodeData.createCopy ((NodeData) parentOfsameUuidItem);
                            temporaryParent.setTemporary (true);
                            tree.push (temporaryParent);
                            break;
                        case ImportUUIDBehavior.IMPORT_UUID_COLLISION_THROW :
                            // If an incoming referenceable node has the same UUID as a node
                            // already existing in the workspace then a SAXException is thrown
                            // by the ContentHandler during deserialization.
                            throw new JCRItemExistsException ("An incoming referenceable node has the same " + "UUID as a node already existing in the workspace! UUID:" + identifier, identifier);
                        default :
                    }
                }
            } catch (ItemNotFoundException e) {
                // No existing item with this uuid: keep the identifier as-is.
                if (LOG.isTraceEnabled ()) {
                    LOG.trace ("An exception occurred: " + e.getMessage ());
                }
            }
        }
        return newIdentifer;
    }
}
public class AnomalySTLTransform { /** * Output : anomaly score between 0 and 100 */
private double anomalyScore ( double x , double mean , double sd ) { } } | x = ( x - mean ) / sd ; x = normPDF ( x ) ; x = ( max_norm_prob - x ) / max_norm_prob ; x *= 100 ; return x ; |
public class BaseExtension { /** * Get the extension for the name and table name
* @ param extensionName
* extension name
* @ param tableName
* table name
* @ return extension */
protected List < Extensions > getExtensions ( String extensionName , String tableName ) { } } | List < Extensions > extensions = null ; try { if ( extensionsDao . isTableExists ( ) ) { extensions = extensionsDao . queryByExtension ( extensionName , tableName ) ; } } catch ( SQLException e ) { throw new GeoPackageException ( "Failed to query for '" + extensionName + "' extension for GeoPackage: " + geoPackage . getName ( ) + ", Table Name: " + tableName , e ) ; } return extensions ; |
public class FFMQConnectionFactory { /** * Lookup the provider URI */
protected final URI getProviderURI ( ) throws JMSException { } } | String providerURL = getProviderURL ( ) ; URI parsedURL ; try { parsedURL = new URI ( providerURL ) ; } catch ( URISyntaxException e ) { throw new FFMQException ( "Malformed provider URL : " + providerURL , "INVALID_PROVIDER_URL" ) ; } if ( ! parsedURL . isAbsolute ( ) ) throw new FFMQException ( "Invalid provider URL : " + providerURL , "INVALID_PROVIDER_URL" ) ; return parsedURL ; |
public class ConnectionPoolConfigurationImpl { /** * ( non - Javadoc )
* @ see com . netflix . cassandra . ConnectionPoolConfiguration # getSeedHosts ( ) */
@ Override public List < Host > getSeedHosts ( ) { } } | List < Host > hosts = new ArrayList < Host > ( ) ; if ( seeds != null ) { for ( String seed : seeds . split ( "," ) ) { seed = seed . trim ( ) ; if ( seed . length ( ) > 0 ) { hosts . add ( new Host ( seed , this . port ) ) ; } } } return hosts ; |
public class Document {
    /**
     * <p>Getter for the field <code>repository</code>.</p>
     *
     * @return a {@link com.greenpepper.server.domain.Repository} object.
     */
    @ManyToOne (cascade = { CascadeType.PERSIST, CascadeType.MERGE })
    @JoinColumn (name = "REPOSITORY_ID")
    public Repository getRepository () {
        return this.repository;
    }
}
public class LabAccountsInner { /** * List lab accounts in a resource group .
* @ param nextPageLink The NextLink from the previous successful call to List operation .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the PagedList & lt ; LabAccountInner & gt ; object */
public Observable < Page < LabAccountInner > > listByResourceGroupNextAsync ( final String nextPageLink ) { } } | return listByResourceGroupNextWithServiceResponseAsync ( nextPageLink ) . map ( new Func1 < ServiceResponse < Page < LabAccountInner > > , Page < LabAccountInner > > ( ) { @ Override public Page < LabAccountInner > call ( ServiceResponse < Page < LabAccountInner > > response ) { return response . body ( ) ; } } ) ; |
public class DiskClient { /** * Retrieves a list of persistent disks contained within the specified zone .
* < p > Sample code :
* < pre > < code >
* try ( DiskClient diskClient = DiskClient . create ( ) ) {
* ProjectZoneName zone = ProjectZoneName . of ( " [ PROJECT ] " , " [ ZONE ] " ) ;
* for ( Disk element : diskClient . listDisks ( zone . toString ( ) ) . iterateAll ( ) ) {
* / / doThingsWith ( element ) ;
* < / code > < / pre >
* @ param zone The name of the zone for this request .
* @ throws com . google . api . gax . rpc . ApiException if the remote call fails */
@ BetaApi public final ListDisksPagedResponse listDisks ( String zone ) { } } | ListDisksHttpRequest request = ListDisksHttpRequest . newBuilder ( ) . setZone ( zone ) . build ( ) ; return listDisks ( request ) ; |
public class CSSModuleBuilder {
    /**
     * Runs given CSS through PostCSS processor for minification and any other
     * processing by configured plugins.
     *
     * Borrows a pooled Rhino thread scope, runs the PostCSS processor inside a
     * fresh child scope, and always returns the thread scope to the pool.
     *
     * @param request the servlet request (currently unused here)
     * @param css the CSS source to process
     * @param res the resource the CSS came from (used for error reporting)
     * @return The processed CSS.
     * @throws IOException when PostCSS raises a JavaScript error
     */
    protected String postcss (HttpServletRequest request, String css, IResource res) throws IOException {
        // PostCSS not configured: pass the CSS through untouched.
        if (threadScopes == null) {
            return css;
        }
        Context cx = Context.enter ();
        Scriptable threadScope = null;
        String result = null;
        try {
            // Borrow a scope from the pool, bounded by a timeout so a drained
            // pool cannot block forever.
            threadScope = threadScopes.poll (SCOPE_POOL_TIMEOUT_SECONDS, TimeUnit.SECONDS);
            if (threadScope == null) {
                throw new TimeoutException ("Timeout waiting for thread scope"); // $ NON - NLS - 1 $
            }
            // Execute in a child scope so per-call state does not pollute the
            // pooled thread scope.
            Scriptable scope = cx.newObject (threadScope);
            scope.setParentScope (threadScope);
            Scriptable postcssInstance = (Scriptable) threadScope.get (POSTCSS_INSTANCE, scope);
            Function postcssProcessor = (Function) postcssInstance.getPrototype ().get (PROCESS, postcssInstance);
            Object processed = postcssProcessor.call (cx, scope, postcssInstance, new Object [] { css, postcssOptions });
            result = Context.toString (processed);
        } catch (JavaScriptException e) {
            // Add module info so the failing resource is identifiable.
            String message = "Error parsing " + res.getURI () + "\r\n" + e.getMessage (); // $ NON - NLS - 1 $ / / $ NON - NLS - 2 $
            throw new IOException (message, e);
        } catch (InterruptedException e) {
            throw new RuntimeException (e);
        } catch (TimeoutException e) {
            throw new RuntimeException (e);
        } finally {
            if (threadScope != null) {
                // Put the thread scope back in the queue now that we're done with it.
                threadScopes.add (threadScope);
            }
            Context.exit ();
        }
        return result;
    }
}
public class SocketFactory {
    /**
     * Create an SSL client socket using the IOR-encoded security characteristics.
     * Setting want/need client auth on a client socket has no effect so all we
     * can do is use the right host, port, and ciphers.
     *
     * @param host The target host name.
     * @param port The target connection port.
     * @param clientSSLConfigName name of the sslConfig used for cipher suite selection
     * @return An appropriately configured client SSLSocket.
     * @exception IOException if ssl socket can't be obtained and configured.
     */
    private Socket createSSLSocket (String host, int port, final String clientSSLConfigName) throws IOException {
        final SSLSocketFactory factory = getSocketFactory (clientSSLConfigName);
        SSLSocket socket = (SSLSocket) factory.createSocket (host, port);
        // 60-second read timeout on the new socket.
        socket.setSoTimeout (60 * 1000);
        // Get a set of cipher suites appropriate for this connection's requirements.
        // We request this for each connection, since the outgoing IOR's requirements
        // may be different from our server listener requirements.
        String [] iorSuites;
        try {
            iorSuites = (String []) AccessController.doPrivileged (new PrivilegedExceptionAction<Object> () {
                @Override
                public Object run () throws Exception {
                    return sslConfig.getCipherSuites (clientSSLConfigName, factory.getSupportedCipherSuites ());
                }
            });
        } catch (PrivilegedActionException pae) {
            throw new IOException ("Could not configure client socket", pae.getCause ());
        }
        socket.setEnabledCipherSuites (iorSuites);
        if (TraceComponent.isAnyTracingEnabled () && tc.isEventEnabled ()) {
            Tr.debug (tc, "Created SSL socket to " + host + ":" + port);
            Tr.debug (tc, " cipher suites:");
            for (int i = 0; i < iorSuites.length; i++) {
                Tr.debug (tc, " " + iorSuites [i]);
            }
            // NOTE(review): the handshake listener is registered only when event
            // tracing is enabled — presumably intentional since it only emits
            // debug output, but confirm this is not meant to run unconditionally.
            socket.addHandshakeCompletedListener (new HandshakeCompletedListener () {
                @Override
                public void handshakeCompleted (HandshakeCompletedEvent handshakeCompletedEvent) {
                    Certificate [] certs = handshakeCompletedEvent.getLocalCertificates ();
                    if (certs != null) {
                        Tr.debug (tc, "handshake returned local certs count: " + certs.length);
                        for (int i = 0; i < certs.length; i++) {
                            Certificate cert = certs [i];
                            Tr.debug (tc, "cert: " + cert.toString ());
                        }
                    } else {
                        Tr.debug (tc, "handshake returned no local certs");
                    }
                }
            });
        }
        return socket;
    }
}
public class ProgrammaticWrappingProxyInstaller { /** * Wrap a constructor doc .
* @ param source the source
* @ return the wrapper . */
public ConstructorDoc wrap ( ConstructorDoc source ) { } } | if ( source == null || source instanceof Proxy < ? > || ! ( source instanceof ConstructorDocImpl ) ) { return source ; } return new ConstructorDocWrapper ( ( ConstructorDocImpl ) source ) ; |
public class MongodbQueue { /** * { @ inheritDoc } */
@ Override public boolean requeueSilent ( IQueueMessage < ID , DATA > _msg ) { } } | IQueueMessage < ID , DATA > msg = _msg . clone ( ) ; return upsertToCollection ( msg ) ; |
public class TitanFactory { private static ReadConfiguration getLocalConfiguration ( String shortcutOrFile ) { } } | File file = new File ( shortcutOrFile ) ; if ( file . exists ( ) ) return getLocalConfiguration ( file ) ; else { int pos = shortcutOrFile . indexOf ( ':' ) ; if ( pos < 0 ) pos = shortcutOrFile . length ( ) ; String backend = shortcutOrFile . substring ( 0 , pos ) ; Preconditions . checkArgument ( StandardStoreManager . getAllManagerClasses ( ) . containsKey ( backend . toLowerCase ( ) ) , "Backend shorthand unknown: %s" , backend ) ; String secondArg = null ; if ( pos + 1 < shortcutOrFile . length ( ) ) secondArg = shortcutOrFile . substring ( pos + 1 ) . trim ( ) ; BaseConfiguration config = new BaseConfiguration ( ) ; ModifiableConfiguration writeConfig = new ModifiableConfiguration ( ROOT_NS , new CommonsConfiguration ( config ) , BasicConfiguration . Restriction . NONE ) ; writeConfig . set ( STORAGE_BACKEND , backend ) ; ConfigOption option = Backend . getOptionForShorthand ( backend ) ; if ( option == null ) { Preconditions . checkArgument ( secondArg == null ) ; } else if ( option == STORAGE_DIRECTORY || option == STORAGE_CONF_FILE ) { Preconditions . checkArgument ( StringUtils . isNotBlank ( secondArg ) , "Need to provide additional argument to initialize storage backend" ) ; writeConfig . set ( option , getAbsolutePath ( secondArg ) ) ; } else if ( option == STORAGE_HOSTS ) { Preconditions . checkArgument ( StringUtils . isNotBlank ( secondArg ) , "Need to provide additional argument to initialize storage backend" ) ; writeConfig . set ( option , new String [ ] { secondArg } ) ; } else throw new IllegalArgumentException ( "Invalid configuration option for backend " + option ) ; return new CommonsConfiguration ( config ) ; } |
public class StringSearch {
    /**
     * Gets a substring out of a CharacterIterator.
     * Java porting note: not available in ICU4C.
     *
     * The iterator's index is restored to its original position before returning.
     *
     * @param text   CharacterIterator
     * @param start  start offset
     * @param length length of substring
     * @return substring from text starting at start, of the given length
     */
    private static final String getString (CharacterIterator text, int start, int length) {
        final StringBuilder out = new StringBuilder (length);
        // Remember the caller's position so the iterator is left untouched.
        final int savedIndex = text.getIndex ();
        text.setIndex (start);
        for (int remaining = length; remaining > 0; remaining--) {
            out.append (text.current ());
            text.next ();
        }
        text.setIndex (savedIndex);
        return out.toString ();
    }
}
public class ConcurrentSubList {
    /**
     * (non-Javadoc)
     *
     * Restores this sublist's serialized state: reads and logs the version
     * byte, delegates to the superclass, then re-establishes the token of the
     * owning concurrent list. IOExceptions are recorded via FFDC and rethrown
     * as PermanentIOException.
     *
     * @see SimplifiedSerialization.readObject(java.io.DataInputStream, ObjectManagerState)
     */
    public void readObject (java.io.DataInputStream dataInputStream, ObjectManagerState objectManagerState) throws ObjectManagerException {
        if (Tracing.isAnyTracingEnabled () && trace.isEntryEnabled ())
            trace.entry (this, cclass, "readObject", "dataInputStream=" + dataInputStream + " objectManagerState=" + objectManagerState);
        try {
            byte version = dataInputStream.readByte ();
            if (Tracing.isAnyTracingEnabled () && trace.isDebugEnabled ())
                trace.debug (this, cclass, "readObject", "version=" + version + "(byte)");
            super.readObject (dataInputStream, objectManagerState);
            // Re-establish the owning ConcurrentLinkedList.
            concurrentListToken = Token.restore (dataInputStream, objectManagerState);
        } catch (java.io.IOException exception) {
            // No FFDC Code Needed.
            ObjectManager.ffdc.processException (this, cclass, "readObject", exception, "1:497:1.23");
            if (Tracing.isAnyTracingEnabled () && trace.isEntryEnabled ())
                trace.exit (this, cclass, "readObject", "via PermanentIOException");
            throw new PermanentIOException (this, exception);
        } // catch ( java . io . IOException exception ) .
        if (Tracing.isAnyTracingEnabled () && trace.isEntryEnabled ())
            trace.exit (this, cclass, "readObject");
    }
}
public class ToastJsGenerator { /** * Generates the javascript template code from the given map and the given method name that will
* be used to interpolate with the values of the given map .
* @ param variables
* the map with the javascript options .
* @ param methodName
* The method name .
* @ return The generated javascript from the given map and the given method name . */
@ Override public String generateJavascriptTemplateContent ( final Map < String , Object > variables , final String methodName ) { } } | final StringBuilder sb = new StringBuilder ( ) ; if ( isWithDocumentReadyFunction ( ) ) { sb . append ( DOCUMENT_READY_FUNCTION_PREFIX ) . append ( "\n" ) . append ( "\n" ) ; } for ( final Map . Entry < String , Object > entry : variables . entrySet ( ) ) { final String key = entry . getKey ( ) ; if ( ! COMMAND . equals ( key ) ) { sb . append ( key ) . append ( "=${" ) . append ( key ) . append ( "};" ) ; sb . append ( "\n" ) ; } } sb . append ( "\n" ) ; sb . append ( "${" + COMMAND + "};" ) ; sb . append ( "\n" ) ; if ( isWithDocumentReadyFunction ( ) ) { sb . append ( DOCUMENT_READY_FUNCTION_SUFFIX ) ; } return sb . toString ( ) ; |
public class LabeledEnumLabelPanel {
    /**
     * Factory method for creating a new {@link EnumLabel}. This method is
     * invoked in the constructor from the derived classes and can be
     * overridden so users can provide their own version of a new
     * {@link EnumLabel}.
     *
     * @param id    the id
     * @param model the model of the label
     * @return the new {@link EnumLabel}.
     */
    @SuppressWarnings ({ "rawtypes", "unchecked" })
    protected EnumLabel newEnumLabel (final String id, final IModel<T> model) {
        // NOTE(review): the property expression is this panel's own wicket id
        // (this.getId()), i.e. the label reads the model object's property named
        // after the panel — confirm that is the intended convention.
        final IModel viewableLabelModel = new PropertyModel (model.getObject (), this.getId ());
        return ComponentFactory.newEnumLabel (id, viewableLabelModel);
    }
}
public class AlertEntityConditionService {
    /**
     * Removes the given entity from the alert condition with the given id.
     *
     * @param entity      The entity to remove; must be non-null
     * @param conditionId The id of the alert condition from which to delete the entity
     * @return This object
     */
    public AlertEntityConditionService remove (Entity entity, long conditionId) {
        // Delegate to the id/type-based overload.
        return remove (entity.getId (), entity.getType (), conditionId);
    }
}
public class MacAuthUtils {
    /**
     * Returns a nonce value. The nonce value MUST be unique across all
     * requests with the same MAC key identifier.
     *
     * The result is "&lt;age-in-seconds&gt;:&lt;nanotime&gt;".
     *
     * @see <a href="https://tools.ietf.org/html/draft-hammer-oauth-v2-mac-token-05#section-3.1">
     *      3.1. The "Authorization" Request Header</a>
     * @param issueTime the issue time subtracted from the current time in
     *     milliseconds (NOTE(review): the original javadoc said "seconds", but
     *     the code treats it as a millisecond timestamp — confirm with callers)
     * @return nonce value
     */
    public static String getNonce (long issueTime) {
        final long ageMillis = new Date ().getTime () - issueTime;
        return TimeUnit.MILLISECONDS.toSeconds (ageMillis) + ":" + System.nanoTime ();
    }
}
public class OnEntryScriptTypeImpl {
    /**
     * <!-- begin-user-doc -->
     * Sets the script format, firing an EMF SET notification when listeners
     * are registered. EMF-generated accessor — do not hand-edit the body.
     * <!-- end-user-doc -->
     *
     * @generated
     */
    public void setScriptFormat (String newScriptFormat) {
        String oldScriptFormat = scriptFormat;
        scriptFormat = newScriptFormat;
        if (eNotificationRequired ())
            eNotify (new ENotificationImpl (this, Notification.SET, DroolsPackage.ON_ENTRY_SCRIPT_TYPE__SCRIPT_FORMAT, oldScriptFormat, scriptFormat));
    }
}
public class CommandHelper {
    /**
     * Convert the value according to the type of DeviceData.
     *
     * For scalar Tango types the first string is parsed as a Double and
     * narrowed to the target type; a non-numeric first string raises
     * TANGO_WRONG_DATA_ERROR. For array types every element is parsed.
     *
     * @param stringValues    the values to insert on DeviceData
     * @param deviceDataArgin the DeviceData attribute to write
     * @param dataType        the type of inserted data
     * @throws DevFailed on unparsable input or unsupported types
     */
    public static void insertFromStringArray (final String [] stringValues, final DeviceData deviceDataArgin, final int dataType) throws DevFailed {
        // By default for xdim = 1, send the first value.
        String firsString = "";
        Double numericalValue = Double.NaN;
        if (stringValues.length > 0) {
            firsString = stringValues [0];
        }
        // NaN marks "not a number" for all scalar branches below.
        try {
            numericalValue = Double.valueOf (firsString);
        } catch (final Exception e) {
            numericalValue = Double.NaN;
        }
        switch (dataType) {
            case TangoConst.Tango_DEV_SHORT :
                if (!numericalValue.isNaN ()) {
                    deviceDataArgin.insert (numericalValue.shortValue ());
                } else {
                    Except.throw_exception ("TANGO_WRONG_DATA_ERROR", firsString + " is not a Tango_DEV_SHORT", "CommandHelper.insertFromStringArray(String[] values,deviceDataArgin)");
                }
                break;
            case TangoConst.Tango_DEV_USHORT :
                if (!numericalValue.isNaN ()) {
                    deviceDataArgin.insert_us (numericalValue.shortValue ());
                } else {
                    Except.throw_exception ("TANGO_WRONG_DATA_ERROR", firsString + " is not a Tango_DEV_USHORT", "CommandHelper.insertFromStringArray(String[] values,deviceDataArgin)");
                }
                break;
            case TangoConst.Tango_DEV_CHAR :
                // NOTE(review): this branch inserts the byte and then
                // unconditionally throws "not supported" — inconsistent with
                // every other branch; confirm whether the insert is dead code.
                deviceDataArgin.insert (numericalValue.byteValue ());
                Except.throw_exception ("TANGO_WRONG_DATA_ERROR", "input type Tango_DEV_CHAR not supported", "CommandHelper.insertFromStringArray(String[] values,deviceDataArgin)");
                break;
            case TangoConst.Tango_DEV_UCHAR :
                if (!numericalValue.isNaN ()) {
                    deviceDataArgin.insert (numericalValue.shortValue ());
                } else {
                    Except.throw_exception ("TANGO_WRONG_DATA_ERROR", firsString + " is not a Tango_DEV_UCHAR", "CommandHelper.insertFromStringArray(String[] values,deviceDataArgin)");
                }
                break;
            case TangoConst.Tango_DEV_LONG :
                if (!numericalValue.isNaN ()) {
                    deviceDataArgin.insert (numericalValue.intValue ());
                } else {
                    Except.throw_exception ("TANGO_WRONG_DATA_ERROR", firsString + " is not a Tango_DEV_LONG", "CommandHelper.insertFromStringArray(String[] values,deviceDataArgin)");
                }
                break;
            case TangoConst.Tango_DEV_ULONG :
                if (!numericalValue.isNaN ()) {
                    deviceDataArgin.insert_ul (numericalValue.longValue ());
                } else {
                    Except.throw_exception ("TANGO_WRONG_DATA_ERROR", firsString + " is not a Tango_DEV_ULONG", "CommandHelper.insertFromStringArray(String[] values,deviceDataArgin)");
                }
                break;
            case TangoConst.Tango_DEV_LONG64 :
                Except.throw_exception ("TANGO_WRONG_DATA_ERROR", "input type Tango_DEV_LONG64 not supported", "CommandHelper.insertFromStringArray(String[] values,deviceDataArgin)");
                break;
            case TangoConst.Tango_DEV_ULONG64 :
                if (!numericalValue.isNaN ()) {
                    deviceDataArgin.insert_u64 (numericalValue.longValue ());
                } else {
                    Except.throw_exception ("TANGO_WRONG_DATA_ERROR", firsString + " is not a Tango_DEV_ULONG64", "CommandHelper.insertFromStringArray(String[] values,deviceDataArgin)");
                }
                break;
            case TangoConst.Tango_DEV_INT :
                if (!numericalValue.isNaN ()) {
                    deviceDataArgin.insert (numericalValue.intValue ());
                } else {
                    Except.throw_exception ("TANGO_WRONG_DATA_ERROR", firsString + " is not a Tango_DEV_INT", "CommandHelper.insertFromStringArray(String[] values,deviceDataArgin)");
                }
                break;
            case TangoConst.Tango_DEV_FLOAT :
                if (!numericalValue.isNaN ()) {
                    deviceDataArgin.insert (numericalValue.floatValue ());
                } else {
                    Except.throw_exception ("TANGO_WRONG_DATA_ERROR", firsString + " is not a Tango_DEV_FLOAT", "CommandHelper.insertFromStringArray(String[] values,deviceDataArgin)");
                }
                break;
            case TangoConst.Tango_DEV_DOUBLE :
                if (!numericalValue.isNaN ()) {
                    deviceDataArgin.insert (numericalValue.doubleValue ());
                } else {
                    Except.throw_exception ("TANGO_WRONG_DATA_ERROR", firsString + " is not a Tango_DEV_DOUBLE", "CommandHelper.insertFromStringArray(String[] values,deviceDataArgin)");
                }
                break;
            case TangoConst.Tango_DEV_STRING :
                deviceDataArgin.insert (firsString);
                break;
            case TangoConst.Tango_DEV_BOOLEAN :
                if (!numericalValue.isNaN ()) {
                    // Only the exact value 1 is treated as true.
                    if (numericalValue.doubleValue () == 1) {
                        deviceDataArgin.insert (true);
                    } else {
                        deviceDataArgin.insert (false);
                    }
                } else {
                    Except.throw_exception ("TANGO_WRONG_DATA_ERROR", firsString + " is not a Tango_DEV_BOOLEAN", "CommandHelper.insertFromStringArray(String[] values,deviceDataArgin)");
                }
                break;
            case TangoConst.Tango_DEV_STATE :
                Except.throw_exception ("TANGO_WRONG_DATA_ERROR", "Tango_DEV_STATE is not supported", "CommandHelper.insertFromStringArray(String[] values,deviceDataArgin)");
                break;
            // Array input types.
            case TangoConst.Tango_DEVVAR_SHORTARRAY :
                final short [] shortValues = new short [stringValues.length];
                for (int i = 0; i < stringValues.length; i++) {
                    try {
                        shortValues [i] = Double.valueOf (stringValues [i]).shortValue ();
                    } catch (final Exception e) {
                        Except.throw_exception ("TANGO_WRONG_DATA_ERROR", "input is not a Tango_DEVVAR_SHORTARRAY", "CommandHelper.insertFromStringArray(String[] values,deviceDataArgin)");
                    }
                }
                deviceDataArgin.insert (shortValues);
                break;
            case TangoConst.Tango_DEVVAR_USHORTARRAY :
                final short [] ushortValues = new short [stringValues.length];
                for (int i = 0; i < stringValues.length; i++) {
                    try {
                        ushortValues [i] = Double.valueOf (stringValues [i]).shortValue ();
                    } catch (final Exception e) {
                        Except.throw_exception ("TANGO_WRONG_DATA_ERROR", "input is not a Tango_DEVVAR_USHORTARRAY", "CommandHelper.insertFromStringArray(String[] values,deviceDataArgin)");
                    }
                }
                deviceDataArgin.insert_us (ushortValues);
                break;
            case TangoConst.Tango_DEVVAR_CHARARRAY :
                final byte [] byteValues = new byte [stringValues.length];
                for (int i = 0; i < stringValues.length; i++) {
                    try {
                        byteValues [i] = Double.valueOf (stringValues [i]).byteValue ();
                    } catch (final Exception e) {
                        Except.throw_exception ("TANGO_WRONG_DATA_ERROR", "input is not a Tango_DEVVAR_CHARARRAY", "CommandHelper.insertFromStringArray(String[] values,deviceDataArgin)");
                    }
                }
                deviceDataArgin.insert (byteValues);
                break;
            case TangoConst.Tango_DEVVAR_LONGARRAY :
                final int [] longValues = new int [stringValues.length];
                for (int i = 0; i < stringValues.length; i++) {
                    try {
                        longValues [i] = Double.valueOf (stringValues [i]).intValue ();
                    } catch (final Exception e) {
                        Except.throw_exception ("TANGO_WRONG_DATA_ERROR", "input is not a Tango_DEVVAR_LONGARRAY", "CommandHelper.insertFromStringArray(String[] values,deviceDataArgin)");
                    }
                }
                deviceDataArgin.insert (longValues);
                break;
            case TangoConst.Tango_DEVVAR_ULONGARRAY :
                final long [] ulongValues = new long [stringValues.length];
                for (int i = 0; i < stringValues.length; i++) {
                    try {
                        ulongValues [i] = Double.valueOf (stringValues [i]).longValue ();
                    } catch (final Exception e) {
                        // NOTE(review): error message says LONGARRAY for the ULONGARRAY case
                        // (copy-paste) — flagged, left unchanged here.
                        Except.throw_exception ("TANGO_WRONG_DATA_ERROR", "input is not a Tango_DEVVAR_LONGARRAY", "CommandHelper.insertFromStringArray(String[] values,deviceDataArgin)");
                    }
                }
                deviceDataArgin.insert_ul (ulongValues);
                break;
            case TangoConst.Tango_DEVVAR_LONG64ARRAY :
                // NOTE(review): 64-bit array values are narrowed through intValue()
                // into an int[] — possible loss of range; confirm intended.
                final int [] long64Values = new int [stringValues.length];
                for (int i = 0; i < stringValues.length; i++) {
                    try {
                        long64Values [i] = Double.valueOf (stringValues [i]).intValue ();
                    } catch (final Exception e) {
                        Except.throw_exception ("TANGO_WRONG_DATA_ERROR", "input is not a Tango_DEVVAR_LONGARRAY", "CommandHelper.insertFromStringArray(String[] values,deviceDataArgin)");
                    }
                }
                deviceDataArgin.insert (long64Values);
                break;
            case TangoConst.Tango_DEVVAR_ULONG64ARRAY :
                final long [] ulong64Values = new long [stringValues.length];
                for (int i = 0; i < stringValues.length; i++) {
                    try {
                        ulong64Values [i] = Double.valueOf (stringValues [i]).longValue ();
                    } catch (final Exception e) {
                        Except.throw_exception ("TANGO_WRONG_DATA_ERROR", "input is not a Tango_DEVVAR_LONGARRAY", "CommandHelper.insertFromStringArray(String[] values,deviceDataArgin)");
                    }
                }
                deviceDataArgin.insert_u64 (ulong64Values);
                break;
            case TangoConst.Tango_DEVVAR_FLOATARRAY :
                final float [] floatValues = new float [stringValues.length];
                for (int i = 0; i < stringValues.length; i++) {
                    try {
                        floatValues [i] = Double.valueOf (stringValues [i]).floatValue ();
                    } catch (final Exception e) {
                        Except.throw_exception ("TANGO_WRONG_DATA_ERROR", "input is not a Tango_DEVVAR_FLOATARRAY", "CommandHelper.insertFromStringArray(String[] values,deviceDataArgin)");
                    }
                }
                deviceDataArgin.insert (floatValues);
                break;
            case TangoConst.Tango_DEVVAR_DOUBLEARRAY :
                final double [] doubleValues = new double [stringValues.length];
                for (int i = 0; i < stringValues.length; i++) {
                    try {
                        doubleValues [i] = Double.valueOf (stringValues [i]).doubleValue ();
                    } catch (final Exception e) {
                        Except.throw_exception ("TANGO_WRONG_DATA_ERROR", "input is not a Tango_DEVVAR_DOUBLEARRAY", "CommandHelper.insertFromStringArray(String[] values,deviceDataArgin)");
                    }
                }
                deviceDataArgin.insert (doubleValues);
                break;
            case TangoConst.Tango_DEVVAR_STRINGARRAY :
                deviceDataArgin.insert (stringValues);
                break;
            default :
                Except.throw_exception ("TANGO_WRONG_DATA_ERROR", "input type " + deviceDataArgin.getType () + " not supported", "CommandHelper.insertFromStringArray(String[] value,deviceDataArgin)");
                break;
        }
    }
}
public class BundleWatcher { /** * Un - registers each entry from the unregistered bundle by first notifying the observers . If an exception appears
* during notification , it is ignored .
* @ param bundle the un - registred bundle */
private void unregister ( final Bundle bundle ) { } } | if ( bundle == null ) return ; // no need to go any further , system probably stopped .
LOG . debug ( "Releasing bundle [" + bundle . getSymbolicName ( ) + "]" ) ; final List < T > resources = m_mappings . get ( bundle ) ; if ( resources != null && resources . size ( ) > 0 ) { LOG . debug ( "Un-registering " + resources ) ; for ( BundleObserver < T > observer : m_observers ) { try { observer . removingEntries ( bundle , Collections . unmodifiableList ( resources ) ) ; } catch ( Throwable ignore ) { LOG . error ( "Ignored exception during un-register" , ignore ) ; } } } m_mappings . remove ( bundle ) ; |
public class EntityREST { /** * Entity Partial Update - Allows a subset of attributes to be updated on
* an entity which is identified by its type and unique attribute eg : Referenceable . qualifiedName .
* Null updates are not possible */
@ PUT @ Consumes ( Servlets . JSON_MEDIA_TYPE ) @ Produces ( Servlets . JSON_MEDIA_TYPE ) @ Path ( "/uniqueAttribute/type/{typeName}" ) public EntityMutationResponse partialUpdateEntityByUniqueAttrs ( @ PathParam ( "typeName" ) String typeName , @ Context HttpServletRequest servletRequest , AtlasEntityWithExtInfo entityInfo ) throws Exception { } } | AtlasPerfTracer perf = null ; try { Map < String , Object > uniqueAttributes = getAttributes ( servletRequest ) ; if ( AtlasPerfTracer . isPerfTraceEnabled ( PERF_LOG ) ) { perf = AtlasPerfTracer . getPerfTracer ( PERF_LOG , "EntityREST.partialUpdateEntityByUniqueAttrs(" + typeName + "," + uniqueAttributes + ")" ) ; } AtlasEntityType entityType = ensureEntityType ( typeName ) ; validateUniqueAttribute ( entityType , uniqueAttributes ) ; return entitiesStore . updateByUniqueAttributes ( entityType , uniqueAttributes , entityInfo ) ; } finally { AtlasPerfTracer . log ( perf ) ; } |
public class ParameterOverrideInfoLookup { /** * Get locked parameter names for specific configuration Id .
* @ param configurationId Configuration Id
* @ return Parameter names */
public Set < String > getLockedParameterNames ( String configurationId ) { } } | Set < String > lockedParameterNamesScopeMapEntry = lockedParameterNamesScopeMap . get ( configurationId ) ; if ( lockedParameterNamesScopeMapEntry != null ) { return lockedParameterNamesScopeMapEntry ; } else { return ImmutableSet . of ( ) ; } |
public class DiskAccessOneFile { /** * WARNING : float / double values need to be converted with BitTools before used on indices . */
@ Override public CloseableIterator < ZooPC > readObjectFromIndex ( ZooFieldDef field , long minValue , long maxValue , boolean loadFromCache ) { } } | SchemaIndexEntry se = schemaIndex . getSchema ( field . getDeclaringType ( ) ) ; LongLongIndex fieldInd = se . getIndex ( field ) ; LLEntryIterator iter = fieldInd . iterator ( minValue , maxValue ) ; return new ObjectIterator ( iter , cache , this , objectReader , loadFromCache ) ; |
public class TypeCheckingExtension { /** * Builds a parametrized class node representing the Map & lt ; keyType , valueType & gt ; type .
* @ param keyType the classnode type of the key
* @ param valueType the classnode type of the value
* @ return a class node for Map & lt ; keyType , valueType & gt ;
* @ since 2.2.0 */
public ClassNode buildMapType ( ClassNode keyType , ClassNode valueType ) { } } | return parameterizedType ( ClassHelper . MAP_TYPE , keyType , valueType ) ; |
public class DatatypeConverter { /** * Print duration time units .
* Note that we don ' t differentiate between confirmed and unconfirmed
* durations . Unrecognised duration types are default to hours .
* @ param value Duration units
* @ param estimated is this an estimated duration
* @ return BigInteger value */
public static final BigInteger printDurationTimeUnits ( TimeUnit value , boolean estimated ) { } } | int result ; if ( value == null ) { value = TimeUnit . HOURS ; } switch ( value ) { case MINUTES : { result = ( estimated ? 35 : 3 ) ; break ; } case ELAPSED_MINUTES : { result = ( estimated ? 36 : 4 ) ; break ; } case ELAPSED_HOURS : { result = ( estimated ? 38 : 6 ) ; break ; } case DAYS : { result = ( estimated ? 39 : 7 ) ; break ; } case ELAPSED_DAYS : { result = ( estimated ? 40 : 8 ) ; break ; } case WEEKS : { result = ( estimated ? 41 : 9 ) ; break ; } case ELAPSED_WEEKS : { result = ( estimated ? 42 : 10 ) ; break ; } case MONTHS : { result = ( estimated ? 43 : 11 ) ; break ; } case ELAPSED_MONTHS : { result = ( estimated ? 44 : 12 ) ; break ; } case PERCENT : { result = ( estimated ? 51 : 19 ) ; break ; } case ELAPSED_PERCENT : { result = ( estimated ? 52 : 20 ) ; break ; } default : case HOURS : { result = ( estimated ? 37 : 5 ) ; break ; } } return ( BigInteger . valueOf ( result ) ) ; |
public class JdbcTable {
    /**
     * Add this record.
     * This is the special logic to add a record without db autosequence:
     * <br/> 1. Read the largest existing key.
     * <br/> 2. Write the next largest key (and loop, bumping the key, until the insert succeeds).
     * <br/> 3. Save the counter for a possible getLastModified call.
     * @exception DBException File exception.
     */
    public void doAddAutosequence(Record record) throws DBException {
        record = record.getBaseRecord(); // Must operate on the raw table
        String strRecordset = null;
        m_LastModifiedBookmark = null;
        CounterField fldID = (CounterField) record.getCounterField();
        // First step - get the largest current key.
        // Save current order, ascending/descending direction, and field selection
        // so they can be restored after the max-key query.
        int iOrigOrder = record.getDefaultOrder();
        boolean bOrigDirection = record.getKeyArea(DBConstants.MAIN_KEY_AREA).getKeyField(DBConstants.MAIN_KEY_FIELD).getKeyOrder();
        boolean bCounterSelected = fldID.isSelected();
        boolean brgSelected[] = new boolean[record.getFieldCount()];
        for (int iFieldSeq = DBConstants.MAIN_FIELD; iFieldSeq <= record.getFieldCount() + DBConstants.MAIN_FIELD - 1; iFieldSeq++) {
            brgSelected[iFieldSeq] = record.getField(iFieldSeq).isSelected();
            record.getField(iFieldSeq).setSelected(false); // De-select all fields
        }
        fldID.setSelected(true); // Only select the counter field
        // Set reverse order (descending) so the first row of the query is the max key.
        record.setKeyArea(DBConstants.MAIN_KEY_AREA);
        record.getKeyArea(DBConstants.MAIN_KEY_AREA).getKeyField(DBConstants.MAIN_KEY_FIELD).setKeyOrder(DBConstants.DESCENDING);
        boolean[] rgbEnabled = record.setEnableListeners(false); // Temporarily disable behaviors
        strRecordset = record.getSQLQuery(false, null);
        ResultSet resultSet = this.executeQuery(strRecordset, DBConstants.SQL_AUTOSEQUENCE_TYPE, null);
        this.setResultSet(resultSet, DBConstants.SQL_AUTOSEQUENCE_TYPE);
        // Restore key area and direction immediately, before I forget.
        record.setKeyArea(iOrigOrder);
        record.getKeyArea(DBConstants.MAIN_KEY_AREA).getKeyField(DBConstants.MAIN_KEY_FIELD).setKeyOrder(bOrigDirection);
        try {
            m_iColumn = 1;
            boolean bMore = resultSet.next();
            if (!bMore) {
                // Empty table: start one below the starting ID (it is bumped before insert).
                int iStartingID = record.getStartingID();
                fldID.setValue(iStartingID - 1, DBConstants.DONT_DISPLAY, DBConstants.READ_MOVE);
            } else
                this.dataToField(fldID); // Move the high value to the ID
            fldID.setModified(true); // Just to be sure
        } catch (SQLException ex) {
            record.setEnableListeners(rgbEnabled); // Re-enable behaviors
            throw this.getDatabase().convertError(ex);
        } finally {
            // Restore the original field selection whether or not the query worked.
            for (int iFieldSeq = DBConstants.MAIN_FIELD; iFieldSeq <= record.getFieldCount() + DBConstants.MAIN_FIELD - 1; iFieldSeq++) {
                record.getField(iFieldSeq).setSelected(brgSelected[iFieldSeq]);
            }
            fldID.setSelected(bCounterSelected);
            // Close resultset. If I share statements with an active query, reset the
            // active query (Ouch - serious performance hit).
            this.setResultSet(null, DBConstants.SQL_AUTOSEQUENCE_TYPE);
        }
        if (!bCounterSelected)
            fldID.setSelected(true); // Counter must be selected to write this record
        // Second step - bump the key and write until successful.
        int iCount = 0;
        while (iCount++ < MAX_DUPLICATE_ATTEMPTS) {
            fldID.setValue(fldID.getValue() + 1, DBConstants.DONT_DISPLAY, DBConstants.READ_MOVE); // Bump counter
            strRecordset = record.getSQLInsert(SQLParams.USE_INSERT_UPDATE_LITERALS);
            int iType = DBConstants.SQL_INSERT_TABLE_TYPE;
            int iRowsUpdated = 0;
            try {
                iRowsUpdated = this.executeUpdate(strRecordset, iType);
            } catch (DBException ex) {
                if (ex.getErrorCode() == DBConstants.DUPLICATE_COUNTER) // Duplicate key
                {
                    // Another writer grabbed this key first; bump and retry.
                    Utility.getLogger().info("Duplicate key, bumping value");
                    iRowsUpdated = 0;
                } else {
                    if (!bCounterSelected) // Set back to orig value
                        fldID.setSelected(bCounterSelected);
                    record.setEnableListeners(rgbEnabled); // Re-enable behaviors
                    throw ex;
                }
            }
            if (iRowsUpdated == 1)
                break; // Success!!!
        }
        if (!bCounterSelected) // Set back to orig value
            fldID.setSelected(bCounterSelected);
        record.setEnableListeners(rgbEnabled); // Re-enable behaviors
        if (iCount > MAX_DUPLICATE_ATTEMPTS)
            throw new DBException(DBConstants.DUPLICATE_KEY); // Highly unlikely
        // Third step - save the counter for a possible getLastModified call.
        m_LastModifiedBookmark = record.getHandle(DBConstants.BOOKMARK_HANDLE);
    }
}
public class MockMessage { /** * / * ( non - Javadoc )
* @ see javax . mail . internet . MimeMessage # setRecipients ( javax . mail . Message . RecipientType , javax . mail . Address [ ] ) */
@ Override public void setRecipients ( final javax . mail . Message . RecipientType type , final Address [ ] addresses ) throws MessagingException { } } | throw new IllegalWriteException ( "Mock messages are read-only" ) ; |
public class Percentile { /** * < div color = ' red ' style = " font - size : 24px ; color : red " > < b > < i > < u > JCYPHER < / u > < / i > < / b > < / div >
* < div color = ' red ' style = " font - size : 18px ; color : red " > < i > specify the properties over which to perform the percentileDisc or PercentileCont calculation < / i > < / div >
* < div color = ' red ' style = " font - size : 18px ; color : red " > < i > e . g . . . . aggregate ( ) . percentileDisc ( 0.5 ) . < b > over ( n . property ( " age " ) ) < / b > . . . < / i > < / div >
* < br / > */
public RElement < RElement < ? > > over ( JcProperty property ) { } } | ReturnExpression rx = ( ReturnExpression ) this . astNode ; ReturnAggregate ra = ( ReturnAggregate ) rx . getReturnValue ( ) ; ra . setArgument ( property ) ; RElement < RElement < ? > > ret = new RElement < RElement < ? > > ( rx ) ; return ret ; |
public class Latkes { /** * Gets a property specified by the given key from file " local . properties " .
* @ param key the given key
* @ return the value , returns { @ code null } if not found */
public static String getLocalProperty ( final String key ) { } } | String ret = localProps . getProperty ( key ) ; if ( StringUtils . isBlank ( ret ) ) { return ret ; } ret = replaceEnvVars ( ret ) ; return ret ; |
public class DrpcFetchHelper {
    /**
     * Reports a failure response for the given DRPC request to the client that
     * issued it. (Original Javadoc, translated from Japanese: "Returns the
     * response when processing failed.")
     *
     * @param requestId the DRPC request id
     * @throws TException if sending the failure response fails
     */
    public void fail(String requestId) throws TException {
        // NOTE(review): requestedMap.remove() returns null for an unknown
        // requestId, which would unbox into a NullPointerException on the next
        // line - confirm callers only pass ids previously recorded here.
        int index = this.requestedMap.remove(requestId);
        DRPCInvocationsClient client = this.clients.get(index);
        client.failRequest(requestId);
    }
}
public class Data { /** * TODO : What does this method do ? What is parameter i ( rename ) ? */
@ Nonnull static String headerLetter ( final int i ) { } } | final int flags = DATA_MODEL . getDataFlags ( i ) ; // TODO : The " magic " of how to interpret flags must be in DataModel , not here .
if ( ( ( flags >> 7 ) & 3 ) == 1 ) { return Character . toString ( ENCODE_CHARS [ ( flags >> 11 ) & 31 ] ) ; } return "" ; |
public class ReverseMap { /** * Creates a reverse map name corresponding to an address contained in
* a String .
* @ param addr The address from which to build a name .
* @ return The name corresponding to the address in the reverse map .
* @ throws UnknownHostException The string does not contain a valid address . */
public static Name fromAddress ( String addr , int family ) throws UnknownHostException { } } | byte [ ] array = Address . toByteArray ( addr , family ) ; if ( array == null ) throw new UnknownHostException ( "Invalid IP address" ) ; return fromAddress ( array ) ; |
public class Bag { /** * Returns the bag - of - words features of a document . The features are real - valued
* in convenience of most learning algorithms although they take only integer
* or binary values . */
@ Override public double [ ] feature ( T [ ] x ) { } } | double [ ] bag = new double [ features . size ( ) ] ; if ( binary ) { for ( T word : x ) { Integer f = features . get ( word ) ; if ( f != null ) { bag [ f ] = 1.0 ; } } } else { for ( T word : x ) { Integer f = features . get ( word ) ; if ( f != null ) { bag [ f ] ++ ; } } } return bag ; |
public class QueryParserKraken {
    /**
     * Scans the next token from the input. If the lexeme is a string-valued
     * token (identifier, number, or string literal), its text is stored in
     * {@code _lexeme}.
     *
     * Supports one token of pushback via {@code _token}, and one character of
     * pushback via {@code unread()}.
     *
     * @return the next {@link Token}, or {@code Token.EOF} at end of input
     */
    private Token scanToken() {
        // Return a pushed-back token first, if any.
        Token token = _token;
        if (token != null) {
            _token = null;
            return token;
        }
        // NOTE(review): sign is never set to -1 anywhere live; it belonged to
        // the commented-out '+'/'-' numeric-prefix handling below.
        int sign = 1;
        int ch;
        // Skip leading whitespace.
        for (ch = read(); Character.isWhitespace((char) ch); ch = read()) {
        }
        switch (ch) {
        case -1:
            return Token.EOF;
        case '(':
            return Token.LPAREN;
        case ')':
            return Token.RPAREN;
        case ',':
            return Token.COMMA;
        case '*':
            return Token.STAR;
        case '-':
            return Token.MINUS;
        case '+':
            return Token.PLUS;
        case '/':
            return Token.DIV;
        case '%':
            return Token.MOD;
        case '.':
            return Token.DOT;
        case '?':
            return Token.QUESTION_MARK;
        // Historical, disabled handling kept for reference: raw-char returns for
        // '/', '%', '?', signed-number prefixes for '+'/'-', the '||' CONCAT
        // operator, and '@@'.
        case '=':
            return Token.EQ;
        case '<':
            // '<=', '<>' (not-equal), or plain '<'.
            if ((ch = read()) == '=')
                return Token.LE;
            else if (ch == '>')
                return Token.NE;
            else {
                unread(ch);
                return Token.LT;
            }
        case '>':
            // '>=' or plain '>'.
            if ((ch = read()) == '=')
                return Token.GE;
            else {
                unread(ch);
                return Token.GT;
            }
        }
        if (Character.isJavaIdentifierStart((char) ch) || ch == ':') {
            // Identifier or reserved word (case-insensitive lookup).
            CharBuffer cb = _cb;
            cb.clear();
            for (; ch > 0 && isIdentifierPart((char) ch); ch = read()) {
                cb.append((char) ch);
            }
            unread(ch);
            _lexeme = cb.toString();
            String lower = _lexeme.toLowerCase(Locale.ENGLISH);
            token = _reserved.get(lower);
            if (token != null)
                return token;
            else
                return Token.IDENTIFIER;
        }
        else if (ch >= '0' && ch <= '9') {
            // Numeric literal: INTEGER, or DOUBLE when it has a fraction,
            // exponent, or 'F'/'D' suffix, or LONG with an 'L' suffix.
            CharBuffer cb = _cb;
            cb.clear();
            Token type = Token.INTEGER;
            if (sign < 0)
                cb.append('-');
            for (; ch >= '0' && ch <= '9'; ch = read())
                cb.append((char) ch);
            if (ch == '.') {
                type = Token.DOUBLE;
                cb.append('.');
                for (ch = read(); ch >= '0' && ch <= '9'; ch = read())
                    cb.append((char) ch);
            }
            if (ch == 'e' || ch == 'E') {
                type = Token.DOUBLE;
                cb.append('e');
                if ((ch = read()) == '+' || ch == '-') {
                    cb.append((char) ch);
                    ch = read();
                }
                if (!(ch >= '0' && ch <= '9'))
                    throw error(L.l("exponent needs digits at {0}", charName(ch)));
                for (; ch >= '0' && ch <= '9'; ch = read())
                    cb.append((char) ch);
            }
            if (ch == 'F' || ch == 'D')
                type = Token.DOUBLE;
            else if (ch == 'L') {
                type = Token.LONG;
            }
            else
                unread(ch);
            _lexeme = cb.toString();
            return type;
        }
        else if (ch == '\'') {
            // Single-quoted string literal; '' is an escaped quote.
            CharBuffer cb = _cb;
            cb.clear();
            for (ch = read(); ch >= 0; ch = read()) {
                if (ch == '\'') {
                    if ((ch = read()) == '\'')
                        cb.append('\'');
                    else {
                        unread(ch);
                        break;
                    }
                } else if (ch == '\\') {
                    ch = read();
                    if (ch >= 0)
                        // NOTE(review): appends the int, not a cast char, unlike the
                        // other branches - confirm CharBuffer.append(int) does what
                        // is intended here.
                        cb.append(ch);
                }
                else
                    cb.append((char) ch);
            }
            _lexeme = cb.toString();
            return Token.STRING;
        }
        else if (ch == '#') {
            // skip comment to end of line
            while ((ch = read()) >= 0 && ch != '\n' && ch != '\r') {
            }
            // XXX: cleanup to avoid recursion
            return scanToken();
        }
        throw error(L.l("unexpected char at {0} ({1})", "" + (char) ch, String.valueOf(ch)));
    }
}
public class CobolDecimalType { /** * Determines the maximum value a numeric can take .
* The maximum is constrained either from the host side by the the number of
* digits in the picture clause or by the mapping java type maximum value
* whichever is smaller .
* @ param clazz the target java numeric type
* @ param totalDigits the host total number of digits
* @ param fractionDigits the host fractional number of digits
* @ return the maximum value this java numeric can take */
@ SuppressWarnings ( "unchecked" ) private static < D extends Number > D getJavaMaxInclusive ( Class < D > clazz , int totalDigits , int fractionDigits ) { } } | int dec = totalDigits - fractionDigits ; char [ ] decPart = new char [ dec ] ; Arrays . fill ( decPart , '9' ) ; StringBuilder sb = new StringBuilder ( ) ; sb . append ( decPart ) ; if ( clazz . equals ( Short . class ) ) { return ( D ) ( dec > 4 ? Short . valueOf ( Short . MAX_VALUE ) : Short . valueOf ( sb . toString ( ) ) ) ; } else if ( clazz . equals ( Integer . class ) ) { return ( D ) ( dec > 9 ? Integer . valueOf ( Integer . MAX_VALUE ) : Integer . valueOf ( sb . toString ( ) ) ) ; } else if ( clazz . equals ( Long . class ) ) { return ( D ) ( dec > 18 ? Long . valueOf ( Long . MAX_VALUE ) : Long . valueOf ( sb . toString ( ) ) ) ; } else if ( clazz . equals ( BigInteger . class ) ) { return ( D ) new BigInteger ( sb . toString ( ) ) ; } else if ( clazz . equals ( BigDecimal . class ) ) { if ( fractionDigits > 0 ) { sb . append ( JAVA_DECIMAL_POINT ) ; char [ ] fracPart = new char [ fractionDigits ] ; Arrays . fill ( fracPart , '9' ) ; sb . append ( fracPart ) ; } return ( D ) new BigDecimal ( sb . toString ( ) ) ; } else { throw new IllegalArgumentException ( "Unsupported java type " + clazz ) ; } |
public class CollapseProperties {
    /**
     * Adds global variable "stubs" for any properties of a global name that are only set in a local
     * scope or read but never set.
     *
     * @param n An object representing a global name (e.g. "a", "a.b.c")
     * @param alias The flattened name of the object whose properties we are adding stubs for (e.g.
     *     "a$b$c")
     * @param parent The node to which new global variables should be added as children
     * @param addAfter The child of parent after which new variables should be added
     */
    private void addStubsForUndeclaredProperties(Name n, String alias, Node parent, Node addAfter) {
        // Preconditions: the name must be collapsible and the insertion point valid.
        checkState(n.canCollapseUnannotatedChildNames(), n);
        checkArgument(NodeUtil.isStatementBlock(parent), parent);
        checkNotNull(addAfter);
        if (n.props == null) {
            return; // no properties, nothing to stub
        }
        for (Name p : n.props) {
            if (p.needsToBeStubbed()) {
                String propAlias = appendPropForAlias(alias, p.getBaseName());
                Node nameNode = IR.name(propAlias);
                Node newVar = IR.var(nameNode).useSourceInfoIfMissingFromForTree(addAfter);
                parent.addChildAfter(newVar, addAfter);
                // Chain subsequent stubs after this one to preserve declaration order.
                addAfter = newVar;
                compiler.reportChangeToEnclosingScope(newVar);
                // Determine if this is a constant var by checking the first
                // reference to it. Don't check the declaration, as it might be null.
                if (p.getFirstRef().getNode().getLastChild().getBooleanProp(Node.IS_CONSTANT_NAME)) {
                    nameNode.putBooleanProp(Node.IS_CONSTANT_NAME, true);
                    compiler.reportChangeToEnclosingScope(nameNode);
                }
            }
        }
    }
}
public class Storage { /** * Write all data storage files .
* @ throws IOException */
public void writeAll ( ) throws IOException { } } | this . layoutVersion = FSConstants . LAYOUT_VERSION ; for ( Iterator < StorageDirectory > it = storageDirs . iterator ( ) ; it . hasNext ( ) ; ) { it . next ( ) . write ( ) ; } |
public class sslfipssimtarget { /** * Use this API to init sslfipssimtarget . */
public static base_response init ( nitro_service client , sslfipssimtarget resource ) throws Exception { } } | sslfipssimtarget initresource = new sslfipssimtarget ( ) ; initresource . certfile = resource . certfile ; initresource . keyvector = resource . keyvector ; initresource . targetsecret = resource . targetsecret ; return initresource . perform_operation ( client , "init" ) ; |
public class CommerceUserSegmentCriterionPersistenceImpl { /** * Returns all the commerce user segment criterions .
* @ return the commerce user segment criterions */
@ Override public List < CommerceUserSegmentCriterion > findAll ( ) { } } | return findAll ( QueryUtil . ALL_POS , QueryUtil . ALL_POS , null ) ; |
public class KeyHelper { /** * serialize the primary key corresponding to a Handle to an entity bean for Rel . 3.5 , 4.0.
* This method is called from the writeObject
* @ param serializable - Serializable key to be converted into a KeyHelper .
* @ return Serializable the KeyHelper with the serialized primary key .
* @ exception java . io . IOException - The EJBObject could not be serialized
* because of a system - level failure . */
public static Serializable serialize ( Serializable serializable ) throws IOException { } } | if ( serializable == null ) { return null ; } if ( serializable instanceof KeyHelper ) { return serializable ; } ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream ( ) ; ObjectOutputStream objectOutputStream = new ObjectOutputStream ( byteArrayOutputStream ) ; objectOutputStream . writeObject ( serializable ) ; byteArrayOutputStream . flush ( ) ; // d164668
KeyHelper vkh = new KeyHelper ( ) ; vkh . vBytes = byteArrayOutputStream . toByteArray ( ) ; return vkh ; |
public class BaseLogging { /** * Write to log at DEBUG level .
* @ param format
* @ param args
* @ see java . util . Formatter # format ( java . lang . String , java . lang . Object . . . )
* @ see java . util . logging . Level */
public void debug ( String format , Object ... args ) { } } | if ( isLoggable ( DEBUG ) ) { logIt ( DEBUG , String . format ( format , args ) ) ; } |
public class ParquetGroupConverter { /** * Convert a primitive group field to a " ingestion friendly " java object
* @ return " ingestion ready " java object , or null */
@ Nullable private static Object convertPrimitiveField ( Group g , int fieldIndex , int index , boolean binaryAsString ) { } } | PrimitiveType pt = ( PrimitiveType ) g . getType ( ) . getFields ( ) . get ( fieldIndex ) ; OriginalType ot = pt . getOriginalType ( ) ; try { if ( ot != null ) { // convert logical types
switch ( ot ) { case DATE : long ts = g . getInteger ( fieldIndex , index ) * MILLIS_IN_DAY ; return ts ; case TIME_MICROS : return g . getLong ( fieldIndex , index ) ; case TIME_MILLIS : return g . getInteger ( fieldIndex , index ) ; case TIMESTAMP_MICROS : return TimeUnit . MILLISECONDS . convert ( g . getLong ( fieldIndex , index ) , TimeUnit . MICROSECONDS ) ; case TIMESTAMP_MILLIS : return g . getLong ( fieldIndex , index ) ; case INTERVAL : /* INTERVAL is used for an interval of time . It must annotate a fixed _ len _ byte _ array of length 12.
This array stores three little - endian unsigned integers that represent durations at different
granularities of time . The first stores a number in months , the second stores a number in days ,
and the third stores a number in milliseconds . This representation is independent of any particular
timezone or date .
Each component in this representation is independent of the others . For example , there is no
requirement that a large number of days should be expressed as a mix of months and days because there is
not a constant conversion from days to months .
The sort order used for INTERVAL is undefined . When writing data , no min / max statistics should be
saved for this type and if such non - compliant statistics are found during reading , they must be ignored . */
Binary intervalVal = g . getBinary ( fieldIndex , index ) ; IntBuffer intBuf = intervalVal . toByteBuffer ( ) . order ( ByteOrder . LITTLE_ENDIAN ) . asIntBuffer ( ) ; int months = intBuf . get ( 0 ) ; int days = intBuf . get ( 1 ) ; int millis = intBuf . get ( 2 ) ; StringBuilder periodBuilder = new StringBuilder ( "P" ) ; if ( months > 0 ) { periodBuilder . append ( months ) . append ( "M" ) ; } if ( days > 0 ) { periodBuilder . append ( days ) . append ( "D" ) ; } if ( periodBuilder . length ( ) > 1 ) { Period p = Period . parse ( periodBuilder . toString ( ) ) ; Duration d = p . toStandardDuration ( ) . plus ( millis ) ; return d ; } else { return new Duration ( millis ) ; } case INT_8 : case INT_16 : case INT_32 : return g . getInteger ( fieldIndex , index ) ; case INT_64 : return g . getLong ( fieldIndex , index ) ; // todo : idk wtd about unsigned
case UINT_8 : case UINT_16 : case UINT_32 : return g . getInteger ( fieldIndex , index ) ; case UINT_64 : return g . getLong ( fieldIndex , index ) ; case DECIMAL : /* DECIMAL can be used to annotate the following types :
int32 : for 1 < = precision < = 9
int64 : for 1 < = precision < = 18 ; precision < 10 will produce a warning
fixed _ len _ byte _ array : precision is limited by the array size . Length n can
store < = floor ( log _ 10(2 ^ ( 8 * n - 1 ) - 1 ) ) base - 10 digits
binary : precision is not limited , but is required . The minimum number of bytes to store
the unscaled value should be used . */
int precision = pt . asPrimitiveType ( ) . getDecimalMetadata ( ) . getPrecision ( ) ; int scale = pt . asPrimitiveType ( ) . getDecimalMetadata ( ) . getScale ( ) ; switch ( pt . getPrimitiveTypeName ( ) ) { case INT32 : return new BigDecimal ( g . getInteger ( fieldIndex , index ) ) ; case INT64 : return new BigDecimal ( g . getLong ( fieldIndex , index ) ) ; case FIXED_LEN_BYTE_ARRAY : case BINARY : Binary value = g . getBinary ( fieldIndex , index ) ; return convertBinaryToDecimal ( value , precision , scale ) ; default : throw new RE ( "Unknown 'DECIMAL' type supplied to primitive conversion: %s (this should never happen)" , pt . getPrimitiveTypeName ( ) ) ; } case UTF8 : case ENUM : case JSON : return g . getString ( fieldIndex , index ) ; case LIST : case MAP : case MAP_KEY_VALUE : case BSON : default : throw new RE ( "Non-primitive supplied to primitive conversion: %s (this should never happen)" , ot . name ( ) ) ; } } else { // fallback to handling the raw primitive type if no logical type mapping
switch ( pt . getPrimitiveTypeName ( ) ) { case BOOLEAN : return g . getBoolean ( fieldIndex , index ) ; case INT32 : return g . getInteger ( fieldIndex , index ) ; case INT64 : return g . getLong ( fieldIndex , index ) ; case FLOAT : return g . getFloat ( fieldIndex , index ) ; case DOUBLE : return g . getDouble ( fieldIndex , index ) ; case INT96 : Binary tsBin = g . getInt96 ( fieldIndex , index ) ; return convertInt96BinaryToTimestamp ( tsBin ) ; case FIXED_LEN_BYTE_ARRAY : case BINARY : Binary bin = g . getBinary ( fieldIndex , index ) ; byte [ ] bytes = bin . getBytes ( ) ; if ( binaryAsString ) { return StringUtils . fromUtf8 ( bytes ) ; } else { return bytes ; } default : throw new RE ( "Unknown primitive conversion: %s" , ot . name ( ) ) ; } } } catch ( Exception ex ) { return null ; } |
public class MassPointReliefType { /** * Gets the value of the genericApplicationPropertyOfMassPointRelief property .
* This accessor method returns a reference to the live list ,
* not a snapshot . Therefore any modification you make to the
* returned list will be present inside the JAXB object .
* This is why there is not a < CODE > set < / CODE > method for the genericApplicationPropertyOfMassPointRelief property .
* For example , to add a new item , do as follows :
* < pre >
* get _ GenericApplicationPropertyOfMassPointRelief ( ) . add ( newItem ) ;
* < / pre >
* Objects of the following type ( s ) are allowed in the list
* { @ link JAXBElement } { @ code < } { @ link Object } { @ code > }
* { @ link JAXBElement } { @ code < } { @ link Object } { @ code > } */
public List < JAXBElement < Object > > get_GenericApplicationPropertyOfMassPointRelief ( ) { } } | if ( _GenericApplicationPropertyOfMassPointRelief == null ) { _GenericApplicationPropertyOfMassPointRelief = new ArrayList < JAXBElement < Object > > ( ) ; } return this . _GenericApplicationPropertyOfMassPointRelief ; |
public class AmazonChimeClient { /** * Retrieves details for the specified Amazon Chime Voice Connector , such as timestamps , name , outbound host , and
* encryption requirements .
* @ param getVoiceConnectorRequest
* @ return Result of the GetVoiceConnector operation returned by the service .
* @ throws UnauthorizedClientException
* The client is not currently authorized to make the request .
* @ throws NotFoundException
* One or more of the resources in the request does not exist in the system .
* @ throws ForbiddenException
* The client is permanently forbidden from making the request . For example , when a user tries to create an
* account from an unsupported region .
* @ throws BadRequestException
* The input parameters don ' t match the service ' s restrictions .
* @ throws ThrottledClientException
* The client exceeded its request rate limit .
* @ throws ServiceUnavailableException
* The service is currently unavailable .
* @ throws ServiceFailureException
* The service encountered an unexpected error .
* @ sample AmazonChime . GetVoiceConnector
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / chime - 2018-05-01 / GetVoiceConnector " target = " _ top " > AWS API
* Documentation < / a > */
@ Override public GetVoiceConnectorResult getVoiceConnector ( GetVoiceConnectorRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeGetVoiceConnector ( request ) ; |
public class EntityGroupImpl { /** * A member must share the < code > entityType < / code > of its containing < code > IEntityGroup < / code > .
* If it is a group , it must have a unique name within each of its containing groups and the
* resulting group must not contain a circular reference . Removed the requirement for unique
* group names . ( 03-04-2004 , de )
* @ param gm org . apereo . portal . groups . IGroupMember
* @ exception GroupsException */
private void checkProspectiveMember ( IGroupMember gm ) throws GroupsException { } } | if ( gm . equals ( this ) ) { throw new GroupsException ( "Attempt to add " + gm + " to itself." ) ; } // Type check :
if ( this . getLeafType ( ) != gm . getLeafType ( ) ) { throw new GroupsException ( this + " and " + gm + " have different entity types." ) ; } // Circular reference check :
if ( gm . isGroup ( ) && gm . asGroup ( ) . deepContains ( this ) ) { throw new GroupsException ( "Adding " + gm + " to " + this + " creates a circular reference." ) ; } |
public class nat64 { /** * Use this API to fetch nat64 resource of given name . */
public static nat64 get ( nitro_service service , String name ) throws Exception { } } | nat64 obj = new nat64 ( ) ; obj . set_name ( name ) ; nat64 response = ( nat64 ) obj . get_resource ( service ) ; return response ; |
public class DefaultResourceResolver { /** * Do the standard resource resolving of sSystemId relative to sBaseURI
* @ param sSystemId
* The resource to search . May be relative to the base URI or absolute .
* May be < code > null < / code > if base URI is set .
* @ param sBaseURI
* The base URI from where the search is initiated . May be
* < code > null < / code > if sSystemId is set .
* @ param aClassLoader
* The class loader to be used for { @ link ClassPathResource } objects .
* May be < code > null < / code > in which case the default class loader is
* used .
* @ return The non - < code > null < / code > resource . May be non - existing !
* @ throws UncheckedIOException
* In case the file resolution ( to an absolute file ) fails . */
@ Nonnull public static IReadableResource getResolvedResource ( @ Nullable final String sSystemId , @ Nullable final String sBaseURI , @ Nullable final ClassLoader aClassLoader ) { } } | if ( StringHelper . hasNoText ( sSystemId ) && StringHelper . hasNoText ( sBaseURI ) ) throw new IllegalArgumentException ( "Both systemID and baseURI are null!" ) ; // Retrieve only once
final boolean bDebugResolve = isDebugResolve ( ) ; if ( LOGGER . isDebugEnabled ( ) ) LOGGER . debug ( "Trying to resolve resource " + sSystemId + " from base " + sBaseURI + ( aClassLoader == null ? "" : " with ClassLoader " + aClassLoader ) ) ; if ( bDebugResolve ) if ( LOGGER . isInfoEnabled ( ) ) LOGGER . info ( "doStandardResourceResolving ('" + sSystemId + "', '" + sBaseURI + "', " + aClassLoader + ")" ) ; // It happens quite often that some resolution does not work here
final URL aSystemURL = URLHelper . getAsURL ( sSystemId , false ) ; // Was an absolute URL requested ?
if ( aSystemURL != null ) { // File URL are handled separately , as they might be relative ( as in
// ' file : . . / dir / include . xml ' ) !
if ( ! aSystemURL . getProtocol ( ) . equals ( URLHelper . PROTOCOL_FILE ) ) { final URLResource ret = new URLResource ( aSystemURL ) ; if ( bDebugResolve ) if ( LOGGER . isInfoEnabled ( ) ) LOGGER . info ( " resolved system URL to " + ret ) ; return ret ; } } if ( ClassPathResource . isExplicitClassPathResource ( sBaseURI ) ) return _resolveClassPathResource ( sSystemId , sBaseURI , aClassLoader ) ; // jar : file or wsjar : file or zip : file ? ? ?
if ( isExplicitJarFileResource ( sBaseURI ) ) try { return _resolveJarFileResource ( sSystemId , sBaseURI ) ; } catch ( final MalformedURLException ex ) { throw new UncheckedIOException ( ex ) ; } // Try whether the base is a URI
final URL aBaseURL = URLHelper . getAsURL ( sBaseURI ) ; // Handle " file " protocol separately
if ( aBaseURL != null && ! aBaseURL . getProtocol ( ) . equals ( URLHelper . PROTOCOL_FILE ) ) try { return _resolveURLResource ( sSystemId , aBaseURL ) ; } catch ( final MalformedURLException ex ) { throw new UncheckedIOException ( ex ) ; } // Base is not a URL or a file based URL
File aBaseFile ; if ( aBaseURL != null ) aBaseFile = URLHelper . getAsFile ( aBaseURL ) ; else if ( sBaseURI != null ) aBaseFile = new File ( sBaseURI ) ; else aBaseFile = null ; if ( StringHelper . hasNoText ( sSystemId ) ) { // Nothing to resolve
// Note : BaseFile should always be set here !
final FileSystemResource ret = new FileSystemResource ( aBaseFile ) ; if ( bDebugResolve ) if ( LOGGER . isInfoEnabled ( ) ) LOGGER . info ( " resolved base URL to " + ret ) ; return ret ; } // Get the system ID file
File aSystemFile ; if ( aSystemURL != null ) aSystemFile = URLHelper . getAsFile ( aSystemURL ) ; else aSystemFile = new File ( sSystemId ) ; // If the provided file is an absolute file , take it
if ( aSystemFile . isAbsolute ( ) ) { final FileSystemResource aAbsFile = new FileSystemResource ( aSystemFile ) ; if ( ! aAbsFile . exists ( ) ) { // Sometimes paths starting with " / " are passed in - as they are
// considered absolute when running on Linux , try if a combined file
// eventually exists
final FileSystemResource aMerged = _getChildResource ( aBaseFile , aSystemFile ) ; if ( aMerged . exists ( ) ) { if ( bDebugResolve ) if ( LOGGER . isInfoEnabled ( ) ) LOGGER . info ( " resolved base + system URL to " + aMerged ) ; return aMerged ; } } // If the absolute version exists , or if both the absolute and the merged
// version do NOT exist , return the absolute version anyway .
if ( bDebugResolve ) if ( LOGGER . isInfoEnabled ( ) ) LOGGER . info ( " resolved system URL to " + aAbsFile ) ; return aAbsFile ; } final FileSystemResource ret = _getChildResource ( aBaseFile , aSystemFile ) ; if ( bDebugResolve ) if ( LOGGER . isInfoEnabled ( ) ) LOGGER . info ( " resolved base + system URL to " + ret ) ; return ret ; |
public class HierarchyEntityUtils { /** * addParent .
* @ param nodes a { @ link java . util . Collection } object .
* @ param toRoot a T object .
* @ param < T > a T object . */
public static < T extends HierarchyEntity < T , ? > > void addParent ( Collection < T > nodes , T toRoot ) { } } | Set < T > parents = CollectUtils . newHashSet ( ) ; for ( T node : nodes ) { while ( null != node . getParent ( ) && ! parents . contains ( node . getParent ( ) ) && ! Objects . equals ( node . getParent ( ) , toRoot ) ) { parents . add ( node . getParent ( ) ) ; node = node . getParent ( ) ; } } nodes . addAll ( parents ) ; |
public class JTimePopup { /** * Create this calendar in a popup menu and synchronize the text field on change .
* @ param dateTarget The initial date for this button .
* @ param button The calling button . */
public static JTimePopup createTimePopup ( Date dateTarget , Component button ) { } } | return JTimePopup . createTimePopup ( null , dateTarget , button , null ) ; |
public class SVGPath { /** * Draw a line to the given coordinates .
* @ param x new coordinates
* @ param y new coordinates
* @ return path object , for compact syntax . */
public SVGPath lineTo ( double x , double y ) { } } | return append ( PATH_LINE_TO ) . append ( x ) . append ( y ) ; |
public class BatchDeleteClusterSnapshotsResult { /** * A list of any errors returned .
* @ return A list of any errors returned . */
public java . util . List < SnapshotErrorMessage > getErrors ( ) { } } | if ( errors == null ) { errors = new com . amazonaws . internal . SdkInternalList < SnapshotErrorMessage > ( ) ; } return errors ; |
public class CheckMysql { /** * Configures the threshold evaluator . This plugin supports both the legacy
* threshold format and the new format specification .
* @ param thrb
* - the evaluator to be configured
* @ param cl
* - the received command line
* @ throws BadThresholdException
* - if the threshold can ' t be parsed */
@ Override public final void configureThresholdEvaluatorBuilder ( final ThresholdsEvaluatorBuilder thrb , final ICommandLine cl ) throws BadThresholdException { } } | if ( cl . hasOption ( "th" ) ) { super . configureThresholdEvaluatorBuilder ( thrb , cl ) ; } else { thrb . withLegacyThreshold ( "time" , null , cl . getOptionValue ( "warning" ) , cl . getOptionValue ( "critical" ) ) . withLegacyThreshold ( "secondsBehindMaster" , null , cl . getOptionValue ( "warning" ) , cl . getOptionValue ( "critical" ) ) ; } |
public class RestClient { /** * Create Gson converter for the service .
* @ return Gson converter . */
private Gson createGson ( ) { } } | GsonBuilder gsonBuilder = new GsonBuilder ( ) . disableHtmlEscaping ( ) . setLenient ( ) . addSerializationExclusionStrategy ( new ExclusionStrategy ( ) { @ Override public boolean shouldSkipField ( FieldAttributes f ) { return f . getAnnotation ( SerializedName . class ) == null ; } @ Override public boolean shouldSkipClass ( Class < ? > clazz ) { return false ; } } ) ; return gsonBuilder . create ( ) ; |
public class Utils { /** * Delete a file or directory
* @ param file { @ link File } representing file or directory
* @ throws IOException */
public static void deleteFileOrDirectory ( File file ) throws IOException { } } | if ( file . exists ( ) ) { if ( file . isDirectory ( ) ) { Path rootPath = Paths . get ( file . getAbsolutePath ( ) ) ; Files . walk ( rootPath , FileVisitOption . FOLLOW_LINKS ) . sorted ( Comparator . reverseOrder ( ) ) . map ( Path :: toFile ) . forEach ( File :: delete ) ; } else { file . delete ( ) ; } } else { throw new RuntimeException ( "File or directory does not exist" ) ; } |
public class AbstractAnimatedZoomableController { /** * Sets a new zoomable transformation and animates to it if desired .
* < p > If this method is called while an animation or gesture is already in progress ,
* the current animation or gesture will be stopped first .
* @ param newTransform new transform to make active
* @ param durationMs duration of the animation , or 0 to not animate
* @ param onAnimationComplete code to run when the animation completes . Ignored if durationMs = 0 */
public void setTransform ( Matrix newTransform , long durationMs , @ Nullable Runnable onAnimationComplete ) { } } | FLog . v ( getLogTag ( ) , "setTransform: duration %d ms" , durationMs ) ; if ( durationMs <= 0 ) { setTransformImmediate ( newTransform ) ; } else { setTransformAnimated ( newTransform , durationMs , onAnimationComplete ) ; } |
public class SoundManager { /** * adds the IzouSoundLine as permanent
* @ param izouSoundLine the izouSoundLine to add */
private void addPermanent ( IzouSoundLineBaseClass izouSoundLine ) { } } | debug ( "adding " + izouSoundLine + " to permanent" ) ; if ( ! izouSoundLine . isPermanent ( ) ) izouSoundLine . setToPermanent ( ) ; synchronized ( permanentUserReadWriteLock ) { endWaitingForUsage ( ) ; if ( permanentLines == null ) { permanentLines = Collections . synchronizedList ( new ArrayList < > ( ) ) ; } permanentLines . add ( new WeakReference < > ( izouSoundLine ) ) ; } // TODO : STOP the addon via the stop event |
public class FluentValidator { /** * 在待验证对象集合 < tt > t < / tt > 上 , 使用 < tt > v < / tt > 验证器进行验证
* 注 : 当集合为空时 , 则会跳过
* @ param t 待验证对象集合
* @ param v 验证器
* @ return FluentValidator */
public < T > FluentValidator onEach ( Collection < T > t , final Validator < T > v ) { } } | Preconditions . checkNotNull ( v , "Validator should not be NULL" ) ; if ( CollectionUtil . isEmpty ( t ) ) { lastAddCount = 0 ; } else { List < ValidatorElement > elementList = CollectionUtil . transform ( t , new Function < T , ValidatorElement > ( ) { @ Override public ValidatorElement apply ( T elem ) { composeIfPossible ( v , elem ) ; return new ValidatorElement ( elem , v ) ; } } ) ; lastAddCount = t . size ( ) ; doAdd ( new IterableValidatorElement ( elementList ) ) ; } return this ; |
public class Searcher { /** * Adds one or several attributes to facet on for the next queries .
* @ param attributes one or more attribute names .
* @ return this { @ link Searcher } for chaining . */
@ SuppressWarnings ( { } } | "WeakerAccess" , "unused" } ) // For library users
public Searcher addFacet ( String ... attributes ) { for ( String attribute : attributes ) { final Integer value = facetRequestCount . get ( attribute ) ; facetRequestCount . put ( attribute , value == null ? 1 : value + 1 ) ; if ( value == null || value == 0 ) { facets . add ( attribute ) ; } } rebuildQueryFacets ( ) ; return this ; |
public class CalendarApi { /** * Get attendees Get all invited attendees for a given event - - - This route
* is cached for up to 600 seconds SSO Scope :
* esi - calendar . read _ calendar _ events . v1
* @ param characterId
* An EVE character ID ( required )
* @ param eventId
* The id of the event requested ( required )
* @ param datasource
* The server name you would like data from ( optional , default to
* tranquility )
* @ param ifNoneMatch
* ETag from a previous request . A 304 will be returned if this
* matches the current ETag ( optional )
* @ param token
* Access token to use if unable to set a header ( optional )
* @ return List & lt ; CharacterCalendarAttendeesResponse & gt ;
* @ throws ApiException
* If fail to call the API , e . g . server error or cannot
* deserialize the response body */
public List < CharacterCalendarAttendeesResponse > getCharactersCharacterIdCalendarEventIdAttendees ( Integer characterId , Integer eventId , String datasource , String ifNoneMatch , String token ) throws ApiException { } } | ApiResponse < List < CharacterCalendarAttendeesResponse > > resp = getCharactersCharacterIdCalendarEventIdAttendeesWithHttpInfo ( characterId , eventId , datasource , ifNoneMatch , token ) ; return resp . getData ( ) ; |
public class XMLConfigWebFactory { /** * creates a new ServletConfig Impl Object
* @ param configServer
* @ param configDir
* @ param servletConfig
* @ return new Instance
* @ throws SAXException
* @ throws ClassNotFoundException
* @ throws PageException
* @ throws IOException
* @ throws TagLibException
* @ throws FunctionLibException
* @ throws NoSuchAlgorithmException
* @ throws BundleException */
public static ConfigWebImpl newInstance ( CFMLEngine engine , CFMLFactoryImpl factory , ConfigServerImpl configServer , Resource configDir , boolean isConfigDirACustomSetting , ServletConfig servletConfig ) throws SAXException , ClassException , PageException , IOException , TagLibException , FunctionLibException , NoSuchAlgorithmException , BundleException { } } | String hash = SystemUtil . hash ( servletConfig . getServletContext ( ) ) ; Map < String , String > labels = configServer . getLabels ( ) ; String label = null ; if ( labels != null ) { label = labels . get ( hash ) ; } if ( label == null ) label = hash ; // make sure the web context does not point to the same directory as the server context
if ( configDir . equals ( configServer . getConfigDir ( ) ) ) throw new ApplicationException ( "the web context [" + label + "] has defined the same configuration directory [" + configDir + "] as the server context" ) ; ConfigWeb [ ] webs = configServer . getConfigWebs ( ) ; if ( ! ArrayUtil . isEmpty ( webs ) ) { for ( int i = 0 ; i < webs . length ; i ++ ) { // not sure this is necessary if ( hash . equals ( ( ( ConfigWebImpl ) webs [ i ] ) . getHash ( ) ) ) continue ;
if ( configDir . equals ( webs [ i ] . getConfigDir ( ) ) ) throw new ApplicationException ( "the web context [" + label + "] has defined the same configuration directory [" + configDir + "] as the web context [" + webs [ i ] . getLabel ( ) + "]" ) ; } } SystemOut . print ( SystemUtil . getPrintWriter ( SystemUtil . OUT ) , "===================================================================\n" + "WEB CONTEXT (" + label + ")\n" + "-------------------------------------------------------------------\n" + "- config:" + configDir + ( isConfigDirACustomSetting ? " (custom setting)" : "" ) + "\n" + "- webroot:" + ReqRspUtil . getRootPath ( servletConfig . getServletContext ( ) ) + "\n" + "- hash:" + hash + "\n" + "- label:" + label + "\n" + "===================================================================\n" ) ; int iDoNew = doNew ( engine , configDir , false ) . updateType ; boolean doNew = iDoNew != NEW_NONE ; Resource configFile = configDir . getRealResource ( "lucee-web.xml." + TEMPLATE_EXTENSION ) ; String strPath = servletConfig . getServletContext ( ) . getRealPath ( "/WEB-INF" ) ; Resource path = ResourcesImpl . getFileResourceProvider ( ) . getResource ( strPath ) ; // get config file
if ( ! configFile . exists ( ) ) { createConfigFile ( "web" , configFile ) ; } Document doc = null ; Resource bugFile ; int count = 1 ; doc = loadDocumentCreateIfFails ( configFile , "web" ) ; // htaccess
if ( path . exists ( ) ) createHtAccess ( path . getRealResource ( ".htaccess" ) ) ; if ( configDir . exists ( ) ) createHtAccess ( configDir . getRealResource ( ".htaccess" ) ) ; createContextFiles ( configDir , servletConfig , doNew ) ; ConfigWebImpl configWeb = new ConfigWebImpl ( factory , configServer , servletConfig , configDir , configFile ) ; load ( configServer , configWeb , doc , false , doNew ) ; createContextFilesPost ( configDir , configWeb , servletConfig , false , doNew ) ; // call web . cfc for this context
( ( CFMLEngineImpl ) ConfigWebUtil . getEngine ( configWeb ) ) . onStart ( configWeb , false ) ; return configWeb ; |
public class SimpleListener { /** * 连接关闭前触发本方法
* @ param channelContext the channelcontext
* @ param throwable the throwable 有可能为空
* @ param remark the remark 有可能为空
* @ param isRemove
* @ author tanyaowu */
@ Override public void onBeforeClose ( ChannelContext channelContext , Throwable throwable , String remark , boolean isRemove ) { } } | log . debug ( "连接关闭前触发onBeforeClose" ) ; |
public class ArrayMatrix { /** * { @ inheritDoc } */
public double [ ] [ ] toDenseArray ( ) { } } | double [ ] [ ] m = new double [ rows ] [ cols ] ; int i = 0 ; for ( int row = 0 ; row < rows ; ++ row ) { for ( int col = 0 ; col < cols ; ++ col ) m [ row ] [ col ] = matrix [ i ++ ] ; } return m ; |
public class MyBatisUtils { /** * 此方法用于Mybatis的逆向工程
* @ param configFilePath 逆向工程的配置文件路径
* @ param overwrite 若自动生成的文件在配置文件中配置的路径下有同名文件是否重写
* @ param warnings 逆向工程执行期间产生的警告信息 , 存入此参数中
* @ throws Exception */
public static void myBatisAutoGenerator ( String configFilePath , boolean overwrite , List < String > warnings ) throws Exception { } } | File configFile = new File ( configFilePath ) ; ConfigurationParser cp = new ConfigurationParser ( warnings ) ; Configuration config = cp . parseConfiguration ( configFile ) ; DefaultShellCallback callback = new DefaultShellCallback ( overwrite ) ; MyBatisGenerator myBatisGenerator = new MyBatisGenerator ( config , callback , warnings ) ; myBatisGenerator . generate ( null ) ; |
public class Iconics { /** * tries to find a font by its key in all registered FONTS */
@ Nullable public static ITypeface findFont ( @ NonNull Context ctx , @ NonNull String key ) { } } | init ( ctx ) ; return FONTS . get ( key ) ; |
public class Calendar {
    /**
     * Appends "item=value" to the builder, or "item=?" when the value is not valid.
     */
    private static void appendValue(StringBuilder sb, String item, boolean valid, long value) {
        sb.append(item).append('=');
        if (valid) {
            sb.append(value);
        } else {
            // Unknown/invalid values are rendered as a question mark.
            sb.append('?');
        }
    }
}
public class Util { /** * Converts from java . sql Types to common java types like java . util . Date and
* numeric types . See { @ link Builder # autoMap ( Class ) } .
* @ param o
* @ param cls
* @ return */
public static Object autoMap ( Object o , Class < ? > cls ) { } } | if ( o == null ) return o ; else if ( cls . isAssignableFrom ( o . getClass ( ) ) ) { return o ; } else { if ( o instanceof java . sql . Date ) { java . sql . Date d = ( java . sql . Date ) o ; if ( cls . isAssignableFrom ( Long . class ) ) return d . getTime ( ) ; else if ( cls . isAssignableFrom ( BigInteger . class ) ) return BigInteger . valueOf ( d . getTime ( ) ) ; else return o ; } else if ( o instanceof java . sql . Timestamp ) { Timestamp t = ( java . sql . Timestamp ) o ; if ( cls . isAssignableFrom ( Long . class ) ) return t . getTime ( ) ; else if ( cls . isAssignableFrom ( BigInteger . class ) ) return BigInteger . valueOf ( t . getTime ( ) ) ; else return o ; } else if ( o instanceof java . sql . Time ) { Time t = ( java . sql . Time ) o ; if ( cls . isAssignableFrom ( Long . class ) ) return t . getTime ( ) ; else if ( cls . isAssignableFrom ( BigInteger . class ) ) return BigInteger . valueOf ( t . getTime ( ) ) ; else return o ; } else if ( o instanceof Blob && cls . isAssignableFrom ( byte [ ] . class ) ) { return toBytes ( ( Blob ) o ) ; } else if ( o instanceof Clob && cls . isAssignableFrom ( String . class ) ) { return toString ( ( Clob ) o ) ; } else if ( o instanceof BigInteger && cls . isAssignableFrom ( Long . class ) ) { return ( ( BigInteger ) o ) . longValue ( ) ; } else if ( o instanceof BigInteger && cls . isAssignableFrom ( Integer . class ) ) { return ( ( BigInteger ) o ) . intValue ( ) ; } else if ( o instanceof BigInteger && cls . isAssignableFrom ( Double . class ) ) { return ( ( BigInteger ) o ) . doubleValue ( ) ; } else if ( o instanceof BigInteger && cls . isAssignableFrom ( Float . class ) ) { return ( ( BigInteger ) o ) . floatValue ( ) ; } else if ( o instanceof BigInteger && cls . isAssignableFrom ( Short . class ) ) { return ( ( BigInteger ) o ) . shortValue ( ) ; } else if ( o instanceof BigInteger && cls . isAssignableFrom ( BigDecimal . 
class ) ) { return new BigDecimal ( ( BigInteger ) o ) ; } else if ( o instanceof BigDecimal && cls . isAssignableFrom ( Double . class ) ) { return ( ( BigDecimal ) o ) . doubleValue ( ) ; } else if ( o instanceof BigDecimal && cls . isAssignableFrom ( Integer . class ) ) { return ( ( BigDecimal ) o ) . toBigInteger ( ) . intValue ( ) ; } else if ( o instanceof BigDecimal && cls . isAssignableFrom ( Float . class ) ) { return ( ( BigDecimal ) o ) . floatValue ( ) ; } else if ( o instanceof BigDecimal && cls . isAssignableFrom ( Short . class ) ) { return ( ( BigDecimal ) o ) . toBigInteger ( ) . shortValue ( ) ; } else if ( o instanceof BigDecimal && cls . isAssignableFrom ( Long . class ) ) { return ( ( BigDecimal ) o ) . toBigInteger ( ) . longValue ( ) ; } else if ( o instanceof BigDecimal && cls . isAssignableFrom ( BigInteger . class ) ) { return ( ( BigDecimal ) o ) . toBigInteger ( ) ; } else if ( ( o instanceof Short || o instanceof Integer || o instanceof Long ) && cls . isAssignableFrom ( BigInteger . class ) ) { return new BigInteger ( o . toString ( ) ) ; } else if ( o instanceof Number && cls . isAssignableFrom ( BigDecimal . class ) ) { return new BigDecimal ( o . toString ( ) ) ; } else if ( o instanceof Number && cls . isAssignableFrom ( Short . class ) ) return ( ( Number ) o ) . shortValue ( ) ; else if ( o instanceof Number && cls . isAssignableFrom ( Integer . class ) ) return ( ( Number ) o ) . intValue ( ) ; else if ( o instanceof Number && cls . isAssignableFrom ( Integer . class ) ) return ( ( Number ) o ) . intValue ( ) ; else if ( o instanceof Number && cls . isAssignableFrom ( Long . class ) ) return ( ( Number ) o ) . longValue ( ) ; else if ( o instanceof Number && cls . isAssignableFrom ( Float . class ) ) return ( ( Number ) o ) . floatValue ( ) ; else if ( o instanceof Number && cls . isAssignableFrom ( Double . class ) ) return ( ( Number ) o ) . doubleValue ( ) ; else return o ; } |
public class VariantCustom { /** * Support method to add a new PostData param to this custom variant
* @ param name the param name
* @ param value the value of this parameter */
public void addParamPost ( String name , String value ) { } } | addParam ( name , value , NameValuePair . TYPE_POST_DATA ) ; |
public class Combinatorics {
    /**
     * Returns all permutations of a collection. For an empty input the result
     * contains a single empty list.
     * Ported from:
     * http://stackoverflow.com/questions/10503392/java-code-for-permutations-of-a-list-of-numbers
     *
     * @param <T> element type
     * @param elements collection to permute
     * @return collection of all permutations
     */
    public static <T> Collection<List<T>> permutations(Collection<T> elements) {
        Collection<List<T>> result = new ArrayList<>();
        if (elements.isEmpty()) {
            // Base case: exactly one permutation of nothing - the empty list.
            result.add(new LinkedList<>());
            return result;
        }
        // Recurse on the tail, then insert the head at every possible position.
        List<T> tail = new LinkedList<>(elements);
        T head = tail.remove(0);
        for (List<T> tailPerm : permutations(tail)) {
            for (int pos = 0; pos <= tailPerm.size(); pos++) {
                List<T> perm = new ArrayList<>(tailPerm);
                perm.add(pos, head);
                result.add(perm);
            }
        }
        return result;
    }
}
public class PostgreSqlQueryUtils { /** * Returns all non - bidirectional attributes persisted by PostgreSQL in junction tables ( e . g . no
* compound attributes and attributes with an expression )
* @ return stream of attributes persisted by PostgreSQL in junction tables */
static Stream < Attribute > getJunctionTableAttributes ( EntityType entityType ) { } } | // return all attributes referencing multiple entities except for one - to - many attributes that
// are mapped by
// another attribute
return getPersistedAttributes ( entityType ) . filter ( attr -> isMultipleReferenceType ( attr ) && ! ( attr . getDataType ( ) == ONE_TO_MANY && attr . isMappedBy ( ) ) ) ; |
public class Trie { /** * Gets the offset to the data which the index ch after variable offset
* points to .
* Note for locating a non - supplementary character data offset , calling
* getRawOffset ( 0 , ch ) ;
* will do . Otherwise if it is a supplementary character formed by
* surrogates lead and trail . Then we would have to call getRawOffset ( )
* with getFoldingIndexOffset ( ) . See getSurrogateOffset ( ) .
* @ param offset index offset which ch is to start from
* @ param ch index to be used after offset
* @ return offset to the data */
protected final int getRawOffset ( int offset , char ch ) { } } | return ( m_index_ [ offset + ( ch >> INDEX_STAGE_1_SHIFT_ ) ] << INDEX_STAGE_2_SHIFT_ ) + ( ch & INDEX_STAGE_3_MASK_ ) ; |
public class PageSpec { /** * Find all objects that match galen object statements
* @ param objectExpression - Galen object statements
* @ return */
public List < String > findOnlyExistingMatchingObjectNames ( String objectExpression ) { } } | String [ ] parts = objectExpression . split ( "," ) ; List < String > allSortedObjectNames = getSortedObjectNames ( ) ; List < String > resultingObjectNames = new LinkedList < > ( ) ; for ( String part : parts ) { String singleExpression = part . trim ( ) ; if ( ! singleExpression . isEmpty ( ) ) { if ( GalenUtils . isObjectGroup ( singleExpression ) ) { resultingObjectNames . addAll ( findObjectsInGroup ( GalenUtils . extractGroupName ( singleExpression ) ) ) ; } else if ( GalenUtils . isObjectsSearchExpression ( singleExpression ) ) { Pattern objectPattern = GalenUtils . convertObjectNameRegex ( singleExpression ) ; for ( String objectName : allSortedObjectNames ) { if ( objectPattern . matcher ( objectName ) . matches ( ) ) { resultingObjectNames . add ( objectName ) ; } } } else if ( objects . containsKey ( singleExpression ) ) { resultingObjectNames . add ( singleExpression ) ; } } } return resultingObjectNames ; |
public class Amp { /** * public static ServiceManagerAmp newContextManager ( )
* ClassLoader loader = Thread . currentThread ( ) . getContextClassLoader ( ) ;
* synchronized ( _ contextManager ) {
* ServiceManagerAmp manager = _ contextManager . getLevel ( loader ) ;
* if ( manager = = null ) {
* manager = newManager ( ) ;
* _ contextManager . set ( manager ) ;
* return manager ; */
public static ServicesAmp getContextManager ( ClassLoader loader ) { } } | ServicesAmp manager = _contextManager . getLevel ( loader ) ; if ( manager == null ) { SoftReference < ServicesAmp > managerRef = _contextMap . get ( loader ) ; if ( managerRef != null ) { return managerRef . get ( ) ; } /* if ( log . isLoggable ( Level . FINEST ) ) {
RuntimeException exn = new IllegalStateException ( String . valueOf ( loader ) ) ;
/ / exn . fillInStackTrace ( ) ;
log . log ( Level . FINEST , exn . toString ( ) , exn ) ; */
} return manager ; /* if ( manager = = null ) {
ServiceManagerBuilderAmp managerBuilder = newManagerBuilder ( ) ;
String name = Environment . getEnvironmentName ( loader ) ;
managerBuilder . classLoader ( loader ) ;
managerBuilder . name ( name ) ;
manager = managerBuilder . build ( ) ;
_ contextManager . setIfAbsent ( manager ) ;
manager = _ contextManager . get ( loader ) ;
return manager ; */ |
public class CommonSteps { /** * Simulates the mouse over a html element
* @ param page
* The concerned page of elementName
* @ param elementName
* Is target element
* @ param conditions
* list of ' expected ' values condition and ' actual ' values ( { @ link com . github . noraui . gherkin . GherkinStepCondition } ) .
* @ throws TechnicalException
* is throws if you have a technical error ( format , configuration , datas , . . . ) in NoraUi .
* Exception with { @ value com . github . noraui . utils . Messages # FAIL _ MESSAGE _ UNABLE _ TO _ PASS _ OVER _ ELEMENT } message ( with screenshot , no exception )
* @ throws FailureException
* if the scenario encounters a functional error */
@ Conditioned @ Quand ( "Je passe au dessus de '(.*)-(.*)'[\\.|\\?]" ) @ When ( "I pass over '(.*)-(.*)'[\\.|\\?]" ) public void passOver ( String page , String elementName , List < GherkinStepCondition > conditions ) throws TechnicalException , FailureException { } } | passOver ( Page . getInstance ( page ) . getPageElementByKey ( '-' + elementName ) ) ; |
public class LUDecomposition { /** * Is the matrix nonsingular ?
* @ return true if U , and hence A , is nonsingular . */
public boolean isNonsingular ( ) { } } | for ( int j = 0 ; j < n ; j ++ ) { if ( LU [ j ] [ j ] == 0 ) return false ; } return true ; |
public class RibbonColumnRegistry { /** * Add another key to the registry . This will overwrite the previous value .
* @ param key
* Unique key for the action within this registry .
* @ param ribbonColumnCreator
* Implementation of the { @ link RibbonColumnCreator } interface to link the correct type of ribbon column
* widget to the key . */
public static void put ( String key , RibbonColumnCreator ribbonColumnCreator ) { } } | if ( null != key && null != ribbonColumnCreator ) { REGISTRY . put ( key , ribbonColumnCreator ) ; } |
public class ZipkinHttpCollector { /** * This synchronously decodes the message so that users can see data errors . */
HttpResponse validateAndStoreSpans ( SpanBytesDecoder decoder , byte [ ] serializedSpans ) { } } | // logging already handled upstream in UnzippingBytesRequestConverter where request context exists
if ( serializedSpans . length == 0 ) return HttpResponse . of ( HttpStatus . ACCEPTED ) ; try { SpanBytesDecoderDetector . decoderForListMessage ( serializedSpans ) ; } catch ( IllegalArgumentException e ) { metrics . incrementMessagesDropped ( ) ; return HttpResponse . of ( BAD_REQUEST , MediaType . PLAIN_TEXT_UTF_8 , "Expected a " + decoder + " encoded list\n" ) ; } SpanBytesDecoder unexpectedDecoder = testForUnexpectedFormat ( decoder , serializedSpans ) ; if ( unexpectedDecoder != null ) { metrics . incrementMessagesDropped ( ) ; return HttpResponse . of ( BAD_REQUEST , MediaType . PLAIN_TEXT_UTF_8 , "Expected a " + decoder + " encoded list, but received: " + unexpectedDecoder + "\n" ) ; } CompletableCallback result = new CompletableCallback ( ) ; List < Span > spans = new ArrayList < > ( ) ; if ( ! decoder . decodeList ( serializedSpans , spans ) ) { throw new IllegalArgumentException ( "Empty " + decoder . name ( ) + " message" ) ; } collector . accept ( spans , result ) ; return HttpResponse . from ( result ) ; |
public class DoubleIntegerDBIDArrayList { /** * Grow the data storage . */
protected void grow ( ) { } } | if ( dists == EMPTY_DISTS ) { dists = new double [ INITIAL_SIZE ] ; ids = new int [ INITIAL_SIZE ] ; return ; } final int len = dists . length ; final int newlength = len + ( len >> 1 ) + 1 ; double [ ] odists = dists ; dists = new double [ newlength ] ; System . arraycopy ( odists , 0 , dists , 0 , odists . length ) ; int [ ] oids = ids ; ids = new int [ newlength ] ; System . arraycopy ( oids , 0 , ids , 0 , oids . length ) ; |
public class RequestUploadCredentialsRequestMarshaller { /** * Marshall the given parameter object . */
public void marshall ( RequestUploadCredentialsRequest requestUploadCredentialsRequest , ProtocolMarshaller protocolMarshaller ) { } } | if ( requestUploadCredentialsRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( requestUploadCredentialsRequest . getBuildId ( ) , BUILDID_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class ConvertNV21 { /** * Converts an NV21 image into a { @ link InterleavedU8 } RGB image .
* @ param data Input : NV21 image data
* @ param width Input : NV21 image width
* @ param height Input : NV21 image height
* @ param output Output : Optional storage for output image . Can be null . */
public static InterleavedU8 nv21ToInterleaved ( byte [ ] data , int width , int height , InterleavedU8 output ) { } } | if ( output == null ) { output = new InterleavedU8 ( width , height , 3 ) ; } else { output . reshape ( width , height , 3 ) ; } if ( BoofConcurrency . USE_CONCURRENT ) { ImplConvertNV21_MT . nv21ToInterleaved_U8 ( data , output ) ; } else { ImplConvertNV21 . nv21ToInterleaved_U8 ( data , output ) ; } return output ; |
public class PasswordFilter { /** * / * ( non - Javadoc )
* @ see java . util . logging . Logger # logrb ( java . util . logging . Level , java . lang . String , java . lang . String , java . lang . String , java . lang . String , java . lang . Throwable ) */
@ Override public void logrb ( Level level , String sourceClass , String sourceMethod , String bundleName , String msg , Throwable thrown ) { } } | super . logrb ( level , sourceClass , sourceMethod , bundleName , maskPassword ( msg ) , thrown ) ; |
public class OmemoService { /** * Publish the given OMEMO bundle to the server using PubSub .
* @ param connection our connection .
* @ param userDevice our device
* @ param bundle the bundle we want to publish
* @ throws XMPPException . XMPPErrorException
* @ throws SmackException . NotConnectedException
* @ throws InterruptedException
* @ throws SmackException . NoResponseException */
static void publishBundle ( XMPPConnection connection , OmemoDevice userDevice , OmemoBundleElement bundle ) throws XMPPException . XMPPErrorException , SmackException . NotConnectedException , InterruptedException , SmackException . NoResponseException { } } | PubSubManager pm = PubSubManager . getInstanceFor ( connection , connection . getUser ( ) . asBareJid ( ) ) ; pm . tryToPublishAndPossibleAutoCreate ( userDevice . getBundleNodeName ( ) , new PayloadItem < > ( bundle ) ) ; |
public class SLINK { /** * Fourth step : Actualize the clusters if necessary
* @ param id the id of the current object
* @ param it array iterator
* @ param n Last object to process at this run
* @ param pi Pi data store
* @ param lambda Lambda data store */
private void slinkstep4 ( DBIDRef id , DBIDArrayIter it , int n , WritableDBIDDataStore pi , WritableDoubleDataStore lambda ) { } } | DBIDVar p_i = DBIDUtil . newVar ( ) ; // for i = 1 . . n
for ( it . seek ( 0 ) ; it . getOffset ( ) < n ; it . advance ( ) ) { double l_i = lambda . doubleValue ( it ) ; p_i . from ( pi , it ) ; // p _ i = pi ( it )
double lp_i = lambda . doubleValue ( p_i ) ; // if L ( i ) > = L ( P ( i ) )
if ( l_i >= lp_i ) { // P ( i ) = n + 1
pi . put ( it , id ) ; } } |
public class MapperConstructorAccessor { /** * This method finds the configuration location , returns null if don ' t finds it
* @ param cc configuration chosen
* @ param xml xml object
* @ return configuration found */
protected ChooseConfig searchConfig ( ChooseConfig cc , XML xml ) { } } | ChooseConfig config = searchXmlConfig ( cc , xml ) ; if ( isNull ( config ) ) config = searchAnnotatedConfig ( cc ) ; return config ; |
public class WebJBossASClient { /** * Use this to modify an attribute for an existing connector .
* @ param connectorName the existing connector whose attribute is to be changed
* @ param attribName the attribute to get a new value
* @ param attribValue the new value of the attribute
* @ throws Exception if failed to change the attribute on the named connector */
public void changeConnector ( String connectorName , String attribName , String attribValue ) throws Exception { } } | final Address address = Address . root ( ) . add ( SUBSYSTEM , SUBSYSTEM_WEB , CONNECTOR , connectorName ) ; final ModelNode op = createWriteAttributeRequest ( attribName , attribValue , address ) ; final ModelNode response = execute ( op ) ; if ( ! isSuccess ( response ) ) { throw new FailureException ( response ) ; } return ; |
public class ObjectCounter { /** * Adds the counts from the provided { @ code Counter } to the current counts ,
* adding new elements as needed . */
public void add ( Counter < ? extends T > counter ) { } } | for ( Map . Entry < ? extends T , Integer > e : counter ) { T t = e . getKey ( ) ; Integer cur = counts . get ( t ) ; counts . put ( t , ( cur == null ) ? e . getValue ( ) : cur + e . getValue ( ) ) ; } |
public class CmsJspNavigationBean { /** * Returns the list of selected navigation elements . < p >
* @ return the list of selected navigation elements */
public List < CmsJspNavElement > getItems ( ) { } } | if ( m_items == null ) { switch ( m_type ) { // calculate the results based on the given parameters
case forFolder : if ( m_startLevel == Integer . MIN_VALUE ) { // no start level set
if ( m_resource == null ) { m_items = m_builder . getNavigationForFolder ( ) ; } else { m_items = m_builder . getNavigationForFolder ( m_resource ) ; } } else { // start level is set
if ( m_resource == null ) { m_items = m_builder . getNavigationForFolder ( m_startLevel ) ; } else { m_items = m_builder . getNavigationForFolder ( m_resource , m_startLevel ) ; } } break ; case forSite : if ( m_resource == null ) { m_items = m_builder . getSiteNavigation ( ) ; } else { m_items = m_builder . getSiteNavigation ( m_resource , m_endLevel ) ; } break ; case breadCrumb : if ( m_resource != null ) { // resource is set
m_items = m_builder . getNavigationBreadCrumb ( m_resource , m_startLevel , m_endLevel , Boolean . valueOf ( m_param ) . booleanValue ( ) ) ; } else { if ( m_startLevel == Integer . MIN_VALUE ) { // default start level is zero
m_items = m_builder . getNavigationBreadCrumb ( 0 , Boolean . valueOf ( m_param ) . booleanValue ( ) ) ; } else { if ( m_endLevel != Integer . MIN_VALUE ) { m_items = m_builder . getNavigationBreadCrumb ( m_startLevel , m_endLevel ) ; } else { m_items = m_builder . getNavigationBreadCrumb ( m_startLevel , Boolean . valueOf ( m_param ) . booleanValue ( ) ) ; } } } break ; case treeForFolder : if ( m_resource == null ) { m_items = m_builder . getNavigationTreeForFolder ( m_startLevel , m_endLevel ) ; } else { m_items = m_builder . getNavigationTreeForFolder ( m_resource , m_startLevel , m_endLevel ) ; } break ; case forResource : default : List < CmsJspNavElement > items = new ArrayList < CmsJspNavElement > ( 1 ) ; if ( m_resource == null ) { items . add ( m_builder . getNavigationForResource ( ) ) ; } else { items . add ( m_builder . getNavigationForResource ( m_resource ) ) ; } m_items = items ; break ; } } return m_items ; |
public class AdministrationDao { /** * Imported the old and the new version of the resolver _ vis _ map . tab . The new
* version has an additional column for visibility status of the
* visualization .
* @ param path The path to the ANNIS file .
* @ param table The final table in the database of the resolver _ vis _ map table . */
private void importResolverVisMapTable ( String path , String table , String annisFileSuffix ) { } } | try { // count cols for detecting old resolver _ vis _ map table format
File resolver_vis_tab = new File ( path , table + annisFileSuffix ) ; if ( ! resolver_vis_tab . isFile ( ) ) { return ; } String firstLine ; try ( BufferedReader bReader = new BufferedReader ( new InputStreamReader ( new FileInputStream ( resolver_vis_tab ) , "UTF-8" ) ) ) { firstLine = bReader . readLine ( ) ; } int cols = 9 ; // default number
if ( firstLine != null ) { String [ ] entries = firstLine . split ( "\t" ) ; cols = entries . length ; log . debug ( "the first row: {} amount of cols: {}" , entries , cols ) ; } switch ( cols ) { // old format
case 8 : readOldResolverVisMapFormat ( resolver_vis_tab ) ; break ; // new format
case 9 : bulkloadTableFromResource ( tableInStagingArea ( table ) , new FileSystemResource ( new File ( path , table + annisFileSuffix ) ) ) ; break ; default : log . error ( "invalid amount of cols" ) ; throw new RuntimeException ( ) ; } } catch ( IOException | FileAccessException e ) { log . error ( "could not read {}" , table , e ) ; } |
public class MeasureCollection { /** * add an empty entry e . g . if evaluation crashed internally */
public void addEmptyValue ( int index ) { } } | values [ index ] . add ( Double . NaN ) ; corrupted [ index ] = true ; |
public class LocalPropertySource { /** * Returns a property value from the underlying data store . */
@ Override public String getProperty ( String name ) { } } | loadProperties ( ) ; return properties == null ? null : properties . getProperty ( name ) ; |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.