signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class JawrRequestHandler { /** * Checks if the path is valid and can be accessed .
* @ param requestedPath
* the requested path
* @ return true if the path is valid and can be accessed . */
protected boolean isValidRequestedPath ( String requestedPath ) { } } | boolean result = true ; if ( ! this . jawrConfig . isDebugModeOn ( ) && requestedPath . startsWith ( JawrConstant . WEB_INF_DIR_PREFIX ) || requestedPath . startsWith ( JawrConstant . META_INF_DIR_PREFIX ) ) { result = false ; } else { // If it ' s not a generated path check the extension file
if ( this . jawrConfig . isDebugModeOn ( ) && ! generatorRegistry . isPathGenerated ( requestedPath ) ) { String extension = FileNameUtils . getExtension ( requestedPath ) ; if ( ! extension . toLowerCase ( ) . equals ( resourceType ) ) { result = false ; } } } return result ; |
public class CommerceWishListItemUtil {
    /**
     * Returns a range of all the commerce wish list items where
     * commerceWishListId = &#63; and CPInstanceUuid = &#63;.
     *
     * <p>Useful when paginating results. Returns a maximum of
     * <code>end - start</code> instances. <code>start</code> and
     * <code>end</code> are not primary keys, they are indexes in the result
     * set; <code>0</code> refers to the first result. Setting both to
     * {@link QueryUtil#ALL_POS} returns the full result set. If
     * <code>orderByComparator</code> is absent and pagination is required, the
     * default ORDER BY logic from {@link CommerceWishListItemModelImpl} is
     * used; if both are absent the result set is sorted by primary key
     * ascending, for performance reasons.
     *
     * @param commerceWishListId the commerce wish list ID
     * @param CPInstanceUuid the cp instance uuid
     * @param start the lower bound of the range of commerce wish list items
     * @param end the upper bound of the range (not inclusive)
     * @return the range of matching commerce wish list items
     */
    public static List<CommerceWishListItem> findByCW_CPI(long commerceWishListId, String CPInstanceUuid, int start, int end) {
        // Pure delegation to the persistence layer (service-builder utility pattern).
        return getPersistence().findByCW_CPI(commerceWishListId, CPInstanceUuid, start, end);
    }
}
public class R { /** * Adds a CSS class to a component within a facet .
* @ param f
* the facet
* @ param cname
* the class name of the component to be manipulated .
* @ param aclass
* the CSS class to be added */
public static void addClass2FacetComponent ( UIComponent f , String cname , String aclass ) { } } | // If the facet contains only one component , getChildCount ( ) = 0 and the
// Facet is the UIComponent
if ( f . getClass ( ) . getName ( ) . endsWith ( cname ) ) { addClass2Component ( f , aclass ) ; } else { if ( f . getChildCount ( ) > 0 ) { for ( UIComponent c : f . getChildren ( ) ) { if ( c . getClass ( ) . getName ( ) . endsWith ( cname ) ) { addClass2Component ( c , aclass ) ; } } } } |
public class TcpListener {
    /*
     * (non-Javadoc)
     * @see net.timewalker.ffmq4.listeners.AbstractListener#stop()
     */
    @Override
    public synchronized void stop() {
        // Idempotent: a second stop() is a no-op.
        if (!started) return;
        log.info("Stopping listener [" + getName() + "]");
        // Signal the accept loop to exit, then unblock it by closing the socket.
        stopRequired = true;
        // Close the listen socket
        closeServerSocket();
        // Wait for listener thread to stop
        try {
            if (listenerThread != null) listenerThread.join();
        } catch (InterruptedException e) {
            log.error("Wait for listener thread termination was interrupted");
        } finally {
            listenerThread = null;
        }
        // Then stop remaining clients — only after the accept loop is down, so
        // no new client can sneak in during shutdown.
        closeRemainingClients();
        started = false;
    }
}
public class ConferenceServlet { /** * { @ inheritDoc } */
protected void doBye ( SipServletRequest request ) throws ServletException , IOException { } } | logger . info ( "MediaPlaybackServlet: Got BYE request:\n" + request ) ; SipURI from = ( SipURI ) request . getFrom ( ) . getURI ( ) ; SipURI to = ( SipURI ) request . getTo ( ) . getURI ( ) ; ConferenceCenter . getInstance ( ) . getConference ( to . getUser ( ) ) . removeParticipant ( from . toString ( ) ) ; SipServletResponse sipServletResponse = request . createResponse ( 200 ) ; sipServletResponse . send ( ) ; |
public class AbstractCache { /** * A convenience method that will retrieve ( or create ) the task associated
* with the key , wait for a result to be ready , and process any thrown
* exceptions .
* @ param key
* Key for the associated task , usually an object or template
* name
* @ return T the result associated with the given key
* @ throws RuntimeException if there are any exceptions during evaluation
* @ throws ValidationException if there are any exceptions during validation */
public T waitForResult ( String key ) throws RuntimeException , ValidationException { } } | // Pull out the Future associated with the key .
Future < T > future = retrieve ( key , true ) ; // Now wait for the result and process any exceptions .
T result = null ; try { result = future . get ( ) ; } catch ( InterruptedException ie ) { throw EvaluationException . create ( MSG_INTERRUPTED_THREAD , key ) ; } catch ( CancellationException ce ) { throw EvaluationException . create ( MSG_CANCELLED_THREAD , key ) ; } catch ( ExecutionException ee ) { throw ExceptionUtils . launder ( ee ) ; } return result ; |
public class ListDoms { /** * Login using nic handler */
public static void main ( String [ ] args ) throws Exception { } } | if ( args . length != 2 ) { System . out . println ( "usage: ListDoms nic password" ) ; return ; } String nic = args [ 0 ] ; String password = args [ 1 ] ; list2 ( nic , password ) ; |
public class LToFltFunctionBuilder {
    /**
     * Adds a full new case for arguments of a specific class (matched by
     * instanceOf; a null class literal acts as a wildcard).
     *
     * @param argC class to match the argument against (null matches anything)
     * @param function function to evaluate when the case matches
     * @return this builder, for chaining
     */
    @Nonnull
    public <V extends T> LToFltFunctionBuilder<T> aCase(Class<V> argC, LToFltFunction<V> function) {
        // NOTE: 'PartialCaseWithFltProduct.The' is the library's nested
        // partial-case type; the predicate decides when this case applies.
        PartialCaseWithFltProduct.The pc = partialCaseFactoryMethod(a -> (argC == null || argC.isInstance(a)));
        pc.evaluate(function);
        return self();
    }
}
public class ResourceStorageFacade {
    /**
     * Finds or creates a ResourceStorageLoadable for the given resource.
     * Clients should first call shouldLoadFromStorage to check whether there
     * exists a storage version of the given resource.
     *
     * @return an IResourceStorageLoadable
     */
    @Override
    public ResourceStorageLoadable getOrCreateResourceStorageLoadable(final StorageAwareResource resource) {
        try {
            // 1) A provider adapter installed on the ResourceSet wins outright.
            final ResourceStorageProviderAdapter stateProvider = IterableExtensions.<ResourceStorageProviderAdapter>head(Iterables.<ResourceStorageProviderAdapter>filter(resource.getResourceSet().eAdapters(), ResourceStorageProviderAdapter.class));
            if ((stateProvider != null)) {
                final ResourceStorageLoadable inputStream = stateProvider.getResourceStorageLoadable(resource);
                if ((inputStream != null)) {
                    return inputStream;
                }
            }
            // 2) Otherwise prefer the binary-storage URI if it exists on disk;
            //    3) fall back to reading the generated output file via the FSA.
            // (Xtend-generated structure: _xifexpression/_xblockexpression hold
            // the branch results.)
            InputStream _xifexpression = null;
            boolean _exists = resource.getResourceSet().getURIConverter().exists(this.getBinaryStorageURI(resource.getURI()), CollectionLiterals.<Object, Object>emptyMap());
            if (_exists) {
                _xifexpression = resource.getResourceSet().getURIConverter().createInputStream(this.getBinaryStorageURI(resource.getURI()));
            } else {
                InputStream _xblockexpression = null;
                {
                    final AbstractFileSystemAccess2 fsa = this.getFileSystemAccess(resource);
                    final String outputRelativePath = this.computeOutputPath(resource);
                    _xblockexpression = fsa.readBinaryFile(outputRelativePath);
                }
                _xifexpression = _xblockexpression;
            }
            final InputStream inputStream_1 = _xifexpression;
            return this.createResourceStorageLoadable(inputStream_1);
        } catch (Throwable _e) {
            // Xtend sneaky-throw: propagates checked exceptions unchanged.
            throw Exceptions.sneakyThrow(_e);
        }
    }
}
public class Filter { /** * Returns a cached filter instance that operates on the given type , which
* prevents any results from passing through .
* @ param type type of Storable that query is made against
* @ return canonical Filter instance
* @ see ClosedFilter */
public static < S extends Storable > ClosedFilter < S > getClosedFilter ( Class < S > type ) { } } | SoftValuedCache < Object , Filter < S > > filterCache = getFilterCache ( type ) ; synchronized ( filterCache ) { Filter < S > filter = filterCache . get ( CLOSED_KEY ) ; if ( filter == null ) { filter = ClosedFilter . getCanonical ( type ) ; filterCache . put ( CLOSED_KEY , filter ) ; } return ( ClosedFilter < S > ) filter ; } |
public class GDLHandler {
    /**
     * Returns a cache that contains a mapping from variables to vertex instances.
     *
     * @param includeUserDefined true, iff user-defined variables shall be included in the cache
     * @param includeAutoGenerated true, iff auto-generated variables shall be included in the cache
     * @return immutable vertex cache
     */
    public Map<String, Vertex> getVertexCache(boolean includeUserDefined, boolean includeAutoGenerated) {
        // Pure delegation: the loader owns and builds the (immutable) cache.
        return loader.getVertexCache(includeUserDefined, includeAutoGenerated);
    }
}
public class CommerceWarehouseItemPersistenceImpl { /** * Returns the last commerce warehouse item in the ordered set where commerceWarehouseId = & # 63 ; .
* @ param commerceWarehouseId the commerce warehouse ID
* @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > )
* @ return the last matching commerce warehouse item , or < code > null < / code > if a matching commerce warehouse item could not be found */
@ Override public CommerceWarehouseItem fetchByCommerceWarehouseId_Last ( long commerceWarehouseId , OrderByComparator < CommerceWarehouseItem > orderByComparator ) { } } | int count = countByCommerceWarehouseId ( commerceWarehouseId ) ; if ( count == 0 ) { return null ; } List < CommerceWarehouseItem > list = findByCommerceWarehouseId ( commerceWarehouseId , count - 1 , count , orderByComparator ) ; if ( ! list . isEmpty ( ) ) { return list . get ( 0 ) ; } return null ; |
public class CompileUnit { /** * this method actually does not compile a class . It ' s only
* a marker that this type has to be compiled by the CompilationUnit
* at the end of a parse step no node should be be left . */
public void addClassNodeToCompile ( ClassNode node , SourceUnit location ) { } } | String nodeName = node . getName ( ) ; classesToCompile . put ( nodeName , node ) ; classNameToSource . put ( nodeName , location ) ; |
public class Uris {
    /**
     * Returns a new URI that is the given URI resolved with the given query
     * parameters resolved (appended). Both the keys and values of each entry in
     * the map will be normalized as query parameters.
     *
     * @param uri the base URI to resolve against
     * @param params the query parameters to resolve
     * @param strict whether or not to perform strict escaping (defaults to false)
     * @param strictNorm whether or not to perform strict normalization (defaults to strict)
     * @return the resolved, normalized URI
     * @throws NormalizationException if there was a problem normalizing the URL
     */
    public static URI resolveParams(final URI uri, final Map<String, String> params, final boolean strict, final boolean strictNorm) throws NormalizationException {
        // The leading "&" makes the joined key=value pairs append to any query
        // string already present on the URI; resolve(...) then normalizes.
        return resolve(uri, "&" + PARAM_JOINER.join(getParametersFromMap(params, strict)), strictNorm);
    }
}
public class Type3Message { /** * Returns the default flags for a Type - 3 message created in response
* to the given Type - 2 message in the current environment .
* @ param tc
* context to use
* @ param type2
* The Type - 2 message .
* @ return An < code > int < / code > containing the default flags . */
public static int getDefaultFlags ( CIFSContext tc , Type2Message type2 ) { } } | if ( type2 == null ) return getDefaultFlags ( tc ) ; int flags = NTLMSSP_NEGOTIATE_NTLM | NTLMSSP_NEGOTIATE_VERSION ; flags |= type2 . getFlag ( NTLMSSP_NEGOTIATE_UNICODE ) ? NTLMSSP_NEGOTIATE_UNICODE : NTLMSSP_NEGOTIATE_OEM ; return flags ; |
public class DriverShim { /** * Command - line test . Dynamically loads the driver given at the command
* line , opens a connection , and get the row count of the given table to
* prove it worked . Usage : java - cp . DriverShim someDriver . jar
* org . SomeDriver jdbcURL name pwd tbl */
public static void main ( String [ ] args ) throws Exception { } } | if ( args . length == 6 ) { DriverShim . loadAndRegister ( new File ( args [ 0 ] ) , args [ 1 ] ) ; Connection conn = DriverManager . getConnection ( args [ 2 ] , args [ 3 ] , args [ 4 ] ) ; String tbl = args [ 5 ] ; PreparedStatement st = conn . prepareStatement ( "SELECT COUNT(*) FROM ?" ) ; st . setString ( 1 , tbl ) ; ResultSet rs = st . executeQuery ( ) ; rs . next ( ) ; System . out . println ( "The " + tbl + " table has " + rs . getInt ( 1 ) + " rows." ) ; rs . close ( ) ; st . close ( ) ; conn . close ( ) ; } else { System . out . println ( "Usage: java -cp . DriverShim someDriver.jar" + " org.SomeDriver jdbcURL name pwd tbl" ) ; System . exit ( 1 ) ; } |
public class TypeSerializerConfigSnapshot {
    /**
     * Creates a serializer using this configuration, that is capable of reading
     * data written by the serializer described by this configuration.
     *
     * @return the restored serializer
     */
    @Override
    public final TypeSerializer<T> restoreSerializer() {
        if (serializer == null) {
            // setPriorSerializer(...) was never called on this snapshot.
            throw new IllegalStateException("Trying to restore the prior serializer via TypeSerializerConfigSnapshot, " + "but the prior serializer has not been set.");
        } else if (serializer instanceof UnloadableDummyTypeSerializer) {
            // The dummy is a placeholder written when Java deserialization of
            // the real serializer failed; surface the original failure as cause.
            Throwable originalError = ((UnloadableDummyTypeSerializer<?>) serializer).getOriginalError();
            throw new IllegalStateException("Could not Java-deserialize TypeSerializer while restoring checkpoint metadata for serializer " + "snapshot '" + getClass().getName() + "'. " + "Please update to the TypeSerializerSnapshot interface that removes Java Serialization to avoid " + "this problem in the future.", originalError);
        } else {
            return this.serializer;
        }
    }
}
public class FileResourceFactory { /** * Utility method for acquiring a reference to the NIOFileResource class constructor
* without asking the class loader every single time after we know it ' s not there .
* @ param args
* The constructor argument types
* @ return the constructor object */
protected Constructor < ? > getNIOFileResourceConstructor ( Class < ? > ... args ) { } } | final String method = "getNIOFileResourceConstructor" ; // $ NON - NLS - 1 $
if ( tryNIO && nioFileResourceConstructor == null ) { try { Class < ? > clazz = getNIOFileResourceClass ( ) ; if ( clazz != null ) nioFileResourceConstructor = clazz . getConstructor ( args ) ; } catch ( NoSuchMethodException e ) { tryNIO = false ; // Don ' t try this again .
if ( log . isLoggable ( Level . SEVERE ) ) { log . log ( Level . SEVERE , e . getMessage ( ) , e ) ; } if ( log . isLoggable ( Level . WARNING ) ) log . logp ( Level . WARNING , CLAZZ , method , WARN_MESSAGE ) ; } catch ( SecurityException e ) { tryNIO = false ; // Don ' t try this again .
if ( log . isLoggable ( Level . SEVERE ) ) { log . log ( Level . SEVERE , e . getMessage ( ) , e ) ; } if ( log . isLoggable ( Level . WARNING ) ) log . logp ( Level . WARNING , CLAZZ , method , WARN_MESSAGE ) ; } } return nioFileResourceConstructor ; |
public class DateUtils { /** * Checks the second , hour , month , day , month and year are equal . */
@ SuppressWarnings ( "deprecation" ) public static boolean dateTimeEquals ( java . util . Date d1 , java . util . Date d2 ) { } } | if ( d1 == null || d2 == null ) { return false ; } return d1 . getDate ( ) == d2 . getDate ( ) && d1 . getMonth ( ) == d2 . getMonth ( ) && d1 . getYear ( ) == d2 . getYear ( ) && d1 . getHours ( ) == d2 . getHours ( ) && d1 . getMinutes ( ) == d2 . getMinutes ( ) && d1 . getSeconds ( ) == d2 . getSeconds ( ) ; |
public class AbstractCluster {
    /*
     * (non-Javadoc)
     * @see me.prettyprint.cassandra.service.Cluster#describeKeyspaces()
     */
    @Override
    public List<KeyspaceDefinition> describeKeyspaces() throws HectorException {
        // Wrap the Thrift call in a META_READ Operation so the connection
        // manager can retry it against other hosts on failure.
        Operation<List<KeyspaceDefinition>> op = new Operation<List<KeyspaceDefinition>>(OperationType.META_READ, getCredentials()) {
            @Override
            public List<KeyspaceDefinition> execute(Cassandra.Client cassandra) throws HectorException {
                try {
                    // Convert the raw Thrift keyspace defs to Hector's model.
                    return ThriftKsDef.fromThriftList(cassandra.describe_keyspaces());
                } catch (Exception e) {
                    // Translate any Thrift/transport failure into a HectorException.
                    throw xtrans.translate(e);
                }
            }
        };
        connectionManager.operateWithFailover(op);
        return op.getResult();
    }
}
public class HBaseSchemaManager {
    /**
     * Gets the table descriptor for the given entity, creating a fresh
     * descriptor if the HBase table does not exist yet, or loading the existing
     * one otherwise, then attaching all required column families.
     *
     * @param clazz the entity class
     * @param entityType the entity type
     * @param tableName the table name
     * @return the table descriptor
     */
    private HTableDescriptor getTableDescriptor(Class<?> clazz, EntityType<?> entityType, String tableName) {
        try {
            AbstractManagedType<?> ent = (AbstractManagedType<?>) entityType;
            HTableDescriptor tableDescriptor = null;
            // Table names are namespaced by database/schema.
            String hTableName = HBaseUtils.getHTableName(databaseName, tableName);
            // New descriptor if the table is absent; otherwise reuse the live one.
            tableDescriptor = !admin.tableExists(TableName.valueOf(hTableName)) ? new HTableDescriptor(TableName.valueOf(hTableName)) : admin.getTableDescriptor(TableName.valueOf(hTableName));
            addColumnFamilyAndSetProperties(tableDescriptor, tableName);
            // Add column families for @SecondaryTable
            List<String> secondaryTables = ((DefaultEntityAnnotationProcessor) ent.getEntityAnnotation()).getSecondaryTablesName();
            for (String secTable : secondaryTables) {
                addColumnFamilyAndSetProperties(tableDescriptor, secTable);
            }
            // Handle @JoinTable for @ManyToMany relations.
            List<Relation> relations = KunderaMetadataManager.getEntityMetadata(kunderaMetadata, clazz).getRelations();
            addJoinTable(relations);
            // @CollectionTable is not handled.
            return tableDescriptor;
        } catch (IOException ex) {
            logger.error("Either table isn't in enabled state or some network problem, Caused by: ", ex);
            throw new SchemaGenerationException(ex, "Hbase");
        }
    }
}
public class HamtPMap {
    /**
     * Moves the root's own key/value entry into the appropriate child, leaving
     * a root node that carries no entry of its own (key == null).
     */
    private HamtPMap<K, V> vacateRoot() {
        // Which child bucket the root's entry hashes into at this level.
        int bucket = bucket(this.hash);
        int bucketMask = 1 << bucket;
        int index = index(bucketMask);
        if ((mask & bucketMask) != 0) {
            // A child already exists for that bucket: push the entry down into
            // it (shift(...) consumes this level's hash bits).
            HamtPMap<K, V> newChild = children[index].plus(this.key, shift(this.hash), this.value);
            return new HamtPMap<>(null, 0, null, mask, replaceChild(children, index, newChild));
        }
        // No child yet: create a leaf for the entry and set its bit in the mask.
        HamtPMap<K, V> newChild = new HamtPMap<>(this.key, shift(this.hash), this.value, 0, emptyChildren());
        return new HamtPMap<>(null, 0, null, mask | bucketMask, insertChild(children, index, newChild));
    }
}
public class XPathFactoryFinder {
    /**
     * <p>Lookup a {@link XPathFactory} for the given object model.</p>
     * Search order: system property, $java.home/lib/jaxp.properties,
     * META-INF/services files, then the platform default (W3C DOM only).
     *
     * @param uri identifies the object model
     * @return the factory, or null if none could be found
     */
    private XPathFactory _newFactory(String uri) {
        XPathFactory xpf;
        String propertyName = SERVICE_CLASS.getName() + ":" + uri;
        // 1) system property look up
        try {
            if (debug) debugPrintln("Looking up system property '" + propertyName + "'");
            String r = System.getProperty(propertyName);
            if (r != null && r.length() > 0) {
                if (debug) debugPrintln("The value is '" + r + "'");
                xpf = createInstance(r);
                if (xpf != null) return xpf;
            } else if (debug) {
                debugPrintln("The property is undefined.");
            }
        } catch (Exception e) {
            // Lookup failures here are non-fatal; fall through to the next source.
            e.printStackTrace();
        }
        // 2) try to read from $java.home/lib/jaxp.properties
        try {
            String factoryClassName = CacheHolder.cacheProps.getProperty(propertyName);
            if (debug) debugPrintln("found " + factoryClassName + " in $java.home/jaxp.properties");
            if (factoryClassName != null) {
                xpf = createInstance(factoryClassName);
                if (xpf != null) {
                    return xpf;
                }
            }
        } catch (Exception ex) {
            if (debug) {
                ex.printStackTrace();
            }
        }
        // 3) try META-INF/services files on the classpath
        for (URL resource : createServiceFileIterator()) {
            if (debug) debugPrintln("looking into " + resource);
            try {
                xpf = loadFromServicesFile(uri, resource.toExternalForm(), resource.openStream());
                if (xpf != null) return xpf;
            } catch (IOException e) {
                if (debug) {
                    debugPrintln("failed to read " + resource);
                    e.printStackTrace();
                }
            }
        }
        // 4) platform default, only for the standard DOM object model
        if (uri.equals(XPathFactory.DEFAULT_OBJECT_MODEL_URI)) {
            if (debug) debugPrintln("attempting to use the platform default W3C DOM XPath lib");
            return createInstance("org.apache.xpath.jaxp.XPathFactoryImpl");
        }
        if (debug) debugPrintln("all things were tried, but none was found. bailing out.");
        return null;
    }
}
public class ParaClient { /** * Removes a validation constraint for a given field .
* @ param type a type
* @ param field a field name
* @ param constraintName the name of the constraint to remove
* @ return a map containing all validation constraints for this type . */
public Map < String , Map < String , Map < String , Map < String , ? > > > > removeValidationConstraint ( String type , String field , String constraintName ) { } } | if ( StringUtils . isBlank ( type ) || StringUtils . isBlank ( field ) || StringUtils . isBlank ( constraintName ) ) { return Collections . emptyMap ( ) ; } return getEntity ( invokeDelete ( Utils . formatMessage ( "_constraints/{0}/{1}/{2}" , type , field , constraintName ) , null ) , Map . class ) ; |
public class WFG3 { /** * WFG3 t1 transformation */
public float [ ] t1 ( float [ ] z , int k ) { } } | float [ ] result = new float [ z . length ] ; System . arraycopy ( z , 0 , result , 0 , k ) ; for ( int i = k ; i < z . length ; i ++ ) { result [ i ] = ( new Transformations ( ) ) . sLinear ( z [ i ] , ( float ) 0.35 ) ; } return result ; |
public class Ordering { /** * Returns the greatest of the specified values according to this ordering . If there are multiple
* greatest values , the first of those is returned . The iterator will be left exhausted : its
* { @ code hasNext ( ) } method will return { @ code false } .
* < p > < b > Java 8 users : < / b > Continue to use this method for now . After the next release of Guava ,
* use { @ code Streams . stream ( iterator ) . max ( thisComparator ) . get ( ) } instead ( but note that it does
* not guarantee which tied maximum element is returned ) .
* @ param iterator the iterator whose maximum element is to be determined
* @ throws NoSuchElementException if { @ code iterator } is empty
* @ throws ClassCastException if the parameters are not < i > mutually comparable < / i > under this
* ordering .
* @ since 11.0 */
@ CanIgnoreReturnValue // TODO ( kak ) : Consider removing this
public < E extends T > E max ( Iterator < E > iterator ) { } } | // let this throw NoSuchElementException as necessary
E maxSoFar = iterator . next ( ) ; while ( iterator . hasNext ( ) ) { maxSoFar = max ( maxSoFar , iterator . next ( ) ) ; } return maxSoFar ; |
public class MetadataStore {
    /**
     * Function to delete the specified store from Metadata store. This involves
     * 1. Removing the entry from the ConfigurationStorageEngine for STORES.
     * 2. Updating the metadata cache.
     * 3. Re-creating the 'stores.xml' key.
     *
     * @param storeName specifies name of the store to be deleted
     */
    public void deleteStoreDefinition(String storeName) {
        // Acquire write lock: all three steps must happen atomically w.r.t.
        // concurrent metadata readers/writers.
        writeLock.lock();
        try {
            // Check if store exists
            if (!this.storeNames.contains(storeName)) {
                throw new VoldemortException("Requested store to be deleted does not exist !");
            }
            // Otherwise remove from the STORES directory. Note: the version
            // argument is not required here since the ConfigurationStorageEngine
            // simply ignores it.
            this.storeDefinitionsStorageEngine.delete(storeName, null);
            // Update the metadata cache
            this.metadataCache.remove(storeName);
            // Re-initialize the store definitions. This is primarily required
            // to re-create the value for key 'stores.xml' — necessary for
            // backwards compatibility.
            initStoreDefinitions(null);
        } finally {
            writeLock.unlock();
        }
    }
}
public class ModularParser { /** * Checks if the configuration is runnable . */
private boolean runConfig ( ) { } } | if ( lineSeparator == null ) { logger . debug ( "Set lineSeparator" ) ; return false ; } if ( categoryIdentifers == null ) { logger . warn ( "Set categoryIdentifers" ) ; return false ; } if ( languageIdentifers == null ) { logger . warn ( "Set languageIdentifers" ) ; return false ; } if ( imageIdentifers == null ) { logger . warn ( "Set imageIdentifers" ) ; return false ; } if ( templateParser == null ) { logger . warn ( "Set templateParser" ) ; return false ; } return true ; |
public class AfplibFactoryImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
public String convertImageResolutionYBaseToString ( EDataType eDataType , Object instanceValue ) { } } | return instanceValue == null ? null : instanceValue . toString ( ) ; |
public class WebApplicationHandlerMBean {
    /**
     * Registers this handler's JMX attributes on top of those contributed by
     * the superclass, and caches the managed handler instance.
     */
    protected void defineManagedResource() {
        super.defineManagedResource();
        defineAttribute("acceptRanges");
        defineAttribute("filterChainsCached");
        // "filters" is read-only and served by the MBean itself rather than
        // delegated to the managed resource.
        defineAttribute("filters", READ_ONLY, ON_MBEAN);
        _webappHandler = (WebApplicationHandler) getManagedResource();
    }
}
public class Mixin {
    /**
     * Loads a single script into the script engine.
     *
     * @param engine the engine that should evaluate the script
     * @param scriptReader supplies the script source to be loaded
     */
    private static void loadScript(ScriptEngine engine, Supplier<Reader> scriptReader) {
        // try-with-resources guarantees the reader is closed even on failure;
        // any checked failure is surfaced unchecked with its cause preserved.
        try (Reader source = scriptReader.get()) {
            engine.eval(source);
        } catch (IOException | ScriptException e) {
            throw new RuntimeException(e);
        }
    }
}
public class SftpFileAttributes { /** * Set the GID of this file .
* @ param gid */
public void setGID ( String gid ) { } } | if ( version > 3 ) { flags |= SSH_FILEXFER_ATTR_OWNERGROUP ; } else flags |= SSH_FILEXFER_ATTR_UIDGID ; this . gid = gid ; |
public class QueryRow {
    /**
     * Returns the value of the given property from the current row.
     *
     * @param propertyName the property to look up
     * @return the property value, or null if the property is not present
     *     (historical commented-out code shows an exception was once
     *     considered for this case but never adopted)
     */
    public String getValue(String propertyName) throws JspCoreException {
        // Use the current row: fetch the value from its hashtable and return
        // it as-is — null signals an unknown property.
        String val = (String) row.get(propertyName);
        return val;
    }
}
public class ScanResultObject {
    /**
     * Load the class named returned by {@link #getClassInfo()}, or if that
     * returns null, the class named by {@link #getClassName()}. Returns a
     * {@code Class<?>} reference for the class, cast to the requested
     * superclass or interface type.
     *
     * @param <T> the superclass or interface type
     * @param superclassOrInterfaceType the type to cast the resulting class reference to
     * @param ignoreExceptions if true, ignore classloading exceptions and return null on failure
     * @return the {@code Class<?>} reference for the referenced class, or null
     *     if the class could not be loaded (or casting failed) and
     *     ignoreExceptions is true
     * @throws IllegalArgumentException if the class could not be loaded or
     *     cast, and ignoreExceptions was false
     */
    <T> Class<T> loadClass(final Class<T> superclassOrInterfaceType, final boolean ignoreExceptions) {
        // Memoize: load at most once per ScanResultObject instance.
        if (classRef == null) {
            classRef = scanResult.loadClass(getClassInfoNameOrClassName(), superclassOrInterfaceType, ignoreExceptions);
        }
        // The loader above already verified assignability to the requested
        // type, so this unchecked cast is safe.
        @SuppressWarnings("unchecked")
        final Class<T> classT = (Class<T>) classRef;
        return classT;
    }
}
public class WordVectorSerializer { /** * This method saves vocab cache to provided OutputStream .
* Please note : it saves only vocab content , so it ' s suitable mostly for BagOfWords / TF - IDF vectorizers
* @ param vocabCache
* @ param stream
* @ throws UnsupportedEncodingException */
public static void writeVocabCache ( @ NonNull VocabCache < VocabWord > vocabCache , @ NonNull OutputStream stream ) throws IOException { } } | try ( PrintWriter writer = new PrintWriter ( new BufferedWriter ( new OutputStreamWriter ( stream , StandardCharsets . UTF_8 ) ) ) ) { // saving general vocab information
writer . println ( "" + vocabCache . numWords ( ) + " " + vocabCache . totalNumberOfDocs ( ) + " " + vocabCache . totalWordOccurrences ( ) ) ; for ( int x = 0 ; x < vocabCache . numWords ( ) ; x ++ ) { VocabWord word = vocabCache . elementAtIndex ( x ) ; writer . println ( word . toJSON ( ) ) ; } } |
public class DistBlockIntegrityMonitor {
    /**
     * Get the status of the entire block integrity monitor. The status
     * returned represents the aggregation of the statuses of all the integrity
     * monitor's components (the corruption fixer and the decommission copier).
     *
     * @return the aggregated status of the block integrity monitor
     */
    @Override
    public BlockIntegrityMonitor.Status getAggregateStatus() {
        Status fixer = corruptionWorker.getStatus();
        Status copier = decommissioningWorker.getStatus();
        // Accumulators for the merged view; either component may be null.
        List<JobStatus> jobs = new ArrayList<JobStatus>();
        List<JobStatus> simFailedJobs = new ArrayList<JobStatus>();
        List<JobStatus> failedJobs = new ArrayList<JobStatus>();
        List<String> highPriFileNames = new ArrayList<String>();
        int numHighPriFiles = 0;
        int numLowPriFiles = 0;
        int numLowestPriFiles = 0;
        // Fold in the corruption worker's status.
        if (fixer != null) {
            jobs.addAll(fixer.jobs);
            simFailedJobs.addAll(fixer.simFailJobs);
            failedJobs.addAll(fixer.failJobs);
            if (fixer.highPriorityFileNames != null) {
                highPriFileNames.addAll(fixer.highPriorityFileNames);
            }
            numHighPriFiles += fixer.highPriorityFiles;
            numLowPriFiles += fixer.lowPriorityFiles;
            numLowestPriFiles += fixer.lowestPriorityFiles;
        }
        // Fold in the decommissioning worker's status the same way.
        if (copier != null) {
            jobs.addAll(copier.jobs);
            simFailedJobs.addAll(copier.simFailJobs);
            failedJobs.addAll(copier.failJobs);
            if (copier.highPriorityFileNames != null) {
                highPriFileNames.addAll(copier.highPriorityFileNames);
            }
            numHighPriFiles += copier.highPriorityFiles;
            numLowPriFiles += copier.lowPriorityFiles;
            numLowestPriFiles += copier.lowestPriorityFiles;
        }
        return new Status(numHighPriFiles, numLowPriFiles, numLowestPriFiles, jobs, highPriFileNames, failedJobs, simFailedJobs);
    }
}
public class JMElasticsearchBulk {
    /**
     * Delete bulk docs async: fire-and-forget deletion of every document
     * matching the filter query in the given index/type.
     *
     * @param index the index
     * @param type the type
     * @param filterQueryBuilder the filter query builder selecting docs to delete
     */
    public void deleteBulkDocsAsync(String index, String type, QueryBuilder filterQueryBuilder) {
        // Pipeline: extract matching docs into delete-request builders, wrap
        // them into one bulk request, then execute that bulk asynchronously.
        executeBulkRequestAsync(buildDeleteBulkRequestBuilder(buildExtractDeleteRequestBuilderList(index, type, filterQueryBuilder)));
    }
}
public class ResourceResolutionBuilderHelper { /** * Build the list of resource resolvers .
* The list is ordered to ensure that empty string lookup is always the last
* registered .
* If some custom resolvers are registered , they are used before default
* ones in the order they were registered .
* @ return the list of resource resolvers */
public List < ResourceResolver > buildResolvers ( ) { } } | List < ResourceResolver > resolvers = new ArrayList < > ( ) ; resolvers . addAll ( customResolvers ) ; // ensure that default lookup is always the last registered
List < ResolverHelper > helpers = new ArrayList < > ( ) ; if ( classPath != null ) { helpers . add ( new ResolverHelper ( classPath . getLookups ( ) , classPath ) ) ; } if ( file != null ) { helpers . add ( new ResolverHelper ( file . getLookups ( ) , file ) ) ; } if ( string != null ) { helpers . add ( new ResolverHelper ( string . getLookups ( ) , string ) ) ; } Collections . sort ( helpers , new PrefixComparator ( ) ) ; for ( ResolverHelper helper : helpers ) { helper . register ( resolvers ) ; } return resolvers ; |
public class Utils {
    /**
     * Verify that the given value is either "true" or "false" (case-insensitive)
     * and return the corresponding boolean value.
     * @param value Candidate boolean value in string form.
     * @return Boolean value of string if valid.
     * @throws IllegalArgumentException If the value is not "true" or "false".
     */
    public static boolean getBooleanValue(String value) throws IllegalArgumentException {
        // Each comparison is evaluated once (the original re-ran the "true" check
        // after validation); null-safe because the literal is the receiver.
        if ("true".equalsIgnoreCase(value)) {
            return true;
        }
        if ("false".equalsIgnoreCase(value)) {
            return false;
        }
        throw new IllegalArgumentException("'true' or 'false' expected: " + value);
    }
}
public class AliasFactory {
    /**
     * Create an alias instance for the given class and variable name.
     * Both the path expression and the proxy are served from caches, keyed by
     * (class, variable) and (class, expression) respectively.
     * @param <A> alias type
     * @param cl type for alias
     * @param var variable name for the underlying expression
     * @return alias instance
     * @throws QueryException if a cache loader fails
     */
    @SuppressWarnings("unchecked")
    public <A> A createAliasForVariable(Class<A> cl, String var) {
        try {
            Expression<?> path = pathCache.get(Pair.<Class<?>, String> of(cl, var));
            return (A) proxyCache.get(Pair.<Class<?>, Expression<?>> of(cl, path));
        } catch (ExecutionException e) {
            // Unwrap cache-loader failures into the query layer's runtime exception.
            throw new QueryException(e);
        }
    }
}
public class ThreeViewEstimateMetricScene {
    /**
     * Prunes the features with the largest reprojection error, then re-runs
     * bundle adjustment on the reduced scene.
     * @param bundleAdjustment the optimizer to re-run after pruning
     */
    private void pruneOutliers(BundleAdjustment<SceneStructureMetric> bundleAdjustment) {
        // see if it's configured to not prune
        if (pruneFraction == 1.0) return;
        PruneStructureFromSceneMetric pruner = new PruneStructureFromSceneMetric(structure, observations);
        // Drop the worst observations by error rank, then views and points left
        // under-constrained by the removal.
        pruner.pruneObservationsByErrorRank(pruneFraction);
        pruner.pruneViews(10);
        pruner.prunePoints(1);
        // Re-optimize with the pruned structure/observations.
        bundleAdjustment.setParameters(structure, observations);
        bundleAdjustment.optimize(structure);
        if (verbose != null) {
            verbose.println("\nCamera");
            for (int i = 0; i < structure.cameras.length; i++) {
                verbose.println(structure.cameras[i].getModel().toString());
            }
            verbose.println("\n\nworldToView");
            for (int i = 0; i < structure.views.length; i++) {
                verbose.println(structure.views[i].worldToView.toString());
            }
            verbose.println("Fit Score: " + bundleAdjustment.getFitScore());
        }
    }
}
public class ChatController {
    /**
     * Updates all conversation states: fetches the remote conversation list,
     * compares it with the locally persisted conversations, reconciles the
     * differences, then looks for events missed while offline.
     * @return Result of synchronisation with services.
     */
    private Observable<ChatResult> synchroniseConversations() {
        return checkState().flatMap(client ->
                client.service().messaging().getConversations(false)
                        // Diff the service result against the locally persisted conversations.
                        .flatMap(result -> persistenceController.loadAllConversations()
                                .map(chatConversationBases -> compare(result.isSuccessful(), result.getResult(), chatConversationBases)))
                        // Apply the computed differences to the local store.
                        .flatMap(this::updateLocalConversationList)
                        // Backfill any events missed while disconnected.
                        .flatMap(result -> lookForMissingEvents(client, result))
                        .map(result -> new ChatResult(result.isSuccessful, null)));
    }
}
public class CmsSitemapToolbar { /** * Deactivates all toolbar buttons . < p > */
public void deactivateAll ( ) { } } | for ( Widget button : getAll ( ) ) { if ( button instanceof I_CmsToolbarActivatable ) { ( ( I_CmsToolbarActivatable ) button ) . setEnabled ( false ) ; } else if ( button instanceof CmsToggleButton ) { ( ( CmsToggleButton ) button ) . setEnabled ( false ) ; } } |
public class CellValidator {
    /**
     * Generates the cassandra marshaller ({@link org.apache.cassandra.db.marshal.AbstractType})
     * for this CellValidator. The result is computed lazily and cached.
     * @return an instance of the cassandra marshaller for this CellValidator.
     * @throws DeepGenericException if the validator class cannot be resolved
     *         or the validator represents a collection kind
     */
    public AbstractType<?> getAbstractType() {
        // Lazily computed. NOTE(review): not synchronized — concurrent first
        // callers may both compute; confirm whether that is acceptable here.
        if (abstractType != null) {
            return abstractType;
        }
        try {
            if (validatorKind == Kind.NOT_A_COLLECTION) {
                abstractType = MAP_ABSTRACT_TYPE_CLASS_TO_ABSTRACT_TYPE.get(forName(validatorClassName));
            } else {
                // Collection validators are not supported by this lookup path.
                throw new DeepGenericException("Cannot determine collection kind for " + validatorKind);
            }
        } catch (ClassNotFoundException e) {
            throw new DeepGenericException(e);
        }
        return abstractType;
    }
}
public class IoUtil {
    /**
     * Converts an {@link InputStream} to a {@link PushbackInputStream}.
     * If the given stream already is one, it is returned as-is (cast);
     * otherwise a new wrapper with the given push-back buffer size is created.
     * @param in {@link InputStream} the source stream
     * @param pushBackSize number of bytes that may be pushed back
     * @return {@link PushbackInputStream} view of {@code in}
     * @since 3.1.0
     */
    public static PushbackInputStream toPushbackStream(InputStream in, int pushBackSize) {
        // Avoid double-wrapping an already push-back-capable stream.
        if (in instanceof PushbackInputStream) {
            return (PushbackInputStream) in;
        }
        return new PushbackInputStream(in, pushBackSize);
    }
}
public class DynamoDBMapperTableModel {
    /**
     * {@inheritDoc}
     * Converts the object into its attribute-value map, skipping fields whose
     * converted value is null.
     * @throws DynamoDBMappingException if any single field fails to convert
     */
    @Override
    public Map<String, AttributeValue> convert(final T object) {
        // LinkedHashMap keeps the attribute order stable (field iteration order).
        final Map<String, AttributeValue> map = new LinkedHashMap<String, AttributeValue>();
        for (final DynamoDBMapperFieldModel<T, Object> field : fields()) {
            try {
                final AttributeValue value = field.getAndConvert(object);
                if (value != null) {
                    map.put(field.name(), value);
                }
            } catch (final RuntimeException e) {
                // Name the offending type and field so conversion failures are diagnosable.
                throw new DynamoDBMappingException(targetType.getSimpleName() + "[" + field.name() + "]; could not convert attribute", e);
            }
        }
        return map;
    }
}
public class FunctionDefaultConfigMarshaller {
    /**
     * Marshall the given parameter object.
     * @param functionDefaultConfig the object to marshall; must not be null
     * @param protocolMarshaller the marshaller to write to
     * @throws SdkClientException if the argument is null or marshalling fails
     */
    public void marshall(FunctionDefaultConfig functionDefaultConfig, ProtocolMarshaller protocolMarshaller) {
        if (functionDefaultConfig == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(functionDefaultConfig.getExecution(), EXECUTION_BINDING);
        } catch (Exception e) {
            // Wrap any low-level failure in the SDK client exception, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class MultiMatcher { /** * Find the result with the maximum end position and use it as delegate . */
private static Result findResultWithMaxEnd ( List < Result > successResults ) { } } | return Collections . max ( successResults , new Comparator < Result > ( ) { @ Override public int compare ( Result o1 , Result o2 ) { return Integer . valueOf ( o1 . end ( ) ) . compareTo ( o2 . end ( ) ) ; } } ) ; |
public class RealtimeThroughputCondition { /** * analysisType的最大value */
public int getMax ( ) { } } | List < Integer > value = new ArrayList < Integer > ( ) ; for ( AnalysisType at : analysisType ) { value . add ( at . getValue ( ) ) ; } return Collections . max ( value ) ; |
public class AmazonSimpleDBClient { /** * Returns additional metadata for a previously executed successful , request , typically used for debugging issues
* where a service isn ' t acting as expected . This data isn ' t considered part of the result data returned by an
* operation , so it ' s available through this separate , diagnostic interface .
* Response metadata is only cached for a limited period of time , so if you need to access this extra diagnostic
* information for an executed request , you should use this method to retrieve it as soon as possible after
* executing the request .
* @ param request
* The originally executed request
* @ return The response metadata for the specified request , or null if none is available . */
public com . amazonaws . services . simpledb . SimpleDBResponseMetadata getCachedResponseMetadata ( AmazonWebServiceRequest request ) { } } | ResponseMetadata metadata = client . getResponseMetadataForRequest ( request ) ; if ( metadata != null ) return new com . amazonaws . services . simpledb . SimpleDBResponseMetadata ( metadata ) ; else return null ; |
public class TileWriter { /** * Write formatted tiles
* @ param tileDao
* @ param directory
* @ param imageFormat
* @ param width
* @ param height
* @ param tileType
* @ param rawImage
* @ return
* @ throws IOException */
private static int writeFormatTiles ( TileDao tileDao , File directory , String imageFormat , Integer width , Integer height , TileFormatType tileType , boolean rawImage ) throws IOException { } } | int tileCount = 0 ; // Get the projection of the tile matrix set
SpatialReferenceSystem srs = tileDao . getTileMatrixSet ( ) . getSrs ( ) ; Projection projection = srs . getProjection ( ) ; // Get the transformation to web mercator
Projection webMercator = ProjectionFactory . getProjection ( ProjectionConstants . EPSG_WEB_MERCATOR ) ; ProjectionTransform projectionToWebMercator = projection . getTransformation ( webMercator ) ; // Get the bounding box of actual tiles
BoundingBox zoomBoundingBox = tileDao . getBoundingBox ( ) ; if ( projection . isUnit ( Units . DEGREES ) ) { zoomBoundingBox = TileBoundingBoxUtils . boundDegreesBoundingBoxWithWebMercatorLimits ( zoomBoundingBox ) ; } BoundingBox zoomWebMercatorBoundingBox = zoomBoundingBox . transform ( projectionToWebMercator ) ; GeoPackageTileRetriever retriever = null ; if ( rawImage ) { retriever = new GeoPackageTileRetriever ( tileDao ) ; } else { retriever = new GeoPackageTileRetriever ( tileDao , width , height , imageFormat ) ; } double maxLength = tileDao . getMaxLength ( ) ; double minLength = tileDao . getMinLength ( ) ; double upperMax = getLength ( new BoundingBox ( zoomBoundingBox . getMinLongitude ( ) , zoomBoundingBox . getMaxLatitude ( ) - maxLength , zoomBoundingBox . getMinLongitude ( ) + maxLength , zoomBoundingBox . getMaxLatitude ( ) ) , projectionToWebMercator ) ; double upperMin = getLength ( new BoundingBox ( zoomBoundingBox . getMinLongitude ( ) , zoomBoundingBox . getMaxLatitude ( ) - minLength , zoomBoundingBox . getMinLongitude ( ) + minLength , zoomBoundingBox . getMaxLatitude ( ) ) , projectionToWebMercator ) ; double lowerMax = getLength ( new BoundingBox ( zoomBoundingBox . getMinLongitude ( ) , zoomBoundingBox . getMinLatitude ( ) , zoomBoundingBox . getMinLongitude ( ) + maxLength , zoomBoundingBox . getMinLatitude ( ) + maxLength ) , projectionToWebMercator ) ; double lowerMin = getLength ( new BoundingBox ( zoomBoundingBox . getMinLongitude ( ) , zoomBoundingBox . getMinLatitude ( ) , zoomBoundingBox . getMinLongitude ( ) + minLength , zoomBoundingBox . getMinLatitude ( ) + minLength ) , projectionToWebMercator ) ; double maxWebMercatorLength = Math . max ( upperMax , lowerMax ) ; double minWebMercatorLength = Math . min ( upperMin , lowerMin ) ; double minZoom = TileBoundingBoxUtils . zoomLevelOfTileSize ( maxWebMercatorLength ) ; double maxZoom = TileBoundingBoxUtils . 
zoomLevelOfTileSize ( minWebMercatorLength ) ; int minZoomCeiling = ( int ) Math . ceil ( minZoom ) ; int maxZoomFloor = ( int ) Math . floor ( maxZoom ) ; LOGGER . log ( Level . INFO , tileType + " Zoom Range: " + minZoomCeiling + " - " + maxZoomFloor ) ; for ( int zoomLevel = minZoomCeiling ; zoomLevel <= maxZoomFloor ; zoomLevel ++ ) { File zDirectory = new File ( directory , String . valueOf ( zoomLevel ) ) ; TileGrid tileGrid = TileBoundingBoxUtils . getTileGrid ( zoomWebMercatorBoundingBox , zoomLevel ) ; int zoomCount = 0 ; LOGGER . log ( Level . INFO , "Zoom Level: " + zoomLevel + ", Min X: " + tileGrid . getMinX ( ) + ", Max X: " + tileGrid . getMaxX ( ) + ", Min Y: " + tileGrid . getMinY ( ) + ", Max Y: " + tileGrid . getMaxY ( ) + ", Max Tiles: " + tileGrid . count ( ) ) ; for ( long x = tileGrid . getMinX ( ) ; x <= tileGrid . getMaxX ( ) ; x ++ ) { // Build the z / x directory
File xDirectory = new File ( zDirectory , String . valueOf ( x ) ) ; for ( long y = tileGrid . getMinY ( ) ; y <= tileGrid . getMaxY ( ) ; y ++ ) { GeoPackageTile geoPackageTile = retriever . getTile ( ( int ) x , ( int ) y , zoomLevel ) ; if ( geoPackageTile != null ) { // Get the y file name for the specified format
long yFileName = y ; if ( tileType == TileFormatType . TMS ) { yFileName = TileBoundingBoxUtils . getYAsOppositeTileFormat ( zoomLevel , ( int ) y ) ; } File imageFile = new File ( xDirectory , String . valueOf ( yFileName ) + "." + imageFormat ) ; // Make any needed directories for the image
xDirectory . mkdirs ( ) ; if ( geoPackageTile . getImage ( ) != null ) { // Write the image to the file
ImageIO . write ( geoPackageTile . getImage ( ) , imageFormat , imageFile ) ; } else { // Write the raw image bytes to the file
FileOutputStream fos = new FileOutputStream ( imageFile ) ; fos . write ( geoPackageTile . getData ( ) ) ; fos . close ( ) ; } zoomCount ++ ; if ( zoomCount % ZOOM_PROGRESS_FREQUENCY == 0 ) { LOGGER . log ( Level . INFO , "Zoom " + zoomLevel + " Tile Progress... " + zoomCount ) ; } } } } LOGGER . log ( Level . INFO , "Zoom " + zoomLevel + " Tiles: " + zoomCount ) ; tileCount += zoomCount ; } return tileCount ; |
public class DomainAccessFactory {
    /**
     * Create a domain accessor.
     * @param dbAccess the graph database connection
     * @param domainName the name of the domain to access
     * @return a domain accessor backed by the factory singleton
     */
    public static IDomainAccess createDomainAccess(IDBAccess dbAccess, String domainName) {
        // Delegates to the factory singleton instance.
        return IDomainAccessFactory.INSTANCE.createDomainAccess(dbAccess, domainName);
    }
}
public class WebsphereTraditionalClassLoaderHandler {
    /**
     * Find the classpath entries for the associated {@link ClassLoader}.
     * @param classLoader the {@link ClassLoader} to find the classpath entries order for.
     * @param classpathOrder a {@link ClasspathOrder} object to update.
     * @param scanSpec the {@link ScanSpec}.
     * @param log the log.
     */
    public static void findClasspathOrder(final ClassLoader classLoader, final ClasspathOrder classpathOrder, final ScanSpec scanSpec, final LogNode log) {
        // WebSphere's traditional classloader exposes its classpath through a
        // non-public getClassPath() method, read reflectively here.
        final String classpath = (String) ReflectionUtils.invokeMethod(classLoader, "getClassPath", false);
        classpathOrder.addClasspathEntries(classpath, classLoader, scanSpec, log);
    }
}
public class Sort {
    /**
     * Standard quick sort except that sorting is done on an index array rather
     * than the values themselves. Uses a random pivot, a three-way partition to
     * handle many duplicate values efficiently, and iterates (tail-recursion)
     * on the larger half so the stack stays shallow on adversarial inputs.
     * Stops once a sub-range shrinks to {@code limit} elements, leaving the
     * caller to finish with a simpler sort.
     * @param order The pre-allocated index array
     * @param values The values to sort
     * @param start The beginning of the values to sort
     * @param end The value after the last value to sort
     * @param limit The minimum size to recurse down to.
     */
    private static void quickSort(int[] order, double[] values, int start, int end, int limit) {
        // the while loop implements tail-recursion to avoid excessive stack calls on nasty cases
        while (end - start > limit) {
            // pivot by a random element
            int pivotIndex = start + prng.nextInt(end - start);
            double pivotValue = values[order[pivotIndex]];
            // move pivot to beginning of array
            swap(order, start, pivotIndex);
            // we use a three way partition because many duplicate values is an important case
            int low = start + 1; // low points to first value not known to be equal to pivotValue
            int high = end; // high points to first value > pivotValue
            int i = low; // i scans the array
            while (i < high) {
                // invariant: values[order[k]] == pivotValue for k in [0..low)
                // invariant: values[order[k]] < pivotValue for k in [low..i)
                // invariant: values[order[k]] > pivotValue for k in [high..end)
                // in-loop: i < high
                // in-loop: low < high
                // in-loop: i >= low
                double vi = values[order[i]];
                if (vi == pivotValue) {
                    if (low != i) {
                        swap(order, low, i);
                    } else {
                        i++;
                    }
                    low++;
                } else if (vi > pivotValue) {
                    high--;
                    swap(order, i, high);
                } else {
                    // vi < pivotValue
                    i++;
                }
            }
            // invariant: values[order[k]] == pivotValue for k in [0..low)
            // invariant: values[order[k]] < pivotValue for k in [low..i)
            // invariant: values[order[k]] > pivotValue for k in [high..end)
            // assert i == high || low == high; therefore, we are done with partition
            // at this point, i == high, from [start, low) are == pivot, [low, high) are < and [high, end) are >
            // we have to move the values equal to the pivot into the middle. To do this, we swap pivot
            // values into the top end of the [low, high) range stopping when we run out of destinations
            // or when we run out of values to copy
            int from = start;
            int to = high - 1;
            for (i = 0; from < low && to >= low; i++) {
                swap(order, from++, to--);
            }
            if (from == low) {
                // ran out of things to copy. This means that the last destination is the boundary
                low = to + 1;
            } else {
                // ran out of places to copy to. This means that there are uncopied pivots and the
                // boundary is at the beginning of those
                low = from;
            }
            // checkPartition(order, values, pivotValue, start, low, high, end);
            // now recurse, but arrange it so we handle the longer limit by tail recursion
            if (low - start < end - high) {
                quickSort(order, values, start, low, limit); // this is really a way to do
                // quickSort(order, values, high, end, limit);
                start = high;
            } else {
                quickSort(order, values, high, end, limit); // this is really a way to do
                // quickSort(order, values, start, low, limit);
                end = low;
            }
        }
    }
}
public class EmbeddedNeo4jEntityQueries { /** * Find the nodes corresponding to an array of entity keys .
* @ param executionEngine the { @ link GraphDatabaseService } used to run the query
* @ param keys an array of keys identifying the nodes to return
* @ return the list of nodes representing the entities */
public ResourceIterator < Node > findEntities ( GraphDatabaseService executionEngine , EntityKey [ ] keys ) { } } | if ( singlePropertyKey ) { return singlePropertyIdFindEntities ( executionEngine , keys ) ; } else { return multiPropertiesIdFindEntities ( executionEngine , keys ) ; } |
public class AOStartedFlushItem {
    /**
     * (non-Javadoc)
     * @see com.ibm.ws.sib.processor.impl.store.items.SIMPItem#restore(java.io.ObjectInputStream, int)
     *
     * Restores the stream key and stream id from persisted data. Version 1
     * data lacked the consumer set suffix, so it is appended for compatibility.
     */
    public void restore(ObjectInputStream din, int dataVersion) {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(tc, "restore", new Object[] { din, Integer.valueOf(dataVersion) });
        checkPersistentVersionId(dataVersion);
        try {
            // Version 1 persisted the key without the consumer set; append the default.
            if (dataVersion == 1)
                _streamKey = din.readUTF() + SIMPConstants.DEFAULT_CONSUMER_SET;
            else
                _streamKey = din.readUTF();
            _streamId = new SIBUuid12(din.readUTF());
        } catch (Exception e) {
            // No FFDC code needed
            SIErrorException e2 = new SIErrorException(e);
            if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
                SibTr.exit(tc, "restore", e2);
            throw e2;
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "restore");
    }
}
public class CmsHtmlList { /** * Generates the list of html option elements for a html select control to select a page of a list . < p >
* @ param nrPages the total number of pages
* @ param itemsPage the maximum number of items per page
* @ param nrItems the total number of items
* @ param curPage the current page
* @ param locale the locale
* @ return html code */
public static String htmlPageSelector ( int nrPages , int itemsPage , int nrItems , int curPage , Locale locale ) { } } | StringBuffer html = new StringBuffer ( 256 ) ; for ( int i = 0 ; i < nrPages ; i ++ ) { int displayedFrom = ( i * itemsPage ) + 1 ; int displayedTo = ( ( i + 1 ) * itemsPage ) < nrItems ? ( i + 1 ) * itemsPage : nrItems ; html . append ( "\t\t\t\t<option value='" ) ; html . append ( i + 1 ) ; html . append ( "'" ) ; html . append ( ( i + 1 ) == curPage ? " selected" : "" ) ; html . append ( ">" ) ; html . append ( Messages . get ( ) . getBundle ( locale ) . key ( Messages . GUI_LIST_PAGE_ENTRY_3 , new Integer ( i + 1 ) , new Integer ( displayedFrom ) , new Integer ( displayedTo ) ) ) ; html . append ( "</option>\n" ) ; } return html . toString ( ) ; |
public class BatchedTimeoutManager {
    /**
     * Update the timeout interval of this alarm; the change will come into
     * effect the next time an alarm is scheduled (so the next alarm will pop
     * on the old timeout).
     * @param newTimeout the new timeout interval
     */
    public void updateTimeout(long newTimeout) {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(this, tc, "updateTimeout", Long.valueOf(newTimeout));
        this.delta = newTimeout;
        // Recompute how late (as a percentage of the interval) one bucket's
        // granularity can make an alarm.
        long bucketInterval = delta / (numOfBuckets - 1);
        percentLate = (int) (100 * bucketInterval / delta);
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "updateTimeout", Integer.valueOf(percentLate));
    }
}
public class xen_trend_microvpx_image { /** * Use this API to fetch filtered set of xen _ trend _ microvpx _ image resources .
* filter string should be in JSON format . eg : " vm _ state : DOWN , name : [ a - z ] + " */
public static xen_trend_microvpx_image [ ] get_filtered ( nitro_service service , String filter ) throws Exception { } } | xen_trend_microvpx_image obj = new xen_trend_microvpx_image ( ) ; options option = new options ( ) ; option . set_filter ( filter ) ; xen_trend_microvpx_image [ ] response = ( xen_trend_microvpx_image [ ] ) obj . getfiltered ( service , option ) ; return response ; |
public class Groovyc {
    /**
     * Executes the task: validates parameters, scans the source directories for
     * files to compile, compiles them, and sets the "updated" property when
     * something was compiled successfully.
     * @throws BuildException if an error occurs
     */
    public void execute() throws BuildException {
        checkParameters();
        resetFileLists();
        loadRegisteredScriptExtensions();
        // A nested <javac> element means joint Groovy/Java compilation.
        if (javac != null) jointCompilation = true;
        // scan source directories and dest directory to build up
        // compile lists
        String[] list = src.list();
        for (String filename : list) {
            File file = getProject().resolveFile(filename);
            if (!file.exists()) {
                throw new BuildException("srcdir \"" + file.getPath() + "\" does not exist!", getLocation());
            }
            DirectoryScanner ds = this.getDirectoryScanner(file);
            String[] files = ds.getIncludedFiles();
            scanDir(file, destDir != null ? destDir : file, files);
        }
        compile();
        // Signal build scripts that at least one file was (re)compiled.
        if (updatedProperty != null && taskSuccess && compileList.length != 0) {
            getProject().setNewProperty(updatedProperty, "true");
        }
    }
}
public class Property { /** * Returns property value , even if private .
* < br / >
* Converts JaversException . MISSING _ PROPERTY to null .
* @ param target invocation target */
public Object get ( Object target ) { } } | try { return member . getEvenIfPrivate ( target ) ; } catch ( JaversException e ) { if ( e . getCode ( ) == JaversExceptionCode . MISSING_PROPERTY ) { return null ; } throw e ; } |
public class LoginClientUtil { /** * 实现j _ security _ check功能
* Login _ Module _ Name : SecurityRealm ( Jboss login - config . xml )
* @ param username
* @ param password
* @ return boolean */
public static boolean login ( String username , String password , String Login_Module_Name ) { } } | Subject subject = null ; try { // jboss
CallbackHandler pch = getJBossCallbackHandler ( username , password ) ; LoginContext loginCtx = new LoginContext ( Login_Module_Name , pch ) ; Debug . logVerbose ( "[JdonFramework] begin to login " , module ) ; loginCtx . login ( ) ; Debug . logVerbose ( "[JdonFramework] login successfully, subject=" + subject , module ) ; return true ; } catch ( Exception e ) { e . printStackTrace ( ) ; Debug . logError ( e , module ) ; return false ; } |
public class ServerBlobAuditingPoliciesInner {
    /**
     * Creates or updates a server's blob auditing policy.
     * Blocks on the async implementation and returns its response body.
     * @param resourceGroupName The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal.
     * @param serverName The name of the server.
     * @param parameters Properties of blob auditing policy
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the ServerBlobAuditingPolicyInner object if successful.
     */
    public ServerBlobAuditingPolicyInner beginCreateOrUpdate(String resourceGroupName, String serverName, ServerBlobAuditingPolicyInner parameters) {
        return beginCreateOrUpdateWithServiceResponseAsync(resourceGroupName, serverName, parameters).toBlocking().single().body();
    }
}
public class NodeImpl {
    /**
     * it introduces some inconsistent behavior e.g. you get '&lt;V&gt;' for a Multimap but not for a Map.
     * GWT variant: type-argument introspection is unavailable, so any non-null
     * input is rejected outright.
     * @return false when either argument is null; otherwise this always throws
     * @throws UnsupportedOperationException for non-null arguments (GWT limitation)
     */
    private static boolean includeTypeParameterInformation(final Class<?> containerClass, final Integer typeArgumentIndex) {
        if (containerClass == null || typeArgumentIndex == null) {
            return false;
        }
        // if ( containerClass . getTypeParameters ( ) . length < 2 ) {
        // return false ;
        // return ! ( Map . class . isAssignableFrom ( containerClass ) & & typeArgumentIndex = = 1 ) ;
        throw new UnsupportedOperationException("GWT does not support getTypeArgumentIndex().");
    }
}
public class IoUtils { /** * Retrieve { @ link String } for the file located in resources .
* { @ code
* String html = getResourceAsString ( MyClass . class , " / index . html " ) ; */
public static String getResourceAsString ( Class < ? > clazz , String resource ) throws IOException { } } | try ( InputStream inputStream = clazz . getResourceAsStream ( resource ) ) { return IoUtils . toString ( inputStream ) ; } |
public class Atom10Parser { /** * List ( Elements ) - > List ( Entries ) */
protected List < Entry > parseEntries ( final Feed feed , final String baseURI , final List < Element > eEntries , final Locale locale ) { } } | final List < Entry > entries = new ArrayList < Entry > ( ) ; for ( final Element entry : eEntries ) { entries . add ( this . parseEntry ( feed , entry , baseURI , locale ) ) ; } return Lists . emptyToNull ( entries ) ; |
public class FleetSummaryMarshaller {
    /**
     * Marshall the given parameter object.
     * @param fleetSummary the object to marshall; must not be null
     * @param protocolMarshaller the marshaller each attribute is written to
     * @throws SdkClientException if the argument is null or marshalling fails
     */
    public void marshall(FleetSummary fleetSummary, ProtocolMarshaller protocolMarshaller) {
        if (fleetSummary == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(fleetSummary.getFleetArn(), FLEETARN_BINDING);
            protocolMarshaller.marshall(fleetSummary.getCreatedTime(), CREATEDTIME_BINDING);
            protocolMarshaller.marshall(fleetSummary.getLastUpdatedTime(), LASTUPDATEDTIME_BINDING);
            protocolMarshaller.marshall(fleetSummary.getFleetName(), FLEETNAME_BINDING);
            protocolMarshaller.marshall(fleetSummary.getDisplayName(), DISPLAYNAME_BINDING);
            protocolMarshaller.marshall(fleetSummary.getCompanyCode(), COMPANYCODE_BINDING);
            protocolMarshaller.marshall(fleetSummary.getFleetStatus(), FLEETSTATUS_BINDING);
        } catch (Exception e) {
            // Wrap any low-level failure in the SDK client exception, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class EntityFilter { /** * Filter a list of entities through a predicate .
* If the the predicate applies the entity will be included .
* @ param predicate
* @ param candidates
* @ return a subset of the actual list of candidates */
public List < T > apply ( Predicate < T > predicate , List < T > candidates ) { } } | List < T > filtered = new ArrayList < T > ( candidates . size ( ) ) ; for ( T entity : candidates ) { if ( predicate . appliesTo ( entity ) ) filtered . add ( entity ) ; } return filtered ; |
public class CollectionUtils {
    /**
     * Provides a consistent ordering over lists: compares element by element,
     * and when one list is a prefix of the other, the shorter list orders first.
     * Two null lists compare equal; exactly one null is an error.
     * @throws IllegalArgumentException if exactly one list is null
     */
    public static <T extends Comparable<T>> int compareLists(List<T> list1, List<T> list2) {
        if (list1 == null && list2 == null) {
            return 0;
        }
        if (list1 == null || list2 == null) {
            throw new IllegalArgumentException();
        }
        Iterator<T> it1 = list1.iterator();
        Iterator<T> it2 = list2.iterator();
        while (it1.hasNext() && it2.hasNext()) {
            int cmp = it1.next().compareTo(it2.next());
            if (cmp != 0) {
                return cmp;
            }
        }
        // Shared prefix is equal; the shorter list sorts first.
        return Integer.compare(list1.size(), list2.size());
    }
}
public class ChangedList { /** * Adds a changed file to the list of items to be processed . If the file
* happens to match exclusion rules it will not be added to the list ( and
* the method will return false ) . Note that only the most current update to
* any given file is provided to the change processor .
* @ param changedFile a file which has changed on the file system
* @ return false if the changedFile is null or matches at least one
* exclusion rule . */
public boolean addChangedFile ( final File changedFile ) { } } | if ( null != changedFile ) { return addChangedFile ( new ChangedFile ( changedFile ) ) ; } else { log . warn ( "The changedFile parameter was unexpectedly null. Ignored." ) ; return false ; } |
public class SubClass {
    /**
     * Define a constant field and set the constant value.
     * The field is always generated as a static int.
     * @param modifier additional modifiers for the field
     * @param fieldName name of the field
     * @param constant the constant int value
     */
    public void defineConstantField(int modifier, String fieldName, int constant) {
        DeclaredType dt = (DeclaredType) asType();
        VariableBuilder builder = new VariableBuilder(this, fieldName, dt.getTypeArguments(), typeParameterMap);
        builder.addModifiers(modifier);
        // Constants are forced static and typed int regardless of the caller's modifiers.
        builder.addModifier(Modifier.STATIC);
        builder.setType(Typ.IntA);
        FieldInfo fieldInfo = new FieldInfo(this, builder.getVariableElement(), new ConstantValue(this, constant));
        addFieldInfo(fieldInfo);
        fieldInfo.readyToWrite();
    }
}
public class BitConverter {
    /**
     * Writes the given 64-bit Long to the given byte array at the given offset,
     * most significant byte first (big-endian).
     * @param target The byte array to write to.
     * @param offset The offset within the byte array to write at.
     * @param value The value to write.
     * @return The number of bytes written (always {@link Long#BYTES}).
     */
    public static int writeLong(byte[] target, int offset, long value) {
        // Emit the 8 bytes from most significant to least significant.
        for (int i = 0; i < Long.BYTES; i++) {
            int shift = Byte.SIZE * (Long.BYTES - 1 - i);
            target[offset + i] = (byte) (value >>> shift);
        }
        return Long.BYTES;
    }
}
public class FeatureDependencyChecker { /** * Verify the name is on the uninstall list
* @ param name symbolic name of the feature
* @ param list list of the uninstalling features
* @ return true if the feature is going to be uninstalled , otherwise , return false . */
public boolean toBeUninstalled ( String name , List < UninstallAsset > list ) { } } | for ( UninstallAsset asset : list ) { String featureName = InstallUtils . getShortName ( asset . getProvisioningFeatureDefinition ( ) ) ; if ( asset . getName ( ) . equals ( name ) || ( featureName != null && featureName . equals ( name ) ) ) { InstallLogUtils . getInstallLogger ( ) . log ( Level . FINEST , "The dependent feature is specified to be uninstalled : " + featureName ) ; return true ; } } return false ; |
public class RecordsProcessorUtil {
    /**
     * Checks that the {@link XlsNestedRecords} annotation does not form a
     * recursively nested structure over the same bean.
     * @since 1.4
     * @param recordClass the record class to check
     * @param accessor the field the annotation is attached to
     * @param reader the {@link AnnotationReader} instance
     * @throws AnnotationInvalidException if the structure is recursively nested
     */
    public static void checkLoadingNestedRecordClass(final Class<?> recordClass, final FieldAccessor accessor, final AnnotationReader reader) throws AnnotationInvalidException {
        ArgUtils.notNull(recordClass, "recordClass");
        ArgUtils.notNull(accessor, "accessor");
        ArgUtils.notNull(reader, "reader");
        // Check recursively, tracking the record classes seen so far.
        List<Class<?>> nestedRecordClasses = new ArrayList<>();
        checkLoadingNestedRecordClass(recordClass, accessor, reader, nestedRecordClasses);
    }
}
public class IntelSmithWaterman { /** * Loads the native library , if it is supported on this platform . < p >
* Returns false if AVX is not supported . < br >
* Returns false if the native library cannot be loaded for any reason . < br >
* @ param tempDir directory where the native library is extracted or null to use the system temp directory
* @ return true if the native library is supported and loaded , false otherwise */
@ Override public synchronized boolean load ( File tempDir ) { } } | boolean isLoaded = gklUtils . load ( null ) ; if ( ! isLoaded ) { logger . warn ( "Intel GKL Utils not loaded" ) ; return false ; } if ( ! gklUtils . isAvxSupported ( ) || ! gklUtils . isAvx2Supported ( ) ) { return false ; } if ( ! NativeLibraryLoader . load ( tempDir , NATIVE_LIBRARY_NAME ) ) { return false ; } if ( ! initialized ) { initialized = true ; } if ( gklUtils . isAvx512Supported ( ) ) { logger . info ( "Using CPU-supported AVX-512 instructions" ) ; } /* Initializes the function pointers to use machine specific optimized code */
initNative ( ) ; return true ; |
public class Transliterator { /** * Register two targets as being inverses of one another . For
* example , calling registerSpecialInverse ( " NFC " , " NFD " , true ) causes
* Transliterator to form the following inverse relationships :
* < pre > NFC = & gt ; NFD
* Any - NFC = & gt ; Any - NFD
* NFD = & gt ; NFC
* Any - NFD = & gt ; Any - NFC < / pre >
* ( Without the special inverse registration , the inverse of NFC
* would be NFC - Any . ) Note that NFD is shorthand for Any - NFD , but
* that the presence or absence of " Any - " is preserved .
* < p > The relationship is symmetrical ; registering ( a , b ) is
* equivalent to registering ( b , a ) .
* < p > The relevant IDs must still be registered separately as
* factories or classes .
* < p > Only the targets are specified . Special inverses always
* have the form Any - Target1 & lt ; = & gt ; Any - Target2 . The target should
* have canonical casing ( the casing desired to be produced when
* an inverse is formed ) and should contain no whitespace or other
* extraneous characters .
* @ param target the target against which to register the inverse
* @ param inverseTarget the inverse of target , that is
* Any - target . getInverse ( ) = & gt ; Any - inverseTarget
* @ param bidirectional if true , register the reverse relation
* as well , that is , Any - inverseTarget . getInverse ( ) = & gt ; Any - target */
static void registerSpecialInverse ( String target , String inverseTarget , boolean bidirectional ) { } } | TransliteratorIDParser . registerSpecialInverse ( target , inverseTarget , bidirectional ) ; |
public class ProviderList { /** * attempt to load all Providers not already loaded */
private int loadAll ( ) { } } | if ( allLoaded ) { return configs . length ; } if ( debug != null ) { debug . println ( "Loading all providers" ) ; new Exception ( "Call trace" ) . printStackTrace ( ) ; } int n = 0 ; for ( int i = 0 ; i < configs . length ; i ++ ) { Provider p = configs [ i ] . getProvider ( ) ; if ( p != null ) { n ++ ; } } if ( n == configs . length ) { allLoaded = true ; } return n ; |
public class CreateJobRequest { /** * A section of the request body that provides information about the files that are being transcoded .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setInputs ( java . util . Collection ) } or { @ link # withInputs ( java . util . Collection ) } if you want to override the
* existing values .
* @ param inputs
* A section of the request body that provides information about the files that are being transcoded .
* @ return Returns a reference to this object so that method calls can be chained together . */
public CreateJobRequest withInputs ( JobInput ... inputs ) { } } | if ( this . inputs == null ) { setInputs ( new com . amazonaws . internal . SdkInternalList < JobInput > ( inputs . length ) ) ; } for ( JobInput ele : inputs ) { this . inputs . add ( ele ) ; } return this ; |
public class HessianFactory { /** * Frees a Hessian 2.0 deserializer */
public void freeHessian2Input ( Hessian2Input in ) { } } | if ( in == null ) return ; in . free ( ) ; _freeHessian2Input . free ( in ) ; |
public class RoomInfoImpl { /** * / * ( non - Javadoc )
* @ see com . tvd12 . ezyfox . core . command . UpdateRoomInfo # setCapacity ( int , int ) */
@ Override public void setCapacity ( int maxUser , int maxSpectators ) { } } | room . setCapacity ( maxUser , maxSpectators ) ; apiRoom . setMaxUsers ( maxUser ) ; apiRoom . setMaxSpectators ( maxSpectators ) ; |
public class CommonSwing { /** * ( weconsultants @ users : Callable setFramePositon method */
static void setFramePositon ( JFrame inTargetFrame ) { } } | Dimension d = Toolkit . getDefaultToolkit ( ) . getScreenSize ( ) ; Dimension size = inTargetFrame . getSize ( ) ; // ( ulrivo ) : full size on screen with less than 640 width
if ( d . width >= 640 ) { inTargetFrame . setLocation ( ( d . width - size . width ) / 2 , ( d . height - size . height ) / 2 ) ; } else { inTargetFrame . setLocation ( 0 , 0 ) ; inTargetFrame . setSize ( d ) ; } |
public class JDBC4DatabaseMetaData { /** * Retrieves a description of a table ' s optimal set of columns that uniquely identifies a row . */
@ Override public ResultSet getBestRowIdentifier ( String catalog , String schema , String table , int scope , boolean nullable ) throws SQLException { } } | checkClosed ( ) ; throw SQLError . noSupport ( ) ; |
public class ExpandableRecyclerAdapter { /** * Gets the index of a ExpandableWrapper within the helper item list based on
* the index of the ExpandableWrapper .
* @ param parentPosition The index of the parent in the list of parents
* @ return The index of the parent in the merged list of children and parents */
@ UiThread private int getFlatParentPosition ( int parentPosition ) { } } | int parentCount = 0 ; int listItemCount = mFlatItemList . size ( ) ; for ( int i = 0 ; i < listItemCount ; i ++ ) { if ( mFlatItemList . get ( i ) . isParent ( ) ) { parentCount ++ ; if ( parentCount > parentPosition ) { return i ; } } } return INVALID_FLAT_POSITION ; |
public class ESHttpUtils { /** * Returns a list of nodes from a given document using xpath .
* @ param rootNode
* Node to search .
* @ param xPath
* XPath to use .
* @ param expression
* XPath expression .
* @ return Nodes or < code > null < / code > if no match was found . */
@ Nullable public static NodeList findNodes ( @ NotNull final Node rootNode , @ NotNull final XPath xPath , @ NotNull final String expression ) { } } | Contract . requireArgNotNull ( "doc" , rootNode ) ; Contract . requireArgNotNull ( "xPath" , xPath ) ; Contract . requireArgNotNull ( "expression" , expression ) ; try { return ( NodeList ) xPath . compile ( expression ) . evaluate ( rootNode , XPathConstants . NODESET ) ; } catch ( final XPathExpressionException ex ) { throw new RuntimeException ( "Failed to read node: " + expression , ex ) ; } |
public class ShanksAgentBayesianReasoningCapability { /** * Update the CPT of the aux node to get the soft - evidence in the target
* node
* @ param bn
* @ param targetNode
* @ param auxNode
* @ param softEvidence
* @ throws ShanksException */
private static void updateSoftEvidenceAuxiliaryNodeCPT ( Network bn , String targetNodeName , HashMap < String , Double > softEvidence ) throws ShanksException { } } | int targetNode = bn . getNode ( targetNodeName ) ; int auxNode = bn . getNode ( softEvidenceNodePrefix + targetNodeName ) ; // Check if new beliefs join 1
double total = 0 ; for ( Entry < String , Double > entry : softEvidence . entrySet ( ) ) { total += entry . getValue ( ) ; } double aux = 1 - total ; if ( aux < ( - 0.05 ) || aux > 0.05 ) { throw new ShanksException ( "Impossible to set soft-evidence in node: " + targetNodeName + " Target probabilistic distribution is not consistent. All states joint: " + total ) ; } // Check if believes are consistent
if ( bn . getOutcomeCount ( targetNodeName ) != softEvidence . size ( ) ) { throw new ShanksException ( "Old belief and new belief are incompatible. Different number of states of hypothesis" ) ; } for ( String status : softEvidence . keySet ( ) ) { boolean found = false ; String [ ] outcomes = bn . getOutcomeIds ( targetNode ) ; for ( int i = 0 ; i < outcomes . length ; i ++ ) { if ( outcomes [ i ] . equals ( status ) ) { found = true ; break ; } } if ( ! found ) { throw new ShanksException ( "Not valid Belief, exist unconsistent between current and old believes." ) ; } } // Now , when the consistent has been checked
// Update the belief
// Reset evidence .
ShanksAgentBayesianReasoningCapability . clearEvidence ( bn , bn . getNodeName ( auxNode ) ) ; // Obtain required data
int size = bn . getOutcomeCount ( targetNodeName ) ; double [ ] currentProbabilities = new double [ size ] ; double [ ] newProbabilities = new double [ size ] ; String [ ] states = bn . getOutcomeIds ( targetNodeName ) ; bn . updateBeliefs ( ) ; double [ ] values = bn . getNodeValue ( targetNodeName ) ; for ( int i = 0 ; i < size ; i ++ ) { for ( String status : softEvidence . keySet ( ) ) { if ( states [ i ] . equals ( status ) ) { currentProbabilities [ i ] = values [ i ] ; newProbabilities [ i ] = softEvidence . get ( status ) ; break ; } } } // Build the new CPT
double auxNumber = new Float ( 0.1 ) ; double [ ] cptProbabilitiesPrime = new double [ currentProbabilities . length ] ; for ( int i = 0 ; i < currentProbabilities . length ; i ++ ) { cptProbabilitiesPrime [ i ] = 0.5 ; if ( currentProbabilities [ i ] != 0.0 ) { cptProbabilitiesPrime [ i ] = newProbabilities [ i ] * auxNumber / currentProbabilities [ i ] ; } else if ( newProbabilities [ i ] != 0.0 ) { // If there is a full confidence ( i . e . 0.0 probability , you must
// not request a belief
// It is impossible that a change in a full confidence ( 100 % or
// If this occurs , the BN is wrong fixed ( CPT ' s are wrong )
throw new ShanksException ( "Incoherence! Belief does not update. Probability for status: " + targetNodeName + " is ficked as evidence!!" ) ; } if ( cptProbabilitiesPrime [ i ] > 1 ) { // In this case , reduce auxNumber and rebuild CPT probabilities .
auxNumber = auxNumber / 2 ; i = - 1 ; } } // Update the CPT and the Inference Engine
// TODO check this
double [ ] cpt = new double [ bn . getNodeDefinition ( auxNode ) . length ] ; for ( int i = 0 ; i < currentProbabilities . length ; i ++ ) { cpt [ 2 * i ] = cptProbabilitiesPrime [ i ] ; cpt [ 1 + ( 2 * i ) ] = ( 1 - cptProbabilitiesPrime [ i ] ) ; } bn . setNodeDefinition ( softEvidenceNodePrefix + targetNodeName , cpt ) ; // Compiling new network
// Map < String , String > currentEvidences =
// ShanksAgentBayesianReasoningCapability
// . getEvidences ( bn ) ;
// ShanksAgentBayesianReasoningCapability . clearEvidences ( bn ) ;
// ShanksAgentBayesianReasoningCapability . addEvidences ( bn ,
// currentEvidences ) ;
// Testing CPT
// ShanksAgentBayesianReasoningCapability . addEvidence ( bn ,
// softEvidenceNodePrefix + targetNodeName , triggerState ) ;
// double conf =
// ShanksAgentBayesianReasoningCapability . getHypothesis ( bn ,
// softEvidenceNodePrefix + targetNodeName , triggerState ) ;
// if ( Math . abs ( conf - 1 ) > 0.01 ) {
// throw new ShanksException (
// " Error adding finding to soft - evidence node for node : "
// + targetNodeName
// + " It should be equals to 1 , but it is : " + conf ) ;
// ShanksAgentBayesianReasoningCapability . clearEvidence ( bn , auxNode ) ; |
public class LogRepositoryBaseImpl { /** * Retrieves the label out of the directory name ( Qualifier 3)
* @ param fileName
* @ return */
public static String parseLabel ( String fileName ) { } } | if ( fileName == null || fileName . isEmpty ( ) ) { return null ; } int labelIndex = fileName . indexOf ( LABELSEPARATOR ) ; return labelIndex < 0 ? null : fileName . substring ( labelIndex + 1 ) ; |
public class CmsHistorySettingsDialog { /** * Returns a list with the possible modes for the history to keep . < p >
* @ return a list with the possible modes for the history to keep */
private List getModes ( ) { } } | ArrayList ret = new ArrayList ( ) ; ret . add ( new CmsSelectWidgetOption ( String . valueOf ( CmsHistorySettings . MODE_DELETED_HISTORY_DISABLED ) , m_historySettings . getMode ( ) == CmsHistorySettings . MODE_DELETED_HISTORY_DISABLED , key ( Messages . GUI_HISTORY_SETTINGS_MODE_DISABLED_0 ) ) ) ; ret . add ( new CmsSelectWidgetOption ( String . valueOf ( CmsHistorySettings . MODE_DELETED_HISTORY_KEEP_NO_VERSIONS ) , m_historySettings . getMode ( ) == CmsHistorySettings . MODE_DELETED_HISTORY_KEEP_NO_VERSIONS , key ( Messages . GUI_HISTORY_SETTINGS_MODE_KEEP_NO_VERSIONS_0 ) ) ) ; ret . add ( new CmsSelectWidgetOption ( String . valueOf ( CmsHistorySettings . MODE_DELETED_HISTORY_KEEP_WITH_VERSIONS ) , m_historySettings . getMode ( ) == CmsHistorySettings . MODE_DELETED_HISTORY_KEEP_WITH_VERSIONS , key ( Messages . GUI_HISTORY_SETTINGS_MODE_KEEP_WITH_VERSIONS_0 ) ) ) ; return ret ; |
public class AnnotationUtils { /** * Find a single { @ link Annotation } of { @ code annotationType } on the
* supplied { @ link Class } , traversing its interfaces , annotations , and
* superclasses if the annotation is not < em > present < / em > on the given class
* itself .
* < p > This method explicitly handles class - level annotations which are not
* declared as { @ link java . lang . annotation . Inherited inherited } < em > as well
* as meta - annotations and annotations on interfaces < / em > .
* < p > The algorithm operates as follows :
* < ol >
* < li > Search for the annotation on the given class and return it if found .
* < li > Recursively search through all interfaces that the given class declares .
* < li > Recursively search through all annotations that the given class declares .
* < li > Recursively search through the superclass hierarchy of the given class .
* < / ol >
* < p > Note : in this context , the term < em > recursively < / em > means that the search
* process continues by returning to step # 1 with the current interface ,
* annotation , or superclass as the class to look for annotations on .
* @ param clazz the class to look for annotations on
* @ param annotationType the type of annotation to look for
* @ return the annotation if found , or { @ code null } if not found */
@ SuppressWarnings ( "unchecked" ) public static < A extends Annotation > A findAnnotation ( Class < ? > clazz , Class < A > annotationType ) { } } | AnnotationCacheKey cacheKey = new AnnotationCacheKey ( clazz , annotationType ) ; A result = ( A ) findAnnotationCache . get ( cacheKey ) ; if ( result == null ) { result = findAnnotation ( clazz , annotationType , new HashSet < Annotation > ( ) ) ; if ( result != null ) { findAnnotationCache . put ( cacheKey , result ) ; } } return result ; |
public class IdDt { /** * Creates a new instance of this ID which is identical , but refers to the specific version of this resource ID noted by theVersion .
* @ param theVersion The actual version string , e . g . " 1 " . If theVersion is blank or null , returns the same as { @ link # toVersionless ( ) } }
* @ return A new instance of IdDt which is identical , but refers to the specific version of this resource ID noted by theVersion . */
@ Override public IdDt withVersion ( String theVersion ) { } } | if ( isBlank ( theVersion ) ) { return toVersionless ( ) ; } if ( isLocal ( ) || isUrn ( ) ) { return new IdDt ( getValueAsString ( ) ) ; } String existingValue = getValue ( ) ; int i = existingValue . indexOf ( Constants . PARAM_HISTORY ) ; String value ; if ( i > 1 ) { value = existingValue . substring ( 0 , i - 1 ) ; } else { value = existingValue ; } return new IdDt ( value + '/' + Constants . PARAM_HISTORY + '/' + theVersion ) ; |
public class IssueManager { /** * Free - form search that does not do any paging for you . Btw , where is Redmine free - form search documentation ? ?
* Sample usage :
* < pre >
* Params params = new Params ( )
* . add ( " set _ filter " , " 1 " )
* . add ( " f [ ] " , " summary " )
* . add ( " op [ summary ] " , " ~ " )
* . add ( " v [ summary ] " , " another " )
* . add ( " f [ ] " , " description " )
* . add ( " op [ description ] " , " ~ " )
* . add ( " v [ description ] [ ] " , " abc " ) ;
* list = issueManager . getIssues ( params ) ;
* < / pre >
* @ param parameters */
public ResultsWrapper < Issue > getIssues ( Params parameters ) throws RedmineException { } } | return transport . getObjectsListNoPaging ( Issue . class , parameters . getList ( ) ) ; |
public class BoxHttpResponse { /** * Open connection to the resource .
* @ throws IOException if an error occurs connecting to server . */
public void open ( ) throws IOException { } } | mConnection . connect ( ) ; mContentType = mConnection . getContentType ( ) ; mResponseCode = mConnection . getResponseCode ( ) ; mContentEncoding = mConnection . getContentEncoding ( ) ; |
public class JettyConstraintUtil { /** * Create constraint which redirect to Secure Port
* @ return ConstraintSecurityHandler */
public static ConstraintSecurityHandler getConstraintSecurityHandlerConfidential ( ) { } } | Constraint constraint = new Constraint ( ) ; constraint . setDataConstraint ( Constraint . DC_CONFIDENTIAL ) ; ConstraintMapping constraintMapping = new ConstraintMapping ( ) ; constraintMapping . setConstraint ( constraint ) ; constraintMapping . setPathSpec ( "/*" ) ; ConstraintSecurityHandler constraintSecurityHandler = new ConstraintSecurityHandler ( ) ; constraintSecurityHandler . addConstraintMapping ( constraintMapping ) ; return constraintSecurityHandler ; |
public class USerializedSet { /** * Returns true if the given USerializedSet contains the given
* character .
* @ param c the character to test for
* @ return true if set contains c */
public final boolean contains ( int c ) { } } | if ( c > 0x10ffff ) { return false ; } if ( c <= 0xffff ) { int i ; /* find c in the BMP part */
for ( i = 0 ; i < bmpLength && ( char ) c >= array [ i ] ; ++ i ) { } return ( ( i & 1 ) != 0 ) ; } else { int i ; /* find c in the supplementary part */
char high = ( char ) ( c >> 16 ) , low = ( char ) c ; for ( i = bmpLength ; i < length && ( high > array [ i ] || ( high == array [ i ] && low >= array [ i + 1 ] ) ) ; i += 2 ) { } /* count pairs of 16 - bit units even per BMP and check if the number of pairs is odd */
return ( ( ( i + bmpLength ) & 2 ) != 0 ) ; } |
public class CheckBox { /** * The CheckBox uncheck function It invokes SeLion session to handle the uncheck action against the element . Waits
* until element is found with given locator . */
public void uncheck ( String locator ) { } } | getDispatcher ( ) . beforeUncheck ( this , locator ) ; this . uncheck ( ) ; validatePresenceOfAlert ( ) ; WebDriverWaitUtils . waitUntilElementIsPresent ( locator ) ; getDispatcher ( ) . afterUncheck ( this , locator ) ; |
public class AbstractIoSession { /** * TODO Add method documentation */
public final void increaseReadBufferSize ( ) { } } | if ( AbstractIoSessionConfig . ENABLE_BUFFER_SIZE ) { System . out . println ( "AbstractIoSession.increaseReadBufferSize()" ) ; int newReadBufferSize = getConfig ( ) . getReadBufferSize ( ) << 1 ; if ( newReadBufferSize <= getConfig ( ) . getMaxReadBufferSize ( ) ) { getConfig ( ) . setReadBufferSize ( newReadBufferSize ) ; } else { getConfig ( ) . setReadBufferSize ( getConfig ( ) . getMaxReadBufferSize ( ) ) ; } } deferDecreaseReadBuffer = true ; |
public class HashIndex { /** * Inserts a new index record into this index .
* @ see Index # insert ( SearchKey , RecordId , boolean ) */
@ Override public void insert ( SearchKey key , RecordId dataRecordId , boolean doLogicalLogging ) { } } | // search the position
beforeFirst ( new SearchRange ( key ) ) ; // log the logical operation starts
if ( doLogicalLogging ) tx . recoveryMgr ( ) . logLogicalStart ( ) ; // insert the data
rf . insert ( ) ; for ( int i = 0 ; i < keyType . length ( ) ; i ++ ) rf . setVal ( keyFieldName ( i ) , key . get ( i ) ) ; rf . setVal ( SCHEMA_RID_BLOCK , new BigIntConstant ( dataRecordId . block ( ) . number ( ) ) ) ; rf . setVal ( SCHEMA_RID_ID , new IntegerConstant ( dataRecordId . id ( ) ) ) ; // log the logical operation ends
if ( doLogicalLogging ) tx . recoveryMgr ( ) . logIndexInsertionEnd ( ii . indexName ( ) , key , dataRecordId . block ( ) . number ( ) , dataRecordId . id ( ) ) ; |
public class AccountsImpl { /** * Gets the number of nodes in each state , grouped by pool .
* ServiceResponseWithHeaders < PageImpl < PoolNodeCounts > , AccountListPoolNodeCountsHeaders > * @ param accountListPoolNodeCountsOptions Additional parameters for the operation
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the PagedList & lt ; PoolNodeCounts & gt ; object wrapped in { @ link ServiceResponseWithHeaders } if successful . */
public Observable < ServiceResponseWithHeaders < Page < PoolNodeCounts > , AccountListPoolNodeCountsHeaders > > listPoolNodeCountsSinglePageAsync ( final AccountListPoolNodeCountsOptions accountListPoolNodeCountsOptions ) { } } | if ( this . client . batchUrl ( ) == null ) { throw new IllegalArgumentException ( "Parameter this.client.batchUrl() is required and cannot be null." ) ; } if ( this . client . apiVersion ( ) == null ) { throw new IllegalArgumentException ( "Parameter this.client.apiVersion() is required and cannot be null." ) ; } Validator . validate ( accountListPoolNodeCountsOptions ) ; String filter = null ; if ( accountListPoolNodeCountsOptions != null ) { filter = accountListPoolNodeCountsOptions . filter ( ) ; } Integer maxResults = null ; if ( accountListPoolNodeCountsOptions != null ) { maxResults = accountListPoolNodeCountsOptions . maxResults ( ) ; } Integer timeout = null ; if ( accountListPoolNodeCountsOptions != null ) { timeout = accountListPoolNodeCountsOptions . timeout ( ) ; } UUID clientRequestId = null ; if ( accountListPoolNodeCountsOptions != null ) { clientRequestId = accountListPoolNodeCountsOptions . clientRequestId ( ) ; } Boolean returnClientRequestId = null ; if ( accountListPoolNodeCountsOptions != null ) { returnClientRequestId = accountListPoolNodeCountsOptions . returnClientRequestId ( ) ; } DateTime ocpDate = null ; if ( accountListPoolNodeCountsOptions != null ) { ocpDate = accountListPoolNodeCountsOptions . ocpDate ( ) ; } String parameterizedHost = Joiner . on ( ", " ) . join ( "{batchUrl}" , this . client . batchUrl ( ) ) ; DateTimeRfc1123 ocpDateConverted = null ; if ( ocpDate != null ) { ocpDateConverted = new DateTimeRfc1123 ( ocpDate ) ; } return service . listPoolNodeCounts ( this . client . apiVersion ( ) , this . client . acceptLanguage ( ) , filter , maxResults , timeout , clientRequestId , returnClientRequestId , ocpDateConverted , parameterizedHost , this . client . userAgent ( ) ) . 
flatMap ( new Func1 < Response < ResponseBody > , Observable < ServiceResponseWithHeaders < Page < PoolNodeCounts > , AccountListPoolNodeCountsHeaders > > > ( ) { @ Override public Observable < ServiceResponseWithHeaders < Page < PoolNodeCounts > , AccountListPoolNodeCountsHeaders > > call ( Response < ResponseBody > response ) { try { ServiceResponseWithHeaders < PageImpl < PoolNodeCounts > , AccountListPoolNodeCountsHeaders > result = listPoolNodeCountsDelegate ( response ) ; return Observable . just ( new ServiceResponseWithHeaders < Page < PoolNodeCounts > , AccountListPoolNodeCountsHeaders > ( result . body ( ) , result . headers ( ) , result . response ( ) ) ) ; } catch ( Throwable t ) { return Observable . error ( t ) ; } } } ) ; |
public class Generators { /** * Constructs a generator that yields the specified hours in increasing
* order for each day .
* @ param hours the hour values ( each value must be in range [ 0,23 ] )
* @ param dtStart the start date
* @ return the generator */
static Generator byHourGenerator ( int [ ] hours , final DateValue dtStart ) { } } | final TimeValue dtStartTime = TimeUtils . timeOf ( dtStart ) ; final int [ ] uhours = ( hours . length == 0 ) ? new int [ ] { dtStartTime . hour ( ) } : Util . uniquify ( hours ) ; if ( uhours . length == 1 ) { final int hour = uhours [ 0 ] ; return new SingleValueGenerator ( ) { int year ; int month ; int day ; @ Override boolean generate ( DTBuilder builder ) { if ( year != builder . year || month != builder . month || day != builder . day ) { year = builder . year ; month = builder . month ; day = builder . day ; builder . hour = hour ; return true ; } return false ; } @ Override int getValue ( ) { return hour ; } @ Override public String toString ( ) { return "byHourGenerator:" + hour ; } } ; } return new Generator ( ) { int i ; int year = dtStart . year ( ) ; int month = dtStart . month ( ) ; int day = dtStart . day ( ) ; { int hour = dtStartTime . hour ( ) ; while ( i < uhours . length && uhours [ i ] < hour ) { ++ i ; } } @ Override boolean generate ( DTBuilder builder ) { if ( year != builder . year || month != builder . month || day != builder . day ) { i = 0 ; year = builder . year ; month = builder . month ; day = builder . day ; } if ( i >= uhours . length ) { return false ; } builder . hour = uhours [ i ++ ] ; return true ; } @ Override public String toString ( ) { return "byHourGenerator:" + Arrays . toString ( uhours ) ; } } ; |
public class GosuClassPathThing { /** * We don ' t currently wrap the chain of loaders for WebSphere or WebLogic or JBoss
* because they use " module " class loaders that are not URLClassLoader - like . We
* can maybe someday handle them seperately .
* IBM class loader chain :
* com . guidewire . pl . system . gosu . GosuPluginContainer - >
* com . guidewire . pl . system . integration . plugins . PluginContainer - >
* com . guidewire . pl . system . integration . plugins . SharedPluginContainer - >
* com . guidewire . pl . system . integration . plugins . PluginContainer - >
* [ weblogic . utils . classloaders . ChangeAwareClassLoader - >
* weblogic . utils . classloaders . FilteringClassLoader - >
* weblogic . utils . classloaders . GenericClassLoader ] * - >
* sun . misc . Launcher $ AppClassLoader - >
* sun . misc . Launcher $ ExtClassLoader - >
* < null >
* WebLogic class loader chain :
* com . guidewire . pl . system . gosu . GosuPluginContainer - >
* com . guidewire . pl . system . integration . plugins . PluginContainer - >
* com . guidewire . pl . system . integration . plugins . SharedPluginContainer - >
* com . guidewire . pl . system . integration . plugins . PluginContainer - >
* org . jboss . modules . ModuleClassLoader - >
* sun . misc . Launcher $ AppClassLoader - >
* sun . misc . Launcher $ ExtClassLoader - >
* < null > */
private static boolean canWrapChain ( ClassLoader loader ) { } } | if ( loader == null ) { return false ; } UrlClassLoaderWrapper wrapped = UrlClassLoaderWrapper . wrap ( loader ) ; boolean bSysLoader = loader == ClassLoader . getSystemClassLoader ( ) ; if ( bSysLoader ) { return wrapped != null ; } loader = loader . getParent ( ) ; return wrapped != null && canWrapChain ( loader ) ; |
public class Whitebox { /** * Invoke a private or inner class method in that is located in a subclass
* of the instance . This might be useful to test private methods .
* Use this for overloaded methods .
* @ throws Exception
* Exception that may occur when invoking this method . */
public static synchronized < T > T invokeMethod ( Object object , Class < ? > declaringClass , String methodToExecute , Class < ? > [ ] parameterTypes , Object ... arguments ) throws Exception { } } | return WhiteboxImpl . invokeMethod ( object , declaringClass , methodToExecute , parameterTypes , arguments ) ; |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.