signature
stringlengths 43
39.1k
| implementation
stringlengths 0
450k
|
|---|---|
public class MolecularFormulaManipulator { /** * Construct an instance of IMolecularFormula , initialized with a molecular
* formula string . The string is immediately analyzed and a set of Nodes
* is built based on this analysis . The hydrogens must be implicit . Major
* isotopes are being used .
* @ param stringMF The molecularFormula string
* @ param builder a IChemObjectBuilder which is used to construct atoms
* @ return The filled IMolecularFormula
* @ see # getMolecularFormula ( String , IMolecularFormula ) */
public static IMolecularFormula getMajorIsotopeMolecularFormula ( String stringMF , IChemObjectBuilder builder ) { } }
|
return getMolecularFormula ( stringMF , true , builder ) ;
|
public class CmsFlexCacheKey { /** * Parse a String in the Flex cache language and construct
* the key data structure from this . < p >
* @ param key the String to parse ( usually read from the file property " cache " ) */
private void parseFlexKey ( String key ) { } }
|
List < String > tokens = CmsStringUtil . splitAsList ( key , ';' , false ) ; Iterator < String > i = tokens . iterator ( ) ; try { while ( i . hasNext ( ) ) { String t = i . next ( ) ; String k = null ; String v = null ; int idx = t . indexOf ( '=' ) ; if ( idx >= 0 ) { k = t . substring ( 0 , idx ) . trim ( ) ; if ( t . length ( ) > idx ) { v = t . substring ( idx + 1 ) . trim ( ) ; } } else { k = t . trim ( ) ; } m_always = 0 ; if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( Messages . get ( ) . getBundle ( ) . key ( Messages . LOG_FLEXCACHEKEY_PARSE_FLEXKEY_3 , t , k , v ) ) ; } switch ( CACHE_COMMANDS . indexOf ( k ) ) { case 0 : // always
case 12 : // true
m_always = 1 ; // continue processing ( make sure we find a " never " behind " always " )
break ; case 1 : // never
case 10 : // false
m_always = - 1 ; // no need for any further processing
return ; case 2 : // uri
m_uri = IS_USED ; // marks m _ uri as being used
break ; case 3 : // user
m_user = IS_USED ; // marks m _ user as being used
break ; case 4 : // params
if ( v != null ) { m_params = parseValueList ( v ) ; } else { m_params = Collections . emptySet ( ) ; } if ( m_params . contains ( I_CmsResourceLoader . PARAMETER_ELEMENT ) ) { // workaround for element setting by parameter in OpenCms < 6.0
m_element = IS_USED ; m_params . remove ( I_CmsResourceLoader . PARAMETER_ELEMENT ) ; if ( m_params . size ( ) == 0 ) { m_params = null ; } } break ; case 5 : // no - params
if ( v != null ) { // no - params are present
m_noparams = parseValueList ( v ) ; } else { // never cache with parameters
m_noparams = Collections . emptySet ( ) ; } break ; case 6 : // timeout
m_timeout = Integer . parseInt ( v ) ; break ; case 7 : // session
m_session = parseValueList ( v ) ; if ( m_session . size ( ) <= 0 ) { // session must have at last one variable set
m_parseError = true ; } break ; case 8 : // schemes
m_schemes = parseValueList ( v ) ; break ; case 9 : // ports
Set < String > ports = parseValueList ( v ) ; m_ports = new HashSet < Integer > ( ports . size ( ) ) ; for ( String p : ports ) { try { m_ports . add ( Integer . valueOf ( p ) ) ; } catch ( NumberFormatException e ) { // ignore this number
} } break ; case 11 : // previous parse error - ignore
break ; case 13 : // ip
m_ip = IS_USED ; // marks ip as being used
break ; case 14 : // element
m_element = IS_USED ; break ; case 15 : // locale
m_locale = IS_USED ; break ; case 16 : // encoding
m_encoding = IS_USED ; break ; case 17 : // site
m_site = IS_USED ; break ; case 18 : // attrs
if ( v != null ) { m_attrs = parseValueList ( v ) ; } else { m_attrs = null ; } break ; case 19 : // no - attrs
if ( v != null ) { // no - attrs are present
m_noattrs = parseValueList ( v ) ; } else { // never cache with attributes
m_noattrs = Collections . emptySet ( ) ; } break ; case 20 : // device
m_device = IS_USED ; // marks m _ device as being used
break ; case 21 : // container element
m_containerElement = IS_USED ; break ; default : // unknown directive , throw error
m_parseError = true ; } } } catch ( Exception e ) { // any Exception here indicates a parsing error
if ( LOG . isErrorEnabled ( ) ) { LOG . error ( Messages . get ( ) . getBundle ( ) . key ( Messages . LOG_FLEXCACHEKEY_PARSE_ERROR_1 , e . toString ( ) ) , e ) ; } m_parseError = true ; } if ( m_parseError ) { // If string is invalid set cache to " never "
m_always = - 1 ; }
|
public class TrAXFilter { /** * Parse a document .
* @ param systemId The system identifier as a fully - qualified URI .
* @ throws org . xml . sax . SAXException Any SAX exception , possibly
* wrapping another exception .
* @ throws java . io . IOException An IO exception from the parser ,
* possibly from a byte stream or character stream
* supplied by the application .
* @ see org . xml . sax . XMLReader # parse ( java . lang . String ) */
public void parse ( String systemId ) throws org . xml . sax . SAXException , IOException { } }
|
parse ( new InputSource ( systemId ) ) ;
|
public class MapFile { /** * This method attempts to fix a corrupt MapFile by re - creating its index .
* @ param fs filesystem
* @ param dir directory containing the MapFile data and index
* @ param keyClass key class ( has to be a subclass of Writable )
* @ param valueClass value class ( has to be a subclass of Writable )
* @ param dryrun do not perform any changes , just report what needs to be done
* @ return number of valid entries in this MapFile , or - 1 if no fixing was needed
* @ throws Exception */
public static long fix ( FileSystem fs , Path dir , Class < ? extends Writable > keyClass , Class < ? extends Writable > valueClass , boolean dryrun , Configuration conf ) throws Exception { } }
|
String dr = ( dryrun ? "[DRY RUN ] " : "" ) ; Path data = new Path ( dir , DATA_FILE_NAME ) ; Path index = new Path ( dir , INDEX_FILE_NAME ) ; int indexInterval = 128 ; if ( ! fs . exists ( data ) ) { // there ' s nothing we can do to fix this !
throw new Exception ( dr + "Missing data file in " + dir + ", impossible to fix this." ) ; } if ( fs . exists ( index ) ) { // no fixing needed
return - 1 ; } SequenceFile . Reader dataReader = new SequenceFile . Reader ( fs , data , conf ) ; if ( ! dataReader . getKeyClass ( ) . equals ( keyClass ) ) { throw new Exception ( dr + "Wrong key class in " + dir + ", expected" + keyClass . getName ( ) + ", got " + dataReader . getKeyClass ( ) . getName ( ) ) ; } if ( ! dataReader . getValueClass ( ) . equals ( valueClass ) ) { throw new Exception ( dr + "Wrong value class in " + dir + ", expected" + valueClass . getName ( ) + ", got " + dataReader . getValueClass ( ) . getName ( ) ) ; } long cnt = 0L ; Writable key = ReflectionUtils . newInstance ( keyClass , conf ) ; Writable value = ReflectionUtils . newInstance ( valueClass , conf ) ; SequenceFile . Writer indexWriter = null ; if ( ! dryrun ) indexWriter = SequenceFile . createWriter ( fs , conf , index , keyClass , LongWritable . class ) ; try { long pos = 0L ; LongWritable position = new LongWritable ( ) ; while ( dataReader . next ( key , value ) ) { cnt ++ ; if ( cnt % indexInterval == 0 ) { position . set ( pos ) ; if ( ! dryrun ) indexWriter . append ( key , position ) ; } pos = dataReader . getPosition ( ) ; } } catch ( Throwable t ) { // truncated data file . swallow it .
} dataReader . close ( ) ; if ( ! dryrun ) indexWriter . close ( ) ; return cnt ;
|
public class AtomCache { /** * Returns the representation of a { @ link ScopDomain } as a BioJava { @ link Structure } object .
* @ param domain
* a SCOP domain
* @ return a Structure object
* @ throws IOException
* @ throws StructureException */
public Structure getStructureForDomain ( ScopDomain domain ) throws IOException , StructureException { } }
|
return getStructureForDomain ( domain , ScopFactory . getSCOP ( ) ) ;
|
public class Scope { /** * 获取变量
* 自内向外在作用域栈中查找变量 , 返回最先找到的变量 */
public Object get ( Object key ) { } }
|
for ( Scope cur = this ; cur != null ; cur = cur . parent ) { // if ( cur . data ! = null & & cur . data . containsKey ( key ) ) {
// return cur . data . get ( key ) ;
if ( cur . data != null ) { Object ret = cur . data . get ( key ) ; if ( ret != null ) { return ret ; } if ( cur . data . containsKey ( key ) ) { return null ; } } } // return null ;
return sharedObjectMap != null ? sharedObjectMap . get ( key ) : null ;
|
public class PrintfFormat { /** * Format a double .
* @ param x The double to format .
* @ return The formatted String .
* @ exception IllegalArgumentException if the
* conversion character is c , C , s , S ,
* d , d , x , X , or o . */
public String sprintf ( double x ) throws IllegalArgumentException { } }
|
Enumeration e = vFmt . elements ( ) ; ConversionSpecification cs = null ; char c = 0 ; StringBuffer sb = new StringBuffer ( ) ; while ( e . hasMoreElements ( ) ) { cs = ( ConversionSpecification ) e . nextElement ( ) ; c = cs . getConversionCharacter ( ) ; if ( c == '\0' ) sb . append ( cs . getLiteral ( ) ) ; else if ( c == '%' ) sb . append ( "%" ) ; else sb . append ( cs . internalsprintf ( x ) ) ; } return sb . toString ( ) ;
|
public class IonRawBinaryWriter { /** * short - hand for array of bytes - - useful for static definitions . */
private static byte [ ] bytes ( int ... vals ) { } }
|
final byte [ ] octets = new byte [ vals . length ] ; for ( int i = 0 ; i < vals . length ; i ++ ) { octets [ i ] = ( byte ) vals [ i ] ; } return octets ;
|
public class NonTxDistributionInterceptor { /** * we ' re assuming that this function is ran on primary owner of given segments */
private Map < Address , IntSet > backupOwnersOfSegments ( ConsistentHash ch , IntSet segments ) { } }
|
Map < Address , IntSet > map = new HashMap < > ( ch . getMembers ( ) . size ( ) ) ; if ( ch . isReplicated ( ) ) { for ( Address member : ch . getMembers ( ) ) { map . put ( member , segments ) ; } map . remove ( rpcManager . getAddress ( ) ) ; } else { int numSegments = ch . getNumSegments ( ) ; for ( PrimitiveIterator . OfInt iter = segments . iterator ( ) ; iter . hasNext ( ) ; ) { int segment = iter . nextInt ( ) ; List < Address > owners = ch . locateOwnersForSegment ( segment ) ; for ( int i = 1 ; i < owners . size ( ) ; ++ i ) { map . computeIfAbsent ( owners . get ( i ) , o -> IntSets . mutableEmptySet ( numSegments ) ) . set ( segment ) ; } } } return map ;
|
public class HeapCache { /** * Remove the entry from the hash and the replacement list .
* There is a race condition to catch : The eviction may run
* in a parallel thread and may have already selected this
* entry . */
protected boolean removeEntry ( Entry e ) { } }
|
int hc = extractModifiedHash ( e ) ; boolean _removed ; OptimisticLock l = hash . getSegmentLock ( hc ) ; long _stamp = l . writeLock ( ) ; try { _removed = hash . removeWithinLock ( e , hc ) ; e . setGone ( ) ; if ( _removed ) { eviction . submitWithoutEviction ( e ) ; } } finally { l . unlockWrite ( _stamp ) ; } checkForHashCodeChange ( e ) ; timing . cancelExpiryTimer ( e ) ; return _removed ;
|
public class StorageManager { /** * The operation mode of the Cassandra node .
* @ return one of : NORMAL , CLIENT , JOINING , LEAVING , DECOMMISSIONED , MOVING ,
* DRAINING , or DRAINED . */
@ Override public String getOperationMode ( ) { } }
|
String mode = getNode ( ) . getOperationMode ( ) ; if ( mode != null && NORMAL . equals ( mode . toUpperCase ( ) ) ) { mode = NORMAL ; } return mode ;
|
public class ObservableValueAssert { /** * Verifies that the actual observable has the same value as the given observable .
* @ param expectedValue the observable value to compare with the actual observables current value .
* @ return { @ code this } assertion instance . */
public ObservableValueAssert < T > hasSameValue ( ObservableValue < T > expectedValue ) { } }
|
new ObservableValueAssertions < > ( actual ) . hasSameValue ( expectedValue ) ; return this ;
|
public class ResourceXMLParser { /** * Given xml Node and EntitySet , parse the entity defined in the Node
* @ param node DOM node
* @ param set entity set holder
* @ return parsed Entity object
* @ throws ResourceXMLParserException if entity definition was previously found , or another error occurs */
private Entity parseEnt ( final Node node , final EntitySet set ) throws ResourceXMLParserException { } }
|
final Entity ent = parseResourceRef ( set , node ) ; ent . setResourceType ( node . getName ( ) ) ; parseEntProperties ( ent , node ) ; parseEntSubAttributes ( ent , node ) ; return ent ;
|
public class CmsSearchStateParameters { /** * Returns the parameter key of the facet with the given name .
* @ param facet the facet ' s name .
* @ return the parameter key for the facet . */
String getFacetParamKey ( String facet ) { } }
|
I_CmsSearchControllerFacetField fieldFacet = m_result . getController ( ) . getFieldFacets ( ) . getFieldFacetController ( ) . get ( facet ) ; if ( fieldFacet != null ) { return fieldFacet . getConfig ( ) . getParamKey ( ) ; } I_CmsSearchControllerFacetRange rangeFacet = m_result . getController ( ) . getRangeFacets ( ) . getRangeFacetController ( ) . get ( facet ) ; if ( rangeFacet != null ) { return rangeFacet . getConfig ( ) . getParamKey ( ) ; } I_CmsSearchControllerFacetQuery queryFacet = m_result . getController ( ) . getQueryFacet ( ) ; if ( ( queryFacet != null ) && queryFacet . getConfig ( ) . getName ( ) . equals ( facet ) ) { return queryFacet . getConfig ( ) . getParamKey ( ) ; } // Facet did not exist
LOG . warn ( Messages . get ( ) . getBundle ( ) . key ( Messages . LOG_FACET_NOT_CONFIGURED_1 , facet ) , new Throwable ( ) ) ; return null ;
|
public class RefreshedEvent { /** * Fires a refreshed event on all registered handlers in the handler manager . If
* no such handlers exist , this method will do nothing .
* @ param source the source of the handlers */
public static void fire ( final HasRefreshedHandlers source ) { } }
|
if ( TYPE != null ) { RefreshedEvent event = new RefreshedEvent ( ) ; source . fireEvent ( event ) ; }
|
public class DefaultSessionFactory { /** * Configures this factory to use a single identity authenticated by the
* supplied private key and pass phrase . The private key should be the path
* to a private key file in OpenSSH format . Clears out the current
* { @ link IdentityRepository } before adding this key .
* @ param privateKey
* Path to a private key file
* @ param passPhrase
* Pass phrase for private key
* @ throws JSchException
* If the key is invalid */
public void setIdentityFromPrivateKey ( String privateKey , String passPhrase ) throws JSchException { } }
|
clearIdentityRepository ( ) ; jsch . addIdentity ( privateKey , passPhrase ) ;
|
public class UndertowServerEngineFactory { /** * Retrieve a previously configured HttpServerEngine for the
* given port . If none exists , this call returns null . */
public synchronized UndertowServerEngine retrieveHttpServerEngine ( int port ) { } }
|
UndertowServerEngine engine = null ; synchronized ( portMap ) { engine = portMap . get ( port ) ; } return engine ;
|
public class StorePackageImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
@ Override public EEnum getActionState ( ) { } }
|
if ( actionStateEEnum == null ) { actionStateEEnum = ( EEnum ) EPackage . Registry . INSTANCE . getEPackage ( StorePackage . eNS_URI ) . getEClassifiers ( ) . get ( 42 ) ; } return actionStateEEnum ;
|
public class WellRenderer { /** * This methods generates the HTML code of the current b : well .
* < code > encodeBegin < / code > generates the start of the component . After the ,
* the JSF framework calls < code > encodeChildren ( ) < / code > to generate the
* HTML code between the beginning and the end of the component . For
* instance , in the case of a panel component the content of the panel is
* generated by < code > encodeChildren ( ) < / code > . After that ,
* < code > encodeEnd ( ) < / code > is called to generate the rest of the HTML code .
* @ param context
* the FacesContext .
* @ param component
* the current b : well .
* @ throws IOException
* thrown if something goes wrong when writing the HTML code . */
@ Override public void encodeBegin ( FacesContext context , UIComponent component ) throws IOException { } }
|
if ( ! component . isRendered ( ) ) { return ; } Well well = ( Well ) component ; ResponseWriter rw = context . getResponseWriter ( ) ; String clientId = well . getClientId ( ) ; String sz = well . getSize ( ) ; rw . startElement ( "div" , well ) ; rw . writeAttribute ( "id" , clientId , "id" ) ; String style = well . getStyle ( ) ; if ( null != style ) { rw . writeAttribute ( "style" , style , null ) ; } String styleClass = well . getStyleClass ( ) ; if ( null == styleClass ) styleClass = "" ; else styleClass = " " + styleClass ; styleClass += Responsive . getResponsiveStyleClass ( well , false ) ; Tooltip . generateTooltip ( context , well , rw ) ; if ( sz != null ) { rw . writeAttribute ( "class" , "well well-" + sz + styleClass , "class" ) ; } else { rw . writeAttribute ( "class" , "well" + styleClass , "class" ) ; } beginDisabledFieldset ( well , rw ) ; Object value = well . getValue ( ) ; if ( null != value ) { rw . writeText ( String . valueOf ( value ) , null ) ; }
|
public class Tile { /** * Gets the geographic extend of this Tile as a BoundingBox .
* @ return boundaries of this tile . */
public BoundingBox getBoundingBox ( ) { } }
|
if ( this . boundingBox == null ) { double minLatitude = Math . max ( MercatorProjection . LATITUDE_MIN , MercatorProjection . tileYToLatitude ( tileY + 1 , zoomLevel ) ) ; double minLongitude = Math . max ( - 180 , MercatorProjection . tileXToLongitude ( this . tileX , zoomLevel ) ) ; double maxLatitude = Math . min ( MercatorProjection . LATITUDE_MAX , MercatorProjection . tileYToLatitude ( this . tileY , zoomLevel ) ) ; double maxLongitude = Math . min ( 180 , MercatorProjection . tileXToLongitude ( tileX + 1 , zoomLevel ) ) ; if ( maxLongitude == - 180 ) { // fix for dateline crossing , where the right tile starts at - 180 and causes an invalid bbox
maxLongitude = 180 ; } this . boundingBox = new BoundingBox ( minLatitude , minLongitude , maxLatitude , maxLongitude ) ; } return this . boundingBox ;
|
public class ImageLoaderEngine { /** * Submits task to execution pool */
void submit ( final LoadAndDisplayImageTask task ) { } }
|
taskDistributor . execute ( new Runnable ( ) { @ Override public void run ( ) { File image = configuration . diskCache . get ( task . getLoadingUri ( ) ) ; boolean isImageCachedOnDisk = image != null && image . exists ( ) ; initExecutorsIfNeed ( ) ; if ( isImageCachedOnDisk ) { taskExecutorForCachedImages . execute ( task ) ; } else { taskExecutor . execute ( task ) ; } } } ) ;
|
public class ReceiveMessageBuilder { /** * Expect this message payload data in received message .
* @ param payloadResource
* @ param charset
* @ return */
public T payload ( Resource payloadResource , Charset charset ) { } }
|
try { setPayload ( FileUtils . readToString ( payloadResource , charset ) ) ; } catch ( IOException e ) { throw new CitrusRuntimeException ( "Failed to read payload resource" , e ) ; } return self ;
|
public class JobsImpl { /** * Lists the execution status of the Job Preparation and Job Release task for the specified job across the compute nodes where the job has run .
* This API returns the Job Preparation and Job Release task status on all compute nodes that have run the Job Preparation or Job Release task . This includes nodes which have since been removed from the pool . If this API is invoked on a job which has no Job Preparation or Job Release task , the Batch service returns HTTP status code 409 ( Conflict ) with an error code of JobPreparationTaskNotSpecified .
* @ param nextPageLink The NextLink from the previous successful call to List operation .
* @ param jobListPreparationAndReleaseTaskStatusNextOptions Additional parameters for the operation
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ throws BatchErrorException thrown if the request is rejected by server
* @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
* @ return the PagedList & lt ; JobPreparationAndReleaseTaskExecutionInformation & gt ; object if successful . */
public PagedList < JobPreparationAndReleaseTaskExecutionInformation > listPreparationAndReleaseTaskStatusNext ( final String nextPageLink , final JobListPreparationAndReleaseTaskStatusNextOptions jobListPreparationAndReleaseTaskStatusNextOptions ) { } }
|
ServiceResponseWithHeaders < Page < JobPreparationAndReleaseTaskExecutionInformation > , JobListPreparationAndReleaseTaskStatusHeaders > response = listPreparationAndReleaseTaskStatusNextSinglePageAsync ( nextPageLink , jobListPreparationAndReleaseTaskStatusNextOptions ) . toBlocking ( ) . single ( ) ; return new PagedList < JobPreparationAndReleaseTaskExecutionInformation > ( response . body ( ) ) { @ Override public Page < JobPreparationAndReleaseTaskExecutionInformation > nextPage ( String nextPageLink ) { return listPreparationAndReleaseTaskStatusNextSinglePageAsync ( nextPageLink , jobListPreparationAndReleaseTaskStatusNextOptions ) . toBlocking ( ) . single ( ) . body ( ) ; } } ;
|
public class CmsSecurityManager { /** * Copies a resource . < p >
* You must ensure that the destination path is an absolute , valid and
* existing VFS path . Relative paths from the source are currently not supported . < p >
* The copied resource will always be locked to the current user
* after the copy operation . < p >
* In case the target resource already exists , it is overwritten with the
* source resource . < p >
* The < code > siblingMode < / code > parameter controls how to handle siblings
* during the copy operation . < br >
* Possible values for this parameter are : < br >
* < ul >
* < li > < code > { @ link org . opencms . file . CmsResource # COPY _ AS _ NEW } < / code > < / li >
* < li > < code > { @ link org . opencms . file . CmsResource # COPY _ AS _ SIBLING } < / code > < / li >
* < li > < code > { @ link org . opencms . file . CmsResource # COPY _ PRESERVE _ SIBLING } < / code > < / li >
* < / ul > < p >
* @ param context the current request context
* @ param source the resource to copy
* @ param destination the name of the copy destination with complete path
* @ param siblingMode indicates how to handle siblings during copy
* @ throws CmsException if something goes wrong
* @ throws CmsSecurityException if resource could not be copied
* @ see CmsObject # copyResource ( String , String , CmsResource . CmsResourceCopyMode )
* @ see org . opencms . file . types . I _ CmsResourceType # copyResource ( CmsObject , CmsSecurityManager , CmsResource , String , CmsResource . CmsResourceCopyMode ) */
public void copyResource ( CmsRequestContext context , CmsResource source , String destination , CmsResource . CmsResourceCopyMode siblingMode ) throws CmsException , CmsSecurityException { } }
|
CmsDbContext dbc = m_dbContextFactory . getDbContext ( context ) ; try { checkOfflineProject ( dbc ) ; checkPermissions ( dbc , source , CmsPermissionSet . ACCESS_READ , true , CmsResourceFilter . ALL ) ; if ( source . isFolder ( ) && destination . startsWith ( source . getRootPath ( ) ) ) { throw new CmsVfsException ( Messages . get ( ) . container ( Messages . ERR_RECURSIVE_INCLUSION_2 , dbc . removeSiteRoot ( source . getRootPath ( ) ) , dbc . removeSiteRoot ( destination ) ) ) ; } // target permissions will be checked later
m_driverManager . copyResource ( dbc , source , destination , siblingMode ) ; } catch ( Exception e ) { dbc . report ( null , Messages . get ( ) . container ( Messages . ERR_COPY_RESOURCE_2 , dbc . removeSiteRoot ( source . getRootPath ( ) ) , dbc . removeSiteRoot ( destination ) ) , e ) ; } finally { dbc . clear ( ) ; }
|
public class FileTransferNegotiator { /** * Checks to see if all file transfer related services are enabled on the
* connection .
* @ param connection The connection to check
* @ return True if all related services are enabled , false if they are not . */
public static boolean isServiceEnabled ( final XMPPConnection connection ) { } }
|
ServiceDiscoveryManager manager = ServiceDiscoveryManager . getInstanceFor ( connection ) ; List < String > namespaces = new ArrayList < > ( ) ; namespaces . addAll ( Arrays . asList ( NAMESPACE ) ) ; namespaces . add ( DataPacketExtension . NAMESPACE ) ; if ( ! IBB_ONLY ) { namespaces . add ( Bytestream . NAMESPACE ) ; } for ( String namespace : namespaces ) { if ( ! manager . includesFeature ( namespace ) ) { return false ; } } return true ;
|
public class ConfigurationReader { /** * Parses the sql parameter section .
* @ param node
* Reference to the current used xml node
* @ param config
* Reference to the ConfigSettings */
private void parseSQLConfig ( final Node node , final ConfigSettings config ) { } }
|
String name , value ; Node nnode ; NodeList list = node . getChildNodes ( ) ; int length = list . getLength ( ) ; for ( int i = 0 ; i < length ; i ++ ) { nnode = list . item ( i ) ; name = nnode . getNodeName ( ) . toUpperCase ( ) ; if ( name . equals ( KEY_HOST ) ) { value = nnode . getChildNodes ( ) . item ( 0 ) . getNodeValue ( ) ; config . setConfigParameter ( ConfigurationKeys . SQL_HOST , value ) ; } else if ( name . equals ( KEY_DATABASE ) ) { value = nnode . getChildNodes ( ) . item ( 0 ) . getNodeValue ( ) ; config . setConfigParameter ( ConfigurationKeys . SQL_DATABASE , value ) ; } else if ( name . equals ( KEY_USER ) ) { value = nnode . getChildNodes ( ) . item ( 0 ) . getNodeValue ( ) ; config . setConfigParameter ( ConfigurationKeys . SQL_USERNAME , value ) ; } else if ( name . equals ( KEY_PASSWORD ) ) { value = nnode . getChildNodes ( ) . item ( 0 ) . getNodeValue ( ) ; config . setConfigParameter ( ConfigurationKeys . SQL_PASSWORD , value ) ; } }
|
public class Field { /** * Returns the set of fields indexed by soy accessor name for the given type . */
public static < T extends Field > ImmutableMap < String , T > getFieldsForType ( Descriptor descriptor , Set < FieldDescriptor > extensions , Factory < T > factory ) { } }
|
ImmutableMap . Builder < String , T > fields = ImmutableMap . builder ( ) ; for ( FieldDescriptor fieldDescriptor : descriptor . getFields ( ) ) { if ( ProtoUtils . shouldJsIgnoreField ( fieldDescriptor ) ) { continue ; } T field = factory . create ( fieldDescriptor ) ; fields . put ( field . getName ( ) , field ) ; } SetMultimap < String , T > extensionsBySoyName = MultimapBuilder . hashKeys ( ) . hashSetValues ( ) . build ( ) ; for ( FieldDescriptor extension : extensions ) { T field = factory . create ( extension ) ; extensionsBySoyName . put ( field . getName ( ) , field ) ; } for ( Map . Entry < String , Set < T > > group : Multimaps . asMap ( extensionsBySoyName ) . entrySet ( ) ) { Set < T > ambiguousFields = group . getValue ( ) ; String fieldName = group . getKey ( ) ; if ( ambiguousFields . size ( ) == 1 ) { fields . put ( fieldName , Iterables . getOnlyElement ( ambiguousFields ) ) ; } else { T value = factory . createAmbiguousFieldSet ( ambiguousFields ) ; logger . severe ( "Proto " + descriptor . getFullName ( ) + " has multiple extensions with the name \"" + fieldName + "\": " + fullFieldNames ( ambiguousFields ) + "\nThis field will not be accessible from soy" ) ; fields . put ( fieldName , value ) ; } } return fields . build ( ) ;
|
public class JCudaDriver { /** * Binds an array as a texture reference .
* < pre >
* CUresult cuTexRefSetArray (
* CUtexref hTexRef ,
* CUarray hArray ,
* unsigned int Flags )
* < / pre >
* < div >
* < p > Binds an array as a texture reference .
* Binds the CUDA array < tt > hArray < / tt > to the texture reference < tt > hTexRef < / tt > . Any previous address or CUDA array state associated with
* the texture reference is superseded by this function . < tt > Flags < / tt >
* must be set to CU _ TRSA _ OVERRIDE _ FORMAT . Any CUDA array previously bound
* to < tt > hTexRef < / tt > is unbound .
* < / div >
* @ param hTexRef Texture reference to bind
* @ param hArray Array to bind
* @ param Flags Options ( must be CU _ TRSA _ OVERRIDE _ FORMAT )
* @ return CUDA _ SUCCESS , CUDA _ ERROR _ DEINITIALIZED , CUDA _ ERROR _ NOT _ INITIALIZED ,
* CUDA _ ERROR _ INVALID _ CONTEXT , CUDA _ ERROR _ INVALID _ VALUE
* @ see JCudaDriver # cuTexRefSetAddress
* @ see JCudaDriver # cuTexRefSetAddress2D
* @ see JCudaDriver # cuTexRefSetAddressMode
* @ see JCudaDriver # cuTexRefSetFilterMode
* @ see JCudaDriver # cuTexRefSetFlags
* @ see JCudaDriver # cuTexRefSetFormat
* @ see JCudaDriver # cuTexRefGetAddress
* @ see JCudaDriver # cuTexRefGetAddressMode
* @ see JCudaDriver # cuTexRefGetArray
* @ see JCudaDriver # cuTexRefGetFilterMode
* @ see JCudaDriver # cuTexRefGetFlags
* @ see JCudaDriver # cuTexRefGetFormat */
public static int cuTexRefSetArray ( CUtexref hTexRef , CUarray hArray , int Flags ) { } }
|
return checkResult ( cuTexRefSetArrayNative ( hTexRef , hArray , Flags ) ) ;
|
public class XmlParser { /** * Parse XML from a String . */
public synchronized Node parse ( String xml ) throws IOException , SAXException { } }
|
ByteArrayInputStream is = new ByteArrayInputStream ( xml . getBytes ( ) ) ; return parse ( is ) ;
|
public class Utils { /** * Throws a { @ link NullPointerException } if the given map is null or empty . */
@ NonNull public static < K , V > Map < K , V > assertNotNullOrEmpty ( Map < K , V > data , @ Nullable String name ) { } }
|
if ( isNullOrEmpty ( data ) ) { throw new NullPointerException ( name + " cannot be null or empty" ) ; } return data ;
|
public class BinaryComparable { /** * Compare bytes from { # getBytes ( ) } .
* @ see org . apache . hadoop . io . WritableComparator # compareBytes ( byte [ ] , int , int , byte [ ] , int , int ) */
public int compareTo ( BinaryComparable other ) { } }
|
if ( this == other ) return 0 ; return WritableComparator . compareBytes ( getBytes ( ) , 0 , getLength ( ) , other . getBytes ( ) , 0 , other . getLength ( ) ) ;
|
public class IPAddressSegment { /** * Check that the range resulting from the mask is contiguous , otherwise we cannot represent it .
* For instance , for the range 0 to 3 ( bits are 00 to 11 ) , if we mask all 4 numbers from 0 to 3 with 2 ( ie bits are 10 ) ,
* then we are left with 1 and 3 . 2 is not included . So we cannot represent 1 and 3 as a contiguous range .
* The underlying rule is that mask bits that are 0 must be above the resulting range in each segment .
* Any bit in the mask that is 0 must not fall below any bit in the masked segment range that is different between low and high .
* Any network mask must eliminate the entire segment range . Any host mask is fine .
* @ param maskValue
* @ param segmentPrefixLength
* @ return
* @ throws PrefixLenException */
public boolean isMaskCompatibleWithRange ( int maskValue , Integer segmentPrefixLength ) throws PrefixLenException { } }
|
if ( ! isMultiple ( ) ) { return true ; } return super . isMaskCompatibleWithRange ( maskValue , segmentPrefixLength , getNetwork ( ) . getPrefixConfiguration ( ) . allPrefixedAddressesAreSubnets ( ) ) ;
|
public class Files { /** * Convert qualified name to platform specific path and add given extension . Uses { @ link # dot2path ( String ) } to convert
* < code > qualifiedName < / code > to file path then add give < code > fileExtension < / code > . Is legal for
* < code > fileExtension < / code > to start with dot .
* < pre >
* js . net . Transaction java - & gt ; js / net / Transaction . java or js \ net \ Transaction . java
* js . net . Transaction . java - & gt ; js / net / Transaction . java or js \ net \ Transaction . java
* < / pre >
* Returns null if < code > qualifiedName < / code > parameter is null . If < code > fileExtension < / code > parameter is null
* resulting path has no extension .
* @ param qualifiedName qualified name ,
* @ param fileExtension requested file extension , leading dot accepted .
* @ return resulting file path or null . */
public static String dot2path ( String qualifiedName , String fileExtension ) { } }
|
// Documented contract: a null qualified name yields a null path.
if ( qualifiedName == null ) {
    return null;
}
StringBuilder path = new StringBuilder();
// Convert the dotted name to a platform path via the single-argument overload.
path.append(dot2path(qualifiedName));
// Append the extension when present. Guard against an empty (non-null) extension:
// the previous code called charAt(0) unconditionally, throwing
// StringIndexOutOfBoundsException for "".
if ( fileExtension != null && !fileExtension.isEmpty() ) {
    // Tolerate extensions supplied with or without the leading dot.
    if ( fileExtension.charAt(0) != '.' ) {
        path.append('.');
    }
    path.append(fileExtension);
}
return path.toString();
|
public class AmazonInspectorClient { /** * Creates a new assessment target using the ARN of the resource group that is generated by
* < a > CreateResourceGroup < / a > . If resourceGroupArn is not specified , all EC2 instances in the current AWS account
* and region are included in the assessment target . If the < a
* href = " https : / / docs . aws . amazon . com / inspector / latest / userguide / inspector _ slr . html " > service - linked role < / a > isn ’ t
* already registered , this action also creates and registers a service - linked role to grant Amazon Inspector access
* to AWS Services needed to perform security assessments . You can create up to 50 assessment targets per AWS
* account . You can run up to 500 concurrent agents per AWS account . For more information , see < a
* href = " https : / / docs . aws . amazon . com / inspector / latest / userguide / inspector _ applications . html " > Amazon Inspector
* Assessment Targets < / a > .
* @ param createAssessmentTargetRequest
* @ return Result of the CreateAssessmentTarget operation returned by the service .
* @ throws InternalException
* Internal server error .
* @ throws InvalidInputException
* The request was rejected because an invalid or out - of - range value was supplied for an input parameter .
* @ throws LimitExceededException
* The request was rejected because it attempted to create resources beyond the current AWS account limits .
* The error code describes the limit exceeded .
* @ throws AccessDeniedException
* You do not have required permissions to access the requested resource .
* @ throws NoSuchEntityException
* The request was rejected because it referenced an entity that does not exist . The error code describes
* the entity .
* @ throws InvalidCrossAccountRoleException
* Amazon Inspector cannot assume the cross - account role that it needs to list your EC2 instances during the
* assessment run .
* @ throws ServiceTemporarilyUnavailableException
* The serice is temporary unavailable .
* @ sample AmazonInspector . CreateAssessmentTarget
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / inspector - 2016-02-16 / CreateAssessmentTarget "
* target = " _ top " > AWS API Documentation < / a > */
@ Override public CreateAssessmentTargetResult createAssessmentTarget ( CreateAssessmentTargetRequest request ) { } }
|
// Run request-mutating pre-execution hooks (handler chain), then dispatch the call.
request = beforeClientExecution ( request ) ; return executeCreateAssessmentTarget ( request ) ;
|
public class AdvancedRecyclerArrayAdapter { /** * Swaps the data , removes all existing data and replaces them with a new set of data . { @ link
* DiffUtil } will coordinate to update notifications . Make sure { @ link # getItemId ( Object ) } is
* implemented correctly .
* @ param newObjects new set of data
* @ see # isContentTheSame ( Object , Object )
* @ see # isItemTheSame ( Object , Object ) */
@ SuppressWarnings ( "ConstantConditions" ) public void swap ( @ Nullable final List < T > newObjects ) { } }
|
// A null list means "remove everything".
if ( newObjects == null ) { clear ( ) ; } else { synchronized ( mLock ) {
// Compute the minimal set of change operations between the current and new data sets.
final DiffUtil . DiffResult result = DiffUtil . calculateDiff ( new DiffUtil . Callback ( ) {
// Content comparison delegates to the adapter's isContentTheSame hook.
@ Override public boolean areContentsTheSame ( final int oldItemPosition , final int newItemPosition ) { final T oldItem = mObjects . get ( oldItemPosition ) ; final T newItem = newObjects . get ( newItemPosition ) ; return isContentTheSame ( oldItem , newItem ) ; }
// Identity comparison delegates to the adapter's isItemTheSame hook.
@ Override public boolean areItemsTheSame ( final int oldItemPosition , final int newItemPosition ) { final T oldItem = mObjects . get ( oldItemPosition ) ; final T newItem = newObjects . get ( newItemPosition ) ; return isItemTheSame ( oldItem , newItem ) ; }
@ Override public int getNewListSize ( ) { return newObjects . size ( ) ; } @ Override public int getOldListSize ( ) { return mObjects . size ( ) ; } } ) ;
// Swap the backing data first, then replay the recorded diff as adapter notifications.
mObjects . clear ( ) ; mObjects . addAll ( newObjects ) ; result . dispatchUpdatesTo ( this ) ; } }
|
public class SearchableTextComponent { /** * Add highlights with the given color to the text component for all
* the given points
* @ param points The points , containing start and end indices
* @ param color The color */
private void addHighlights ( Collection < ? extends Point > points , Color color ) { } }
|
// Remove any existing highlights for these points first so they are not duplicated.
removeHighlights ( points ) ;
// Install the new highlights and remember their handles for later removal.
Map < Point , Object > newHighlights = JTextComponents . addHighlights ( textComponent , points , color ) ; highlights . putAll ( newHighlights ) ;
|
public class AbstractRequestSingleton { /** * Get the singleton object in the current request scope , using the passed
* class . If the singleton is not yet instantiated , a new instance is created .
* @ param < T >
* The type to be returned
* @ param aClass
* The class to be used . May not be < code > null < / code > . The class must
* be public as needs to have a public no - argument constructor .
* @ return The singleton object and never < code > null < / code > . */
@ Nonnull public static final < T extends AbstractRequestSingleton > T getRequestSingleton ( @ Nonnull final Class < T > aClass ) { } }
|
// Resolve (and, per the 'true' flag, create if needed) the request scope, then
// fetch or lazily instantiate the singleton within it.
return getSingleton ( _getStaticScope ( true ) , aClass ) ;
|
public class Types { /** * Gets the boxed type of a class
* @ param type The type
* @ return The boxed type */
public static Type boxedType ( Type type ) { } }
|
if ( type instanceof Class < ? > ) { return boxedClass ( ( Class < ? > ) type ) ; } else { return type ; }
|
public class ControllerRunner { /** * Injects FreeMarker tags with dependencies from Guice module . */
private void injectFreemarkerTags ( ) { } }
|
// Inject at most once per runner instance.
if ( ! tagsInjected ) { AbstractFreeMarkerConfig freeMarkerConfig = Configuration . getFreeMarkerConfig ( ) ; Injector injector = Configuration . getInjector ( ) ;
// Flag is set before the null checks, so a missing injector/config is not retried later.
tagsInjected = true ; if ( injector == null || freeMarkerConfig == null ) { return ; } freeMarkerConfig . inject ( injector ) ; }
|
public class CleaneLingSolver { /** * Backward subsumes from clause .
* @ param c the clause
* @ param ignore the literal to ignore */
private void backward ( final CLClause c , final int ignore ) { } }
|
// Select a pivot literal of c (skipping 'ignore' and false-assigned literals); the pivot's
// occurrence list is then scanned for clauses that c may subsume or strengthen.
int minlit = 0 ; int minoccs = Integer . MAX_VALUE ; int litoccs ; this . stats . steps ++ ; for ( int i = 0 ; i < c . lits ( ) . size ( ) ; i ++ ) { final int lit = c . lits ( ) . get ( i ) ; if ( lit == ignore ) { continue ; } if ( val ( lit ) < 0 ) { continue ; } litoccs = occs ( lit ) . count ( ) ;
// NOTE(review): this guard retains the candidate with the LARGER occurrence count despite
// the "min" naming — confirm against the reference CleaneLing implementation.
if ( minlit != 0 && minoccs >= litoccs ) { continue ; } minlit = lit ; minoccs = litoccs ; }
// Bail out when the occurrence count exceeds the configured backward-subsumption limit.
if ( minoccs >= this . config . bwocclim ) { return ; } assert minlit != 0 ;
// Mark all literals of c so membership tests on candidate clauses are constant-time.
for ( int i = 0 ; i < c . lits ( ) . size ( ) ; i ++ ) { mark ( c . lits ( ) . get ( i ) ) ; } final CLOccs os = occs ( minlit ) ; for ( final CLClause d : os ) { if ( d == c ) { continue ; }
// 'count' starts at the number of marked literals (seen-stack size) still to match in d;
// 'negated' records a single literal of d whose negation occurs in c.
int lit ; int count = this . seen . size ( ) ; int negated = 0 ; this . stats . steps ++ ; for ( int p = 0 ; count != 0 && p < d . lits ( ) . size ( ) ; p ++ ) { lit = d . lits ( ) . get ( p ) ; final int m = marked ( lit ) ; if ( m == 0 ) { continue ; } assert count > 0 ; count -- ; if ( m > 0 ) { continue ; } assert m < 0 ;
// A second negated match disqualifies d entirely.
if ( negated != 0 ) { count = Integer . MAX_VALUE ; break ; } negated = lit ; } if ( count != 0 ) { continue ; }
// All of c's literals accounted for: either strengthen d by removing 'negated',
// or d is fully subsumed by c and can be dumped.
if ( negated != 0 ) { this . tostrengthen . push ( new Pair < > ( d , negated ) ) ; this . stats . backwardStrengthened ++ ; } else { this . stats . backwardSubsumed ++ ; dumpClause ( d ) ; } } unmark ( ) ;
|
public class SearchProductsResult { /** * The product view aggregations .
* @ return The product view aggregations . */
public java . util . Map < String , java . util . List < ProductViewAggregationValue > > getProductViewAggregations ( ) { } }
|
// Plain accessor; returns the stored map reference (may be null if never set).
return productViewAggregations ;
|
public class Factory { /** * Creates a matrix of all zeros . */
public static DenseMatrix matrix ( int nrows , int ncols ) { } }
|
// Prefer the native NLMatrix implementation when its constructor was resolved at class load.
if ( nlmatrixZeros != null ) {
    try {
        return ( DenseMatrix ) nlmatrixZeros . newInstance ( nrows , ncols ) ;
    } catch ( Exception e ) {
        // Pass the exception as the throwable argument so SLF4J logs the full stack trace;
        // the previous "{}" placeholder form only logged e.toString().
        logger . error ( "Failed to call NLMatrix(int, int)" , e ) ;
    }
}
// Fall back to the pure-Java matrix implementation.
return new JMatrix ( nrows , ncols ) ;
|
public class WritingBenchmark { /** * Benchmarks writing via { @ link FileOutputStream } with using a { @ link ByteBuffer } for buffering .
* @ param configuration
* Configuration with target file
* @ throws IOException
* Failed to write to target file */
@ Benchmark @ BenchmarkMode ( Mode . AverageTime ) public void byteBufferFileOutputStream ( final Configuration configuration ) throws IOException { } }
|
// Local buffer batches many small records into few stream writes.
ByteBuffer buffer = ByteBuffer . allocate ( BUFFER_CAPACITY ) ; try ( OutputStream stream = new FileOutputStream ( configuration . file ) ) { for ( long i = 0 ; i < LINES ; ++ i ) {
// Flush the buffer when it can no longer hold another record.
if ( buffer . remaining ( ) < DATA . length ) { stream . write ( buffer . array ( ) , 0 , buffer . position ( ) ) ; buffer . rewind ( ) ; }
// If a single record exceeds the (now empty) buffer, write it straight through.
if ( buffer . remaining ( ) < DATA . length ) { stream . write ( DATA ) ; } else { buffer . put ( DATA ) ; } }
// Flush any trailing buffered bytes before the stream closes.
if ( buffer . position ( ) > 0 ) { stream . write ( buffer . array ( ) , 0 , buffer . position ( ) ) ; } }
|
public class Wxs { /** * 创建一个图文响应 */
public static WxOutMsg respNews ( String to , List < WxArticle > articles ) { } }
|
/* Build a "news"-type outgoing message carrying the given articles. */
WxOutMsg reply = new WxOutMsg ( "news" ) ;
reply . setArticles ( articles ) ;
// The recipient is optional; leave the target unset when none is supplied.
if ( to != null ) {
    reply . setToUserName ( to ) ;
}
return reply ;
|
public class Dial { /** * Simple synchronous dial . The dial method won ' t return until the dial
* starts . Using this method will lockup your UI until the dial starts . For
* better control use the async Dial method below . */
static private void syncDial ( ) { } }
|
// Example flow: build trunk, endpoints and caller IDs, dial, hold the call for 20 s, hang up.
try { PBX pbx = PBXFactory . getActivePBX ( ) ; // The trunk MUST match the section header ( e . g . [ default ] ) that
// appears
// in your / etc / asterisk / sip . d file ( assuming you are using a SIP
// trunk ) .
// The trunk is used to select which SIP trunk to dial through .
Trunk trunk = pbx . buildTrunk ( "default" ) ; // We are going to dial from extension 100
EndPoint from = pbx . buildEndPoint ( TechType . SIP , "100" ) ; // The caller ID to show on extension 100.
CallerID fromCallerID = pbx . buildCallerID ( "100" , "My Phone" ) ; // The caller ID to display on the called parties phone
CallerID toCallerID = pbx . buildCallerID ( "83208100" , "Asterisk Java is calling" ) ; // The party we are going to call .
EndPoint to = pbx . buildEndPoint ( TechType . SIP , trunk , "5551234" ) ; // Trunk is currently ignored so set to null
// The call is dialed and only returns when the call comes up ( it
// doesn ' t wait for the remote end to answer ) .
// Blocks this thread for 20 seconds while the call is up; errors are only printed to stdout.
DialActivity dial = pbx . dial ( from , fromCallerID , to , toCallerID ) ; Call call = dial . getNewCall ( ) ; Thread . sleep ( 20000 ) ; logger . warn ( "Hanging up" ) ; pbx . hangup ( call ) ; } catch ( PBXException | InterruptedException e ) { System . out . println ( e ) ; }
|
public class ConjunctionMatcher { /** * Returns a composite matcher that comprises of this matcher logically ANDed with the specified matcher .
* Note that this method returns a new matcher and does not modify this instance .
* @ param matcher
* the matcher to logically AND to this matcher
* @ return the composed matcher */
public ConjunctionMatcher < T > and ( Matcher < ? super T > matcher ) { } }
|
// Fail fast on null input; a null matcher can never be composed.
requireNonNull ( matcher , "matcher" ) ;
// Immutable composition: return a NEW matcher over the extended matcher list.
return new ConjunctionMatcher < > ( compositeDescription , concat ( matchers , matcher ) ) ;
|
public class SolrHttpRequestHandler { /** * connection - see SOLR - 8453 and SOLR - 8683 */
private void consumeInputFully ( HttpServletRequest req ) { } }
|
// Drain any unread request body so the connection can be reused (see SOLR-8453 / SOLR-8683).
try { ServletInputStream is = req . getInputStream ( ) ; while ( ! is . isFinished ( ) && is . read ( ) != - 1 ) { } } catch ( IOException e ) {
// Best effort: failing to drain is logged but must not fail the response.
log . info ( "Could not consume full client request" , e ) ; }
|
public class cachecontentgroup { /** * Use this API to flush cachecontentgroup resources . */
public static base_responses flush ( nitro_service client , cachecontentgroup resources [ ] ) throws Exception { } }
|
/* Copy only the fields relevant to a flush into fresh resource objects,
   then issue a single bulk "flush" operation. */
base_responses result = null ;
if ( resources != null && resources . length > 0 ) {
    cachecontentgroup toFlush [ ] = new cachecontentgroup [ resources . length ] ;
    for ( int idx = 0 ; idx < resources . length ; idx ++ ) {
        cachecontentgroup entry = new cachecontentgroup ( ) ;
        entry . name = resources [ idx ] . name ;
        entry . query = resources [ idx ] . query ;
        entry . host = resources [ idx ] . host ;
        entry . selectorvalue = resources [ idx ] . selectorvalue ;
        entry . force = resources [ idx ] . force ;
        toFlush [ idx ] = entry ;
    }
    result = perform_operation_bulk_request ( client , toFlush , "flush" ) ;
}
return result ;
|
public class XmlDataProviderImpl { /** * Generates an XML string containing only the nodes filtered by the XPath expression .
* @ param document
* An XML { @ link org . dom4j . Document }
* @ param xpathExpression
* A string indicating the XPath expression to be evaluated .
* @ return A string of XML data with root node named " root " . */
@ SuppressWarnings ( "unchecked" ) private String getFilteredXml ( Document document , String xpathExpression ) { } }
|
logger . entering ( new Object [ ] { document , xpathExpression } ) ;
// Select the matching nodes and re-wrap their XML under a synthetic <root> element.
List < Node > nodes = ( List < Node > ) document . selectNodes ( xpathExpression ) ;
// Pre-size to roughly the original document length to limit StringBuilder growth.
StringBuilder newDocument = new StringBuilder ( document . asXML ( ) . length ( ) ) ; newDocument . append ( "<root>" ) ; for ( Node n : nodes ) { newDocument . append ( n . asXML ( ) ) ; } newDocument . append ( "</root>" ) ; logger . exiting ( newDocument ) ; return newDocument . toString ( ) ;
|
public class MultiMap { /** * Put all contents of map .
* @ param m Map */
public void putAll ( Map m ) { } }
|
// When copying from another MultiMap, clone each value list via super.put so the two
// maps do not share mutable lists; otherwise go through put() so plain values are
// merged using this map's multi-value semantics.
Iterator i = m . entrySet ( ) . iterator ( ) ; boolean multi = m instanceof MultiMap ; while ( i . hasNext ( ) ) { Map . Entry entry = ( Map . Entry ) i . next ( ) ; if ( multi ) super . put ( entry . getKey ( ) , LazyList . clone ( entry . getValue ( ) ) ) ; else put ( entry . getKey ( ) , entry . getValue ( ) ) ; }
|
public class InstancesDistributor { /** * Locates a file in the temporal folder
* @ param conf
* The Hadoop Configuration .
* @ param filename
* The file name .
* @ throws IOException */
private static Path locateFileInCache ( Configuration conf , String filename ) throws IOException { } }
|
// Resolve the file name relative to the distributor's instances folder on the default filesystem.
return new Path ( getInstancesFolder ( FileSystem . get ( conf ) , conf ) , filename ) ;
|
public class AiMaterial { /** * Returns the shading mode . < p >
* If missing , defaults to { @ link AiShadingMode # FLAT }
* @ return the shading mode */
public AiShadingMode getShadingMode ( ) { } }
|
Property p = getProperty ( PropertyKey . SHADING_MODE . m_key ) ;
// Fall back to the registered default (documented as FLAT) when the property is absent or empty.
if ( null == p || null == p . getData ( ) ) { return ( AiShadingMode ) m_defaults . get ( PropertyKey . SHADING_MODE ) ; }
// Otherwise decode the raw integer value stored by the importer.
return AiShadingMode . fromRawValue ( ( Integer ) p . getData ( ) ) ;
|
public class ListBackupPlanVersionsRequestMarshaller { /** * Marshall the given parameter object . */
public void marshall ( ListBackupPlanVersionsRequest listBackupPlanVersionsRequest , ProtocolMarshaller protocolMarshaller ) { } }
|
// A null request is a caller-side programming error.
if ( listBackupPlanVersionsRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try {
// Bind each request field to its protocol location as defined by the *_BINDING descriptors.
protocolMarshaller . marshall ( listBackupPlanVersionsRequest . getBackupPlanId ( ) , BACKUPPLANID_BINDING ) ; protocolMarshaller . marshall ( listBackupPlanVersionsRequest . getNextToken ( ) , NEXTTOKEN_BINDING ) ; protocolMarshaller . marshall ( listBackupPlanVersionsRequest . getMaxResults ( ) , MAXRESULTS_BINDING ) ; } catch ( Exception e ) {
// Wrap any marshalling failure in the SDK's client-side exception type.
throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
|
public class Graph { /** * Runs a ScatterGather iteration on the graph with configuration options .
* @ param scatterFunction the scatter function
* @ param gatherFunction the gather function
* @ param maximumNumberOfIterations maximum number of iterations to perform
* @ param parameters the iteration configuration parameters
* @ return the updated Graph after the scatter - gather iteration has converged or
* after maximumNumberOfIterations . */
public < M > Graph < K , VV , EV > runScatterGatherIteration ( ScatterFunction < K , VV , M , EV > scatterFunction , org . apache . flink . graph . spargel . GatherFunction < K , VV , M > gatherFunction , int maximumNumberOfIterations , ScatterGatherConfiguration parameters ) { } }
|
// Build the scatter-gather operator over this graph's edge set and apply the user configuration.
ScatterGatherIteration < K , VV , M , EV > iteration = ScatterGatherIteration . withEdges ( edges , scatterFunction , gatherFunction , maximumNumberOfIterations ) ; iteration . configure ( parameters ) ;
// Only vertex values are updated by the iteration; the edge set is carried over unchanged.
DataSet < Vertex < K , VV > > newVertices = this . getVertices ( ) . runOperation ( iteration ) ; return new Graph < > ( newVertices , this . edges , this . context ) ;
|
public class CommonOps_DDF5 { /** * Extracts all diagonal elements from ' input ' and places them inside the ' out ' vector . Elements
* are in sequential order .
* @ param input Matrix . Not modified .
* @ param out Vector containing diagonal elements . Modified . */
public static void diag ( DMatrix5x5 input , DMatrix5 out ) { } }
|
// Unrolled copy of the 5 diagonal entries; fixed-size 5x5 type, so no loop is needed.
out . a1 = input . a11 ; out . a2 = input . a22 ; out . a3 = input . a33 ; out . a4 = input . a44 ; out . a5 = input . a55 ;
|
public class RoleReader { /** * Make the request to the Twilio API to perform the read .
* @ param client TwilioRestClient with which to make the request
* @ return Role ResourceSet */
@ Override public ResourceSet < Role > read ( final TwilioRestClient client ) { } }
|
// Fetch the first page eagerly; the ResourceSet loads subsequent pages lazily on iteration.
return new ResourceSet < > ( this , client , firstPage ( client ) ) ;
|
public class ParsedOptions { private static List < String > splitProperty ( String propertyString ) throws InvalidCommandException { } }
|
/* Split "key=value" on the FIRST '=' only (limit 2 keeps any further '=' in the value). */
List < String > parts = ImmutableList . copyOf ( Splitter . on ( '=' ) . limit ( 2 ) . split ( propertyString ) ) ;
// A missing separator makes the property string unusable.
if ( parts . size ( ) != 2 ) {
    throw new InvalidCommandException ( "no '=' found in: " + propertyString ) ;
}
return parts ;
|
public class ZealotKhala { /** * 生成带 " AND " 前缀 " IS NULL " 的SQL片段 .
* < p > 示例 : 传入 { " a . name " } 参数 , 生成的SQL片段为 : " AND a . name IS NULL " < / p >
* @ param field 数据库字段
* @ return ZealotKhala实例 */
public ZealotKhala andIsNull ( String field ) { } }
|
// Delegate with the "AND" prefix.
// NOTE(review): the two boolean flags presumably select IS NULL (vs IS NOT NULL) and
// unconditional matching — confirm against doIsNull's definition.
return this . doIsNull ( ZealotConst . AND_PREFIX , field , true , true ) ;
|
public class Parallax { /** * BackgroundComponent */
@ Override public void update ( double extrp , int x , int y , double speed ) { } }
|
// This will avoid bug on huge speed ( lines out of screen )
// NOTE(review): the 'x' parameter is never read in this body (only this.x / x2 are) —
// confirm it is required only by the BackgroundComponent interface.
final double speedWrap = 2.56 * factH / 0.0084 ; final double wrapedSpeed = UtilMath . wrapDouble ( speed , - speedWrap , speedWrap ) ; // Move each line , depending of its id and size
// Lines with higher ids accumulate offset faster (0.2 * lineNum factor): depth effect.
for ( int lineNum = 0 ; lineNum < parallaxsNumber ; lineNum ++ ) { // CHECKSTYLE IGNORE LINE : MagicNumber
this . x [ lineNum ] += 0.2 * lineNum * wrapedSpeed * 0.042 ; // CHECKSTYLE IGNORE LINE : MagicNumber
x2 [ lineNum ] += wrapedSpeed * 0.25 ; // When line has arrived to its border
final double secondLine = this . x [ 1 ] ; // CHECKSTYLE IGNORE LINE : MagicNumber
// When the reference (second) line crosses the border, reset every line's offsets.
if ( secondLine >= 2.56 * factH || secondLine <= - 2.56 * factH ) { for ( int j = 0 ; j < parallaxsNumber ; j ++ ) { this . x [ j ] = 0.0 ; x2 [ j ] = 0.0 ; } } this . y [ lineNum ] = lineNum + y + ( double ) mainY ; }
|
public class ApiOvhEmaildomain { /** * Get this object properties
* REST : GET / email / domain / delegatedAccount / { email } / filter / { name }
* @ param email [ required ] Email
* @ param name [ required ] Filter name */
public OvhFilter delegatedAccount_email_filter_name_GET ( String email , String name ) throws IOException { } }
|
// Substitute the path parameters, issue the GET, and unmarshal the JSON response.
String qPath = "/email/domain/delegatedAccount/{email}/filter/{name}" ; StringBuilder sb = path ( qPath , email , name ) ; String resp = exec ( qPath , "GET" , sb . toString ( ) , null ) ; return convertTo ( resp , OvhFilter . class ) ;
|
public class ForwardingTransformerHandlerBase { /** * TransformerHandler */
public void setResult ( Result result ) throws IllegalArgumentException { } }
|
Check . notNull ( result ) ;
// A SAXResult can be targeted directly; any other Result type is adapted by routing SAX
// events through an identity transformer whose output is the requested Result.
if ( result instanceof SAXResult ) { setTarget ( ( SAXResult ) result ) ; } else { TransformerHandler th = saxHelper . newIdentityTransformerHandler ( ) ; th . setResult ( result ) ; setTarget ( new SAXResult ( th ) ) ; }
|
public class ValueDataResourceHolder { /** * Aquire ValueData resource .
* @ param resource
* Object
* @ param lockHolder
* ValueLockSupport
* @ throws InterruptedException
* if resource lock is interrupted
* @ return boolean , false - if the resource reaquired by the same user ( Thread ) , true otherwise
* @ throws IOException
* if lock error occurs */
public boolean aquire ( final Object resource , final ValueLockSupport lockHolder ) throws InterruptedException , IOException { } }
|
// Re-entrant acquire: returns false when this thread already holds the resource;
// otherwise waits for the current holder (if any) and registers a new holder record.
// NOTE(review): wait() is not wrapped in a condition loop — a spurious wakeup would
// proceed without a matching notify; confirm this is intended.
final Thread myThread = Thread . currentThread ( ) ; final VDResource res = resources . get ( resource ) ; if ( res != null ) { if ( res . addUserLock ( myThread , lockHolder ) ) // resource locked in this thread ( by me )
return false ; synchronized ( res . lock ) { // resource locked , wait for unlock
res . lock . wait ( ) ; // new record with existing lock ( to respect Object . notify ( ) )
resources . put ( resource , new VDResource ( myThread , res . lock , lockHolder ) ) ; } } else resources . put ( resource , new VDResource ( myThread , new Object ( ) , lockHolder ) ) ; return true ;
|
public class ClientConnectionManager { /** * Initialises the channel framework . This must be called before any
* other method may be invoked in this class . Don ' t worry - you can call
* this multiple times without anything bad happening .
* @ throws SIResourceException */
public static synchronized void initialise ( ) throws SIResourceException { } }
|
// Idempotent: subsequent calls after a successful initialise are no-ops.
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "initialise" ) ; if ( ! initialised ) { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) SibTr . debug ( tc , "not previously initialised" ) ; // new com . ibm . ws . sib . jfapchannel . impl . ClientConnectionManagerImpl ( ) ;
// NOTE(review): invoke() is passed the Class object as the target, which is only valid
// for a static initialise() — confirm the impl's method is static.
Class clientImpl = instance . getClass ( ) ; Method initialiseMethod ; try { initialiseMethod = clientImpl . getMethod ( "initialise" , new Class [ ] { } ) ; initialiseMethod . invoke ( clientImpl , new Object [ ] { } ) ; initialised = true ; // D223632
// Record FFDC, log the failure, and surface it to the caller as SIResourceException.
} catch ( Exception e ) { FFDCFilter . processException ( e , "com.ibm.ws.sib.jfapchannel.ClientConnectionManager.initialise" , JFapChannelConstants . CLNTCONNMGR_INITIALISE_01 ) ; SibTr . error ( tc , "EXCP_DURING_INIT_SICJ0002" , new Object [ ] { "initialise" , JFapChannelConstants . CLIENT_MANAGER_CLASS , e } ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEventEnabled ( ) ) SibTr . exception ( tc , e ) ; throw new SIResourceException ( e ) ; } } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "initialise" ) ;
|
public class CloudSnippets { /** * Example of running a query with timestamp query parameters . */
public void runQueryWithTimestampParameters ( ) throws InterruptedException { } }
|
// Demonstrates passing a TIMESTAMP named parameter into a Standard SQL query and
// printing the (microsecond-valued) result; millisecond precision is sufficient here.
// [ START bigquery _ query _ params _ timestamps ]
// BigQuery bigquery = BigQueryOptions . getDefaultInstance ( ) . getService ( ) ;
ZonedDateTime timestamp = LocalDateTime . of ( 2016 , 12 , 7 , 8 , 0 , 0 ) . atZone ( ZoneOffset . UTC ) ; String query = "SELECT TIMESTAMP_ADD(@ts_value, INTERVAL 1 HOUR);" ; // Note : Standard SQL is required to use query parameters .
QueryJobConfiguration queryConfig = QueryJobConfiguration . newBuilder ( query ) . addNamedParameter ( "ts_value" , QueryParameterValue . timestamp ( // Timestamp takes microseconds since 1970-01-01T00:00:00 UTC
timestamp . toInstant ( ) . toEpochMilli ( ) * 1000 ) ) . build ( ) ; // Print the results .
DateTimeFormatter formatter = DateTimeFormatter . ISO_INSTANT . withZone ( ZoneOffset . UTC ) ; for ( FieldValueList row : bigquery . query ( queryConfig ) . iterateAll ( ) ) { System . out . printf ( "%s\n" , formatter . format ( Instant . ofEpochMilli ( // Timestamp values are returned in microseconds since 1970-01-01T00:00:00
// UTC ,
// but org . joda . time . DateTime constructor accepts times in milliseconds .
row . get ( 0 ) . getTimestampValue ( ) / 1000 ) . atOffset ( ZoneOffset . UTC ) ) ) ; System . out . printf ( "\n" ) ; } // [ END bigquery _ query _ params _ timestamps ]
|
public class InternalXbaseWithAnnotationsParser { /** * $ ANTLR start synpred32 _ InternalXbaseWithAnnotations */
public final void synpred32_InternalXbaseWithAnnotations_fragment ( ) throws RecognitionException { } }
|
// ANTLR-generated syntactic predicate fragment: matches '(' (token 14),
// a JvmFormalParameter, then ':' (token 62), returning early on the first failure.
// Generated code — do not hand-edit the token/FOLLOW constants.
// InternalXbaseWithAnnotations . g : 3483:6 : ( ( ' ( ' ( ( ruleJvmFormalParameter ) ) ' : ' ) )
// InternalXbaseWithAnnotations . g : 3483:7 : ( ' ( ' ( ( ruleJvmFormalParameter ) ) ' : ' )
{ // InternalXbaseWithAnnotations . g : 3483:7 : ( ' ( ' ( ( ruleJvmFormalParameter ) ) ' : ' )
// InternalXbaseWithAnnotations . g : 3484:7 : ' ( ' ( ( ruleJvmFormalParameter ) ) ' : '
{ match ( input , 14 , FOLLOW_22 ) ; if ( state . failed ) return ; // InternalXbaseWithAnnotations . g : 3485:7 : ( ( ruleJvmFormalParameter ) )
// InternalXbaseWithAnnotations . g : 3486:8 : ( ruleJvmFormalParameter )
{ // InternalXbaseWithAnnotations . g : 3486:8 : ( ruleJvmFormalParameter )
// InternalXbaseWithAnnotations . g : 3487:9 : ruleJvmFormalParameter
{ pushFollow ( FOLLOW_51 ) ; ruleJvmFormalParameter ( ) ; state . _fsp -- ; if ( state . failed ) return ; } } match ( input , 62 , FOLLOW_2 ) ; if ( state . failed ) return ; } }
|
public class IntegralImageOps { /** * Converts a regular image into an integral image .
* @ param input Regular image . Not modified .
* @ param transformed Integral image . If null a new image will be created . Modified .
* @ return Integral image . */
public static GrayS64 transform ( GrayS64 input , GrayS64 transformed ) { } }
|
// Validate/allocate the output to match the input, then delegate to the low-level implementation.
transformed = InputSanityCheck . checkDeclare ( input , transformed , GrayS64 . class ) ; ImplIntegralImageOps . transform ( input , transformed ) ; return transformed ;
|
public class GenericsUtils { /** * Get the actual type according to the placeholder name
* @ param placeholderName the placeholder name , e . g . T , E
* @ param genericsPlaceholderAndTypeMap the result of { @ link # makeDeclaringAndActualGenericsTypeMap ( ClassNode , ClassNode ) }
* @ return the actual type */
public static ClassNode findActualTypeByGenericsPlaceholderName ( String placeholderName , Map < GenericsType , GenericsType > genericsPlaceholderAndTypeMap ) { } }
|
/* Linear scan of the placeholder -> actual mapping; the first name match wins. */
for ( Map . Entry < GenericsType , GenericsType > mapping : genericsPlaceholderAndTypeMap . entrySet ( ) ) {
    if ( placeholderName . equals ( mapping . getKey ( ) . getName ( ) ) ) {
        return mapping . getValue ( ) . getType ( ) . redirect ( ) ;
    }
}
// No placeholder with the requested name was declared.
return null ;
|
public class PubSubOutputHandler { /** * Get the set of topics associated with this OutputHandler */
public String [ ] getTopics ( ) { } }
|
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "getTopics" ) ;
// Returns null when no subscription state is attached to this handler.
String [ ] topics = null ; if ( _subscriptionState != null ) topics = _subscriptionState . getTopics ( ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "getTopics" , topics ) ; return topics ;
|
public class ApiOvhSms { /** * Delete the document from the slot
* REST : DELETE / sms / { serviceName } / receivers / { slotId }
* @ param serviceName [ required ] The internal name of your SMS offer
* @ param slotId [ required ] Slot number id */
public void serviceName_receivers_slotId_DELETE ( String serviceName , Long slotId ) throws IOException { } }
|
// Fill the path template and issue the DELETE; no response body is expected.
String qPath = "/sms/{serviceName}/receivers/{slotId}" ; StringBuilder sb = path ( qPath , serviceName , slotId ) ; exec ( qPath , "DELETE" , sb . toString ( ) , null ) ;
|
public class NumberMath { /** * / * package private */
static NumberMath getMath ( Number number ) { } }
|
/* Dispatch to the math strategy matching the runtime Number subtype.
   The test order is preserved from the original implementation. */
if ( isLong ( number ) ) {
    return LongMath . INSTANCE ;
} else if ( isFloatingPoint ( number ) ) {
    return FloatingPointMath . INSTANCE ;
} else if ( isBigDecimal ( number ) ) {
    return BigDecimalMath . INSTANCE ;
} else if ( isBigInteger ( number ) ) {
    return BigIntegerMath . INSTANCE ;
} else if ( isInteger ( number ) || isShort ( number ) || isByte ( number ) ) {
    return IntegerMath . INSTANCE ;
}
// Unknown/custom Number implementations fall back to the most general strategy.
return BigDecimalMath . INSTANCE ;
|
public class LazyUtil { /** * Check is current object was initialized
* @ param object - object , which need check
* @ return boolean value */
public static boolean isPropertyInitialized ( Object object ) { } }
|
Class < ? > cl = getHibernateClass ( ) ;
// NOTE(review): a null class presumably means Hibernate is unavailable on the classpath,
// in which case every object is treated as initialized — confirm against getHibernateClass.
if ( cl == null ) { return true ; }
// Otherwise reflectively invoke Hibernate's initialization check on the object.
Method method = getInitializeMethod ( cl ) ; return checkInitialize ( method , object ) ;
|
public class ModelMetricsHandler { /** * Score a frame with the given model and return the metrics AND the prediction frame . */
@ SuppressWarnings ( "unused" ) // called through reflection by RequestServer
public ModelMetricsListSchemaV3 predict ( int version , ModelMetricsListSchemaV3 s ) { } }
|
// parameters checking :
if ( s . model == null ) throw new H2OIllegalArgumentException ( "model" , "predict" , null ) ; if ( DKV . get ( s . model . name ) == null ) throw new H2OKeyNotFoundArgumentException ( "model" , "predict" , s . model . name ) ; // Aggregator doesn ' t need a Frame to ' predict '
if ( s . exemplar_index < 0 ) { if ( s . frame == null ) throw new H2OIllegalArgumentException ( "frame" , "predict" , null ) ; if ( DKV . get ( s . frame . name ) == null ) throw new H2OKeyNotFoundArgumentException ( "frame" , "predict" , s . frame . name ) ; } ModelMetricsList parms = s . createAndFillImpl ( ) ; Frame predictions ; Frame deviances = null ; if ( ! s . reconstruction_error && ! s . reconstruction_error_per_feature && s . deep_features_hidden_layer < 0 && ! s . project_archetypes && ! s . reconstruct_train && ! s . leaf_node_assignment && ! s . predict_staged_proba && ! s . predict_contributions && s . exemplar_index < 0 ) { if ( null == parms . _predictions_name ) parms . _predictions_name = "predictions" + Key . make ( ) . toString ( ) . substring ( 0 , 5 ) + "_" + parms . _model . _key . toString ( ) + "_on_" + parms . _frame . _key . toString ( ) ; String customMetricFunc = s . custom_metric_func ; if ( customMetricFunc == null ) { customMetricFunc = parms . _model . _parms . _custom_metric_func ; } predictions = parms . _model . score ( parms . _frame , parms . _predictions_name , null , true , CFuncRef . from ( customMetricFunc ) ) ; if ( s . deviances ) { if ( ! parms . _model . isSupervised ( ) ) throw new H2OIllegalArgumentException ( "Deviances can only be computed for supervised models." ) ; if ( null == parms . _deviances_name ) parms . _deviances_name = "deviances" + Key . make ( ) . toString ( ) . substring ( 0 , 5 ) + "_" + parms . _model . _key . toString ( ) + "_on_" + parms . _frame . _key . toString ( ) ; deviances = parms . _model . computeDeviances ( parms . _frame , predictions , parms . _deviances_name ) ; } } else { if ( s . deviances ) throw new H2OIllegalArgumentException ( "Cannot compute deviances in combination with other special predictions." ) ; if ( Model . DeepFeatures . class . isAssignableFrom ( parms . _model . getClass ( ) ) ) { if ( s . reconstruction_error || s . reconstruction_error_per_feature ) { if ( s . 
deep_features_hidden_layer >= 0 ) throw new H2OIllegalArgumentException ( "Can only compute either reconstruction error OR deep features." , "" ) ; if ( null == parms . _predictions_name ) parms . _predictions_name = "reconstruction_error" + Key . make ( ) . toString ( ) . substring ( 0 , 5 ) + "_" + parms . _model . _key . toString ( ) + "_on_" + parms . _frame . _key . toString ( ) ; predictions = ( ( Model . DeepFeatures ) parms . _model ) . scoreAutoEncoder ( parms . _frame , Key . make ( parms . _predictions_name ) , parms . _reconstruction_error_per_feature ) ; } else { if ( s . deep_features_hidden_layer < 0 ) throw new H2OIllegalArgumentException ( "Deep features hidden layer index must be >= 0." , "" ) ; if ( null == parms . _predictions_name ) parms . _predictions_name = "deep_features" + Key . make ( ) . toString ( ) . substring ( 0 , 5 ) + "_" + parms . _model . _key . toString ( ) + "_on_" + parms . _frame . _key . toString ( ) ; predictions = ( ( Model . DeepFeatures ) parms . _model ) . scoreDeepFeatures ( parms . _frame , s . deep_features_hidden_layer ) ; } predictions = new Frame ( Key . < Frame > make ( parms . _predictions_name ) , predictions . names ( ) , predictions . vecs ( ) ) ; DKV . put ( predictions . _key , predictions ) ; } else if ( Model . GLRMArchetypes . class . isAssignableFrom ( parms . _model . getClass ( ) ) ) { if ( s . project_archetypes ) { if ( parms . _predictions_name == null ) parms . _predictions_name = "reconstructed_archetypes_" + Key . make ( ) . toString ( ) . substring ( 0 , 5 ) + "_" + parms . _model . _key . toString ( ) + "_of_" + parms . _frame . _key . toString ( ) ; predictions = ( ( Model . GLRMArchetypes ) parms . _model ) . scoreArchetypes ( parms . _frame , Key . < Frame > make ( parms . _predictions_name ) , s . reverse_transform ) ; } else { assert s . reconstruct_train ; if ( parms . _predictions_name == null ) parms . _predictions_name = "reconstruction_" + Key . make ( ) . toString ( ) . 
substring ( 0 , 5 ) + "_" + parms . _model . _key . toString ( ) + "_of_" + parms . _frame . _key . toString ( ) ; predictions = ( ( Model . GLRMArchetypes ) parms . _model ) . scoreReconstruction ( parms . _frame , Key . < Frame > make ( parms . _predictions_name ) , s . reverse_transform ) ; } } else if ( s . leaf_node_assignment ) { assert ( Model . LeafNodeAssignment . class . isAssignableFrom ( parms . _model . getClass ( ) ) ) ; if ( null == parms . _predictions_name ) parms . _predictions_name = "leaf_node_assignment" + Key . make ( ) . toString ( ) . substring ( 0 , 5 ) + "_" + parms . _model . _key . toString ( ) + "_on_" + parms . _frame . _key . toString ( ) ; Model . LeafNodeAssignment . LeafNodeAssignmentType type = null == s . leaf_node_assignment_type ? Model . LeafNodeAssignment . LeafNodeAssignmentType . Path : s . leaf_node_assignment_type ; predictions = ( ( Model . LeafNodeAssignment ) parms . _model ) . scoreLeafNodeAssignment ( parms . _frame , type , Key . < Frame > make ( parms . _predictions_name ) ) ; } else if ( s . predict_staged_proba ) { if ( ! ( parms . _model instanceof Model . StagedPredictions ) ) { throw new H2OIllegalArgumentException ( "Model type " + parms . _model . _parms . algoName ( ) + " doesn't support Staged Predictions." ) ; } if ( null == parms . _predictions_name ) parms . _predictions_name = "staged_proba_" + Key . make ( ) . toString ( ) . substring ( 0 , 5 ) + "_" + parms . _model . _key . toString ( ) + "_on_" + parms . _frame . _key . toString ( ) ; predictions = ( ( Model . StagedPredictions ) parms . _model ) . scoreStagedPredictions ( parms . _frame , Key . < Frame > make ( parms . _predictions_name ) ) ; } else if ( s . predict_contributions ) { if ( ! ( parms . _model instanceof Model . Contributions ) ) { throw new H2OIllegalArgumentException ( "Model type " + parms . _model . _parms . algoName ( ) + " doesn't support calculating Feature Contributions." ) ; } if ( null == parms . _predictions_name ) parms . 
_predictions_name = "contributions_" + Key . make ( ) . toString ( ) . substring ( 0 , 5 ) + "_" + parms . _model . _key . toString ( ) + "_on_" + parms . _frame . _key . toString ( ) ; predictions = ( ( Model . Contributions ) parms . _model ) . scoreContributions ( parms . _frame , Key . < Frame > make ( parms . _predictions_name ) ) ; } else if ( s . exemplar_index >= 0 ) { assert ( Model . ExemplarMembers . class . isAssignableFrom ( parms . _model . getClass ( ) ) ) ; if ( null == parms . _predictions_name ) parms . _predictions_name = "members_" + parms . _model . _key . toString ( ) + "_for_exemplar_" + parms . _exemplar_index ; predictions = ( ( Model . ExemplarMembers ) parms . _model ) . scoreExemplarMembers ( Key . < Frame > make ( parms . _predictions_name ) , parms . _exemplar_index ) ; } else throw new H2OIllegalArgumentException ( "Requires a Deep Learning, GLRM, DRF or GBM model." , "Model must implement specific methods." ) ; } ModelMetricsListSchemaV3 mm = this . fetch ( version , s ) ; // TODO : for now only binary predictors write an MM object .
// For the others cons one up here to return the predictions frame .
if ( null == mm ) mm = new ModelMetricsListSchemaV3 ( ) ; mm . predictions_frame = new KeyV3 . FrameKeyV3 ( predictions . _key ) ; if ( parms . _leaf_node_assignment ) // don ' t show metrics in leaf node assignments are made
mm . model_metrics = null ; if ( deviances != null ) mm . deviances_frame = new KeyV3 . FrameKeyV3 ( deviances . _key ) ; if ( null == mm . model_metrics || 0 == mm . model_metrics . length ) { // There was no response in the test set - > cannot make a model _ metrics object
} else { mm . model_metrics [ 0 ] . predictions = new FrameV3 ( predictions , 0 , 100 ) ; // TODO : Should call schema ( version )
} return mm ;
|
public class MtasMaximumExpandSpans { /** * Two phase current doc matches .
* @ return true , if successful
* @ throws IOException Signals that an I / O exception has occurred . */
private boolean twoPhaseCurrentDocMatches ( ) throws IOException { } }
|
if ( docId != subSpans . docID ( ) ) { reset ( ) ; docId = subSpans . docID ( ) ; IndexDoc doc = mtasCodecInfo . getDoc ( field , docId ) ; if ( doc != null ) { minPosition = doc . minPosition ; maxPosition = doc . maxPosition ; } else { minPosition = NO_MORE_POSITIONS ; maxPosition = NO_MORE_POSITIONS ; } } if ( docId == NO_MORE_DOCS ) { return false ; } else { return goToNextStartPosition ( ) ; }
|
public class AbstractAmazonSQSAsync { /** * Simplified method form for invoking the GetQueueAttributes operation .
* @ see # getQueueAttributesAsync ( GetQueueAttributesRequest ) */
@ Override public java . util . concurrent . Future < GetQueueAttributesResult > getQueueAttributesAsync ( String queueUrl , java . util . List < String > attributeNames ) { } }
|
return getQueueAttributesAsync ( new GetQueueAttributesRequest ( ) . withQueueUrl ( queueUrl ) . withAttributeNames ( attributeNames ) ) ;
|
public class SEPWorker {

    /**
     * realtime we have spun too much and deschedule; if we get too far behind realtime, we reset to our initial offset
     *
     * @param stopCheck the stop-check clock value this worker last observed (a time in the past)
     * @param now       the current time
     */
    private void maybeStop(long stopCheck, long now) {
        long delta = now - stopCheck;
        if (delta <= 0) {
            // if stopCheck has caught up with present, we've been spinning too much, so if we can atomically
            // set it to the past again, we should stop a worker
            if (pool.stopCheck.compareAndSet(stopCheck, now - stopCheckInterval)) {
                // try and stop ourselves;
                // if we've already been assigned work stop another worker
                if (!assign(Work.STOP_SIGNALLED, true))
                    pool.schedule(Work.STOP_SIGNALLED);
            }
        } else if (soleSpinnerSpinTime > stopCheckInterval && pool.spinningCount.get() == 1) {
            // permit self-stopping
            // NOTE(review): only taken when this worker appears to be the sole spinner — confirm
            // soleSpinnerSpinTime is maintained elsewhere in this class.
            assign(Work.STOP_SIGNALLED, true);
        } else {
            // if stop check has gotten too far behind present, update it so new spins can affect it
            // CAS loop: retry with the freshly observed value until it is close enough to 'now'.
            while (delta > stopCheckInterval * 2 && !pool.stopCheck.compareAndSet(stopCheck, now - stopCheckInterval)) {
                stopCheck = pool.stopCheck.get();
                delta = now - stopCheck;
            }
        }
    }
}
|
public class InternalXbaseParser {

    /**
     * InternalXbase.g:4657:1: entryRuleXBooleanLiteral returns [EObject current = null] : iv_ruleXBooleanLiteral= ruleXBooleanLiteral EOF ;
     *
     * NOTE: ANTLR-generated entry rule — parses a complete XBooleanLiteral followed by EOF.
     * Do not edit by hand; regenerate from the grammar instead.
     */
    public final EObject entryRuleXBooleanLiteral() throws RecognitionException {
        EObject current = null;
        EObject iv_ruleXBooleanLiteral = null;
        try {
            // InternalXbase.g:4657:56: (iv_ruleXBooleanLiteral= ruleXBooleanLiteral EOF )
            // InternalXbase.g:4658:2: iv_ruleXBooleanLiteral= ruleXBooleanLiteral EOF
            {
                if (state.backtracking == 0) {
                    // Only build the node model when not speculatively backtracking.
                    newCompositeNode(grammarAccess.getXBooleanLiteralRule());
                }
                pushFollow(FOLLOW_1);
                iv_ruleXBooleanLiteral = ruleXBooleanLiteral();
                state._fsp--;
                if (state.failed) return current;
                if (state.backtracking == 0) {
                    current = iv_ruleXBooleanLiteral;
                }
                // Require the literal to consume the whole input.
                match(input, EOF, FOLLOW_2);
                if (state.failed) return current;
            }
        } catch (RecognitionException re) {
            recover(input, re);
            appendSkippedTokens();
        } finally {
        }
        return current;
    }
}
|
public class Routers { /** * Returns the default implementation of the { @ link Router } to find a { @ link ServiceConfig } .
* It consists of several router implementations which use one of Trie and List . It also includes
* cache mechanism to improve its performance . */
public static Router < ServiceConfig > ofVirtualHost ( VirtualHost virtualHost , Iterable < ServiceConfig > configs , RejectedPathMappingHandler rejectionHandler ) { } }
|
requireNonNull ( virtualHost , "virtualHost" ) ; requireNonNull ( configs , "configs" ) ; requireNonNull ( rejectionHandler , "rejectionHandler" ) ; final BiConsumer < PathMapping , PathMapping > rejectionConsumer = ( mapping , existingMapping ) -> { try { rejectionHandler . handleDuplicatePathMapping ( virtualHost , mapping , existingMapping ) ; } catch ( Exception e ) { logger . warn ( "Unexpected exception from a {}:" , RejectedPathMappingHandler . class . getSimpleName ( ) , e ) ; } } ; return wrapVirtualHostRouter ( defaultRouter ( configs , ServiceConfig :: pathMapping , rejectionConsumer ) ) ;
|
public class AmazonElasticLoadBalancingClient { /** * Modifies the specified rule .
* Any existing properties that you do not modify retain their current values .
* To modify the actions for the default rule , use < a > ModifyListener < / a > .
* @ param modifyRuleRequest
* @ return Result of the ModifyRule operation returned by the service .
* @ throws TargetGroupAssociationLimitException
* You ' ve reached the limit on the number of load balancers per target group .
* @ throws IncompatibleProtocolsException
* The specified configuration is not valid with this protocol .
* @ throws RuleNotFoundException
* The specified rule does not exist .
* @ throws OperationNotPermittedException
* This operation is not allowed .
* @ throws TooManyRegistrationsForTargetIdException
* You ' ve reached the limit on the number of times a target can be registered with a load balancer .
* @ throws TooManyTargetsException
* You ' ve reached the limit on the number of targets .
* @ throws TargetGroupNotFoundException
* The specified target group does not exist .
* @ throws UnsupportedProtocolException
* The specified protocol is not supported .
* @ throws TooManyActionsException
* You ' ve reached the limit on the number of actions per rule .
* @ throws InvalidLoadBalancerActionException
* The requested action is not valid .
* @ sample AmazonElasticLoadBalancing . ModifyRule
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / elasticloadbalancingv2-2015-12-01 / ModifyRule "
* target = " _ top " > AWS API Documentation < / a > */
@ Override public ModifyRuleResult modifyRule ( ModifyRuleRequest request ) { } }
|
request = beforeClientExecution ( request ) ; return executeModifyRule ( request ) ;
|
public class StringUtils { /** * Hashes a String using the SHA - 1 algorithm and returns the result as a
* String of hexadecimal numbers . This method is synchronized to avoid
* excessive MessageDigest object creation . If calling this method becomes
* a bottleneck in your code , you may wish to maintain a pool of
* MessageDigest objects instead of using this method .
* A hash is a one - way function - - that is , given an
* input , an output is easily computed . However , given the output , the
* input is almost impossible to compute . This is useful for passwords
* since we can store the hash and a hacker will then have a very hard time
* determining the original password .
* @ param data the String to compute the hash of .
* @ return a hashed version of the passed - in String
* @ deprecated use { @ link org . jivesoftware . smack . util . SHA1 # hex ( String ) } instead . */
@ Deprecated public static synchronized String hash ( String data ) { } }
|
return org . jivesoftware . smack . util . SHA1 . hex ( data ) ;
|
public class PreambleUtil { /** * Returns the defined byte offset from the start of the preamble given the < i > HiField < / i >
* and the < i > Format < / i > .
* Note this can not be used to obtain the stream offsets .
* @ param format the desired < i > Format < / i >
* @ param hiField the desired preamble < i > HiField < / i > after the first eight bytes .
* @ return the defined byte offset from the start of the preamble for the given < i > HiField < / i >
* and the < i > Format < / i > . */
static long getHiFieldOffset ( final Format format , final HiField hiField ) { } }
|
final int formatIdx = format . ordinal ( ) ; final int hiFieldIdx = hiField . ordinal ( ) ; final long fieldOffset = hiFieldOffset [ formatIdx ] [ hiFieldIdx ] & 0xFF ; // initially a byte
if ( fieldOffset == 0 ) { throw new SketchesStateException ( "Undefined preamble field given the Format: " + "Format: " + format . toString ( ) + ", HiField: " + hiField . toString ( ) ) ; } return fieldOffset ;
|
public class Statement { /** * Returns a list of all parameters contained by this statement and object .
* @ return List of parameters */
public List < Parameter > getAllParameters ( ) { } }
|
List < Parameter > ret = new ArrayList < Parameter > ( ) ; ret . addAll ( subject . getAllParameters ( ) ) ; if ( object != null ) { if ( object . getStatement ( ) != null ) ret . addAll ( object . getStatement ( ) . getAllParameters ( ) ) ; else ret . addAll ( object . getTerm ( ) . getAllParameters ( ) ) ; } return ret ;
|
public class Request { /** * Set ( or overwrite ) a parameter with multiple values .
* < br >
* The parameter will be used to create a query string for GET - requests and as the body for POST - requests
* with MIME - type < code > application / x - www - form - urlencoded < / code > .
* < br >
* If you use this method , you don ' t have to call { @ link # multipleValues ( ) } , but you should not mix
* using { @ link # param ( String , Object ) } and this method for the same parameter name as this might cause
* unexpected behaviour or exceptions .
* @ param name the name of the parameter ( it ' s better to use only contain ASCII characters )
* @ param values the values of the parameter ; will be expanded to multiple valued parameters .
* @ return < code > this < / code > for method chaining ( fluent API )
* @ since 1.3.0 */
public Request param ( String name , Iterable < Object > values ) { } }
|
if ( params == null ) { params = new LinkedHashMap < String , Object > ( ) ; } params . put ( name , values ) ; return this ;
|
public class RtcpHandler {

    /**
     * Upon joining the session, the participant initializes tp to 0, tc to 0, senders to 0, pmembers to 1, members to 1,
     * we_sent to false, rtcp_bw to the specified fraction of the session bandwidth, initial to true, and avg_rtcp_size to the
     * probable size of the first RTCP packet that the application will later construct.
     * The calculated interval T is then computed, and the first packet is scheduled for time tn = T. This means that a
     * transmission timer is set which expires at time T. Note that an application MAY use any desired approach for implementing
     * this timer.
     * The participant adds its own SSRC to the member table.
     */
    public void joinRtpSession() {
        // Idempotent: only the first call per session performs the join.
        if (!this.joined.get()) {
            // Schedule first RTCP packet
            long t = this.statistics.rtcpInterval(this.initial.get());
            this.tn = this.statistics.getCurrentTime() + t;
            scheduleRtcp(this.tn, RtcpPacketType.RTCP_REPORT);
            // Start SSRC timeout timer
            this.ssrcTaskFuture = this.scheduler.scheduleWithFixedDelay(ssrcTask, SSRC_TASK_DELAY, SSRC_TASK_DELAY, TimeUnit.MILLISECONDS);
            this.joined.set(true);
        }
    }
}
|
public class WCheckBox { /** * Override handleRequest in order to perform processing for this component . This implementation checks the checkbox
* state in the request .
* @ param request the request being responded to .
* @ return true if the check box has changed */
@ Override protected boolean doHandleRequest ( final Request request ) { } }
|
boolean selected = getRequestValue ( request ) ; boolean current = getValue ( ) ; boolean changed = current != selected ; if ( changed ) { setData ( selected ) ; } return changed ;
|
public class ModifyInstanceCreditSpecificationResult { /** * Information about the instances whose credit option for CPU usage was successfully modified .
* @ return Information about the instances whose credit option for CPU usage was successfully modified . */
public java . util . List < SuccessfulInstanceCreditSpecificationItem > getSuccessfulInstanceCreditSpecifications ( ) { } }
|
if ( successfulInstanceCreditSpecifications == null ) { successfulInstanceCreditSpecifications = new com . amazonaws . internal . SdkInternalList < SuccessfulInstanceCreditSpecificationItem > ( ) ; } return successfulInstanceCreditSpecifications ;
|
public class DefaultRouteService {

    /**
     * Overridden
     *
     * Assigns the next batch of a client's unprocessed GPS coordinates to routes. Coordinates more
     * than five minutes apart start a new route; closer ones continue the previous route.
     *
     * @param client the client whose coordinates are analyzed
     * @return the number of coordinates processed in this batch (at most routeAnalysisBatchSize)
     */
    @Override
    public int analyzeRoutes(String client) {
        // Resume from the last coordinate already assigned to a route, if any.
        RouteGpsCoordinates lastRouteCoordinates = findLastRouteCoordinates(client);
        GpsCoordinates lastCoordinates = null;
        if (lastRouteCoordinates == null) {
            LOG.info("No GPS coordinates assigned to routes for client {}", client);
        } else {
            lastCoordinates = mongoTemplate.findById(lastRouteCoordinates.getCoordinatesId(), GpsCoordinates.class, GpsCoordinates.class.getSimpleName());
        }
        // Fetch the next batch of coordinates, strictly after the last processed one,
        // in insertion order (ObjectIds are monotonically increasing).
        Query query = new Query();
        query.addCriteria(where("client").is(client));
        if (lastRouteCoordinates != null) {
            query.addCriteria(where("_id").gt(new ObjectId(lastRouteCoordinates.getCoordinatesId())));
        }
        query.limit(routeAnalysisBatchSize);
        query.with(new Sort(ASC, "_id"));
        List<GpsCoordinates> coordinatesToAnalyze = mongoTemplate.find(query, GpsCoordinates.class, GpsCoordinates.class.getSimpleName());
        for (GpsCoordinates coordinates : coordinatesToAnalyze) {
            String routeId;
            // A gap of more than 5 minutes (or no previous coordinate) starts a new route.
            if (lastCoordinates == null || (TimeUnit.MILLISECONDS.toMinutes(coordinates.getTimestamp().getTime() - lastCoordinates.getTimestamp().getTime()) > 5)) {
                Route newRoute = createNewRoute(client);
                routeId = documentDriver.save(collectionName(newRoute.getClass()), pojoToMap(newRoute));
            } else {
                routeId = lastRouteCoordinates.getRouteId();
            }
            // Persist the coordinate-to-route link and advance the scan cursor.
            lastRouteCoordinates = new RouteGpsCoordinates(null, routeId, coordinates.getId(), client);
            mongoTemplate.save(lastRouteCoordinates, collectionName(RouteGpsCoordinates.class));
            lastCoordinates = coordinates;
        }
        return coordinatesToAnalyze.size();
    }
}
|
public class ChunkField { /** * for synthetic fields */
public void set ( int [ ] indexes , int [ ] offsets , int [ ] lengths ) { } }
|
m_size = indexes . length ; m_valuesCount = offsets . length ; m_offsets = offsets ; m_lengths = lengths ; m_prefixes = new int [ offsets . length ] ; m_suffixes = new int [ offsets . length ] ; m_indexes = indexes ;
|
public class CassandraPojoOutputFormat { /** * Opens a Session to Cassandra and initializes the prepared statement .
* @ param taskNumber The number of the parallel instance . */
@ Override public void open ( int taskNumber , int numTasks ) { } }
|
this . session = cluster . connect ( ) ; MappingManager mappingManager = new MappingManager ( session ) ; this . mapper = mappingManager . mapper ( outputClass ) ; if ( mapperOptions != null ) { Mapper . Option [ ] optionsArray = mapperOptions . getMapperOptions ( ) ; if ( optionsArray != null ) { mapper . setDefaultSaveOptions ( optionsArray ) ; } } this . callback = new FutureCallback < Void > ( ) { @ Override public void onSuccess ( Void ignored ) { onWriteSuccess ( ) ; } @ Override public void onFailure ( Throwable t ) { onWriteFailure ( t ) ; } } ;
|
public class AngleAverageSeeker { /** * Returns true if average is within given delta .
* @ param delta
* @ return */
public boolean isWithin ( double delta ) { } }
|
readLock . lock ( ) ; try { if ( stats . count ( ) >= stats . getInitialSize ( ) ) { return ( Navis . angleDiff ( stats . getMin ( ) , stats . getMax ( ) ) < delta ) ; } else { return false ; } } finally { readLock . unlock ( ) ; }
|
public class BehaviorTreeReader { /** * line 315 " BehaviorTreeReader . rl " */
private static boolean containsFloatingPointCharacters ( String value ) { } }
|
for ( int i = 0 , n = value . length ( ) ; i < n ; i ++ ) { switch ( value . charAt ( i ) ) { case '.' : case 'E' : case 'e' : return true ; } } return false ;
|
public class TelemetryService { /** * force to flush events in the queue */
public void flush ( ) { } }
|
if ( ! enabled ) { return ; } if ( ! queue . isEmpty ( ) ) { // start a new thread to upload without blocking the current thread
Runnable runUpload = new TelemetryUploader ( this , exportQueueToString ( ) ) ; uploader . execute ( runUpload ) ; }
|
public class Array { public static char [ ] range ( char startInclusive , final char endExclusive ) { } }
|
if ( startInclusive >= endExclusive ) { return N . EMPTY_CHAR_ARRAY ; } final char [ ] a = new char [ endExclusive * 1 - startInclusive ] ; for ( int i = 0 , len = a . length ; i < len ; i ++ ) { a [ i ] = startInclusive ++ ; } return a ;
|
public class BoxApiAuthentication { /** * Create OAuth , to be called the first time session tries to authenticate . */
BoxCreateAuthRequest createOAuth ( String code , String clientId , String clientSecret ) { } }
|
BoxCreateAuthRequest request = new BoxCreateAuthRequest ( mSession , getTokenUrl ( ) , code , clientId , clientSecret ) ; return request ;
|
public class Numbers { /** * Represents the given { @ link Number } exactly as an int value without any
* magnitude and precision losses ; if that ' s not possible , fails by throwing
* an exception .
* @ param number the number to represent as an int value .
* @ return an int representation of the given number .
* @ throws IllegalArgumentException if no exact representation exists . */
public static int asIntExactly ( Number number ) { } }
|
Class clazz = number . getClass ( ) ; if ( isLongRepresentableExceptLong ( clazz ) ) { return number . intValue ( ) ; } else if ( clazz == Long . class ) { int intValue = number . intValue ( ) ; if ( number . longValue ( ) == ( long ) intValue ) { return intValue ; } } else if ( isDoubleRepresentable ( clazz ) ) { int intValue = number . intValue ( ) ; if ( equalDoubles ( number . doubleValue ( ) , ( double ) intValue ) ) { return intValue ; } } throw new IllegalArgumentException ( "Can't represent " + number + " as int exactly" ) ;
|
public class MBeanSampler { /** * Adds an { @ link info . ganglia . jmxetric . MBeanAttribute } to be sampled .
* @ param mbean
* name of the mbean
* @ param attr
* attribute to be sample
* @ throws Exception */
public void addMBeanAttribute ( String mbean , MBeanAttribute attr ) throws Exception { } }
|
MBeanHolder mbeanHolder = mbeanMap . get ( mbean ) ; if ( mbeanHolder == null ) { mbeanHolder = new MBeanHolder ( this , process , mbean ) ; mbeanMap . put ( mbean , mbeanHolder ) ; } mbeanHolder . addAttribute ( attr ) ; log . info ( "Added attribute " + attr + " to " + mbean ) ;
|
public class TenantServiceClient { /** * Retrieves specified tenant .
* < p > Sample code :
* < pre > < code >
* try ( TenantServiceClient tenantServiceClient = TenantServiceClient . create ( ) ) {
* TenantName name = TenantName . of ( " [ PROJECT ] " , " [ TENANT ] " ) ;
* Tenant response = tenantServiceClient . getTenant ( name ) ;
* < / code > < / pre >
* @ param name Required .
* < p > The resource name of the tenant to be retrieved .
* < p > The format is " projects / { project _ id } / tenants / { tenant _ id } " , for example ,
* " projects / api - test - project / tenants / foo " .
* @ throws com . google . api . gax . rpc . ApiException if the remote call fails */
public final Tenant getTenant ( TenantName name ) { } }
|
GetTenantRequest request = GetTenantRequest . newBuilder ( ) . setName ( name == null ? null : name . toString ( ) ) . build ( ) ; return getTenant ( request ) ;
|
public class CassandraCounter { /** * Gets all data points of a day specified by the timestamp , cache
* supported .
* @ param timestampMs
* @ return */
@ SuppressWarnings ( "unchecked" ) private Map < Long , DataPoint > _getRowWithCache ( long timestampMs ) { } }
|
int [ ] yyyymm_dd = toYYYYMM_DD ( timestampMs ) ; int yyyymmdd = yyyymm_dd [ 0 ] * 100 + yyyymm_dd [ 1 ] ; ICache cache = getCache ( ) ; String cacheKey = String . valueOf ( yyyymmdd ) ; Object temp = cache != null ? cache . get ( cacheKey ) : null ; Map < Long , DataPoint > result = ( Map < Long , DataPoint > ) ( temp instanceof Map ? temp : null ) ; if ( result == null ) { result = _getRow ( getName ( ) , yyyymm_dd [ 0 ] , yyyymm_dd [ 1 ] ) ; if ( cache != null ) { cache . set ( cacheKey , result ) ; } } return result ;
|
public class ThrowableExceptionMapper { /** * Maps an unhandled { @ link Throwable } to a { @ link Response } .
* @ param exception the { @ link Throwable } exception that was not handled
* @ return a { @ link Response } object with a status of 500 , content - type of ' application / json ' , and
* a { @ link JsonError } entity containing details about the unhandled exception in JSON format */
@ Override public Response toResponse ( Throwable exception ) { } }
|
logger . error ( "An unhandled exception was thrown." , exception ) ; return Response . status ( INTERNAL_SERVER_ERROR ) . entity ( JsonError . builder ( ) . code ( INTERNAL_SERVER_ERROR . getStatusCode ( ) ) . message ( INTERNAL_SERVER_ERROR . getReasonPhrase ( ) ) . build ( ) ) . type ( MediaType . APPLICATION_JSON ) . build ( ) ;
|
public class MongoHelpers {

    /**
     * Remove a column from the Document
     *
     * Supports dotted paths ("a.b.c") by walking nested Documents and removing the
     * leaf entry; gives up silently when an intermediate node is missing.
     *
     * @param entity the {@link Document} with the column
     * @param column the column to remove
     */
    public static void resetValue(Document entity, String column) {
        // fast path for non-embedded case
        if (!column.contains(".")) {
            entity.remove(column);
        } else {
            String[] path = DOT_SEPARATOR_PATTERN.split(column);
            Object field = entity;
            int size = path.length;
            for (int index = 0; index < size; index++) {
                String node = path[index];
                // NOTE(review): assumes every intermediate value is a Document — a non-Document
                // intermediate would throw ClassCastException here; confirm callers guarantee this.
                Document parent = (Document) field;
                field = parent.get(node);
                if (field == null && index < size - 1) {
                    // TODO clean up the hierarchy of empty containers
                    // no way to reach the leaf, nothing to do
                    return;
                }
                if (index == size - 1) {
                    // Reached the leaf: remove it from its parent.
                    parent.remove(node);
                }
            }
        }
    }
}
|
public class Environment { /** * Adds a Bean definition
* @ param bean the bean , or { @ code null } . */
void addBean ( Bean bean ) { } }
|
if ( bean == null ) return ; this . beans . add ( new BeanDefinition ( bean . name ( ) , bean . clazz ( ) ) ) ;
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.