signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class Scanner { /** * Scans the next token of the input as a < tt > long < / tt > .
* This method will throw < code > InputMismatchException < / code >
* if the next token cannot be translated into a valid long value as
* described below . If the translation is successful , the scanner advances
* past the input that matched .
* < p > If the next token matches the < a
* href = " # Integer - regex " > < i > Integer < / i > < / a > regular expression defined
* above then the token is converted into a < tt > long < / tt > value as if by
* removing all locale specific prefixes , group separators , and locale
* specific suffixes , then mapping non - ASCII digits into ASCII
* digits via { @ link Character # digit Character . digit } , prepending a
* negative sign ( - ) if the locale specific negative prefixes and suffixes
* were present , and passing the resulting string to
* { @ link Long # parseLong ( String , int ) Long . parseLong } with the
* specified radix .
* @ param radix the radix used to interpret the token as an int value
* @ return the < tt > long < / tt > scanned from the input
* @ throws InputMismatchException
* if the next token does not match the < i > Integer < / i >
* regular expression , or is out of range
* @ throws NoSuchElementException if input is exhausted
* @ throws IllegalStateException if this scanner is closed */
public long nextLong ( int radix ) { } } | // Check cached result
if ( ( typeCache != null ) && ( typeCache instanceof Long ) && this . radix == radix ) { long val = ( ( Long ) typeCache ) . longValue ( ) ; useTypeCache ( ) ; return val ; } setRadix ( radix ) ; clearCaches ( ) ; try { String s = next ( integerPattern ( ) ) ; if ( matcher . group ( SIMPLE_GROUP_INDEX ) == null ) s = processIntegerToken ( s ) ; return Long . parseLong ( s , radix ) ; } catch ( NumberFormatException nfe ) { position = matcher . start ( ) ; // don ' t skip bad token
throw new InputMismatchException ( nfe . getMessage ( ) ) ; } |
public class SimpleCacheProvider { /** * XXX : prune 1 / N each time */
protected void pruneCache ( ) { } } | final long expired = System . currentTimeMillis ( ) - timeToLive ; this . cacheMap . values ( ) . removeIf ( entry -> entry . createTime < expired ) ; |
public class ImagePipeline { /** * Returns whether the image is stored in the disk cache .
* Performs disk cache check synchronously . It is not recommended to use this
* unless you know what exactly you are doing . Disk cache check is a costly operation ,
* the call will block the caller thread until the cache check is completed .
* @ param uri the uri for the image to be looked up .
* @ return true if the image was found in the disk cache , false otherwise . */
public boolean isInDiskCacheSync ( final Uri uri ) { } } | return isInDiskCacheSync ( uri , ImageRequest . CacheChoice . SMALL ) || isInDiskCacheSync ( uri , ImageRequest . CacheChoice . DEFAULT ) ; |
public class SamlSettingsApi { /** * Set SAML state .
* Change current SAML state .
* @ param saMLEnabled Value that define SAML state . ( required )
* @ return SetEnabledResponse
* @ throws ApiException If fail to call the API , e . g . server error or cannot deserialize the response body */
public SetEnabledResponse setEnabled ( Boolean saMLEnabled ) throws ApiException { } } | ApiResponse < SetEnabledResponse > resp = setEnabledWithHttpInfo ( saMLEnabled ) ; return resp . getData ( ) ; |
public class DDPStateSingleton { /** * Used to notify event system of connection events .
* Default behavior uses Android ' s LocalBroadcastManager .
* Override if you want to use a different eventbus .
* @ param ddpstate current DDP state */
public void broadcastConnectionState ( DDPSTATE ddpstate ) { } } | Intent broadcastIntent = new Intent ( ) ; broadcastIntent . setAction ( MESSAGE_CONNECTION ) ; broadcastIntent . putExtra ( MESSAGE_EXTRA_STATE , ddpstate . ordinal ( ) ) ; broadcastIntent . putExtra ( MESSAGE_EXTRA_USERID , mUserId ) ; broadcastIntent . putExtra ( MESSAGE_EXTRA_USERTOKEN , mResumeToken ) ; LocalBroadcastManager . getInstance ( mContext ) . sendBroadcast ( broadcastIntent ) ; |
public class PermitOverridesAlg { /** * / * ( non - Javadoc )
* @ see nl . uva . sne . midd . algorithms . CombiningAlgorithmInterface # combine ( nl . uva . sne . midd . DecisionType , nl . uva . sne . midd . DecisionType ) */
@ Override public DecisionType combine ( DecisionType op1 , DecisionType op2 ) { } } | if ( op1 == DecisionType . Permit || op2 == DecisionType . Permit ) { return DecisionType . Permit ; } if ( ( op1 == DecisionType . Indeterminate_DP ) || ( op2 == DecisionType . Indeterminate_DP ) ) { return DecisionType . Indeterminate_DP ; } if ( op1 == DecisionType . Indeterminate_P ) { if ( op2 == DecisionType . Deny || op2 == DecisionType . Indeterminate_D ) { return DecisionType . Indeterminate_DP ; } else { return DecisionType . Indeterminate_P ; } } if ( op2 == DecisionType . Indeterminate_P ) { if ( op1 == DecisionType . Deny || op1 == DecisionType . Indeterminate_D ) { return DecisionType . Indeterminate_DP ; } else { return DecisionType . Indeterminate_P ; } } if ( op1 == DecisionType . Deny || op2 == DecisionType . Deny ) { return DecisionType . Deny ; } if ( op1 == DecisionType . Indeterminate_D || op2 == DecisionType . Indeterminate_D ) { return DecisionType . Indeterminate_D ; } return DecisionType . NotApplicable ; |
public class DefaultGroovyMethods { /** * Coerces the closure to an implementation of the given class . The class
* is assumed to be an interface or class with a single method definition .
* The closure is used as the implementation of that single method .
* @ param cl the implementation of the single method
* @ param clazz the target type
* @ return a Proxy of the given type which wraps this closure .
* @ since 1.0 */
@ SuppressWarnings ( "unchecked" ) public static < T > T asType ( Closure cl , Class < T > clazz ) { } } | if ( clazz . isInterface ( ) && ! ( clazz . isInstance ( cl ) ) ) { if ( Traits . isTrait ( clazz ) ) { Method samMethod = CachedSAMClass . getSAMMethod ( clazz ) ; if ( samMethod != null ) { Map impl = Collections . singletonMap ( samMethod . getName ( ) , cl ) ; return ( T ) ProxyGenerator . INSTANCE . instantiateAggregate ( impl , Collections . < Class > singletonList ( clazz ) ) ; } } return ( T ) Proxy . newProxyInstance ( clazz . getClassLoader ( ) , new Class [ ] { clazz } , new ConvertedClosure ( cl ) ) ; } try { return asType ( ( Object ) cl , clazz ) ; } catch ( GroovyCastException ce ) { try { return ( T ) ProxyGenerator . INSTANCE . instantiateAggregateFromBaseClass ( cl , clazz ) ; } catch ( GroovyRuntimeException cause ) { throw new GroovyCastException ( "Error casting closure to " + clazz . getName ( ) + ", Reason: " + cause . getMessage ( ) ) ; } } |
public class AfplibFactoryImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
public GSPTPATT createGSPTPATTFromString ( EDataType eDataType , String initialValue ) { } } | GSPTPATT result = GSPTPATT . get ( initialValue ) ; if ( result == null ) throw new IllegalArgumentException ( "The value '" + initialValue + "' is not a valid enumerator of '" + eDataType . getName ( ) + "'" ) ; return result ; |
public class RunStepRequest { /** * < pre >
* Partial run handle ( optional ) . If specified , this will be a partial run
* execution , run up to the specified fetches .
* < / pre >
* < code > optional string partial _ run _ handle = 6 ; < / code > */
public com . google . protobuf . ByteString getPartialRunHandleBytes ( ) { } } | java . lang . Object ref = partialRunHandle_ ; if ( ref instanceof java . lang . String ) { com . google . protobuf . ByteString b = com . google . protobuf . ByteString . copyFromUtf8 ( ( java . lang . String ) ref ) ; partialRunHandle_ = b ; return b ; } else { return ( com . google . protobuf . ByteString ) ref ; } |
public class PackratParserGenUtil { /** * TODO SZ : replace return value with ICharacterClass or similar */
public static List < AbstractRule > getConflictingLexerRules ( final Keyword keyword , final Grammar grammar ) { } } | AbstractRule rule = GrammarUtil . findRuleForName ( grammar , "ID" ) ; if ( rule != null ) { // TODO SZ : use interpreter
final StringWithOffset input = new StringWithOffset ( keyword . getValue ( ) ) ; if ( new TerminalsIDConsumer ( new MyTerminalConsumerConfiguration ( input ) ) . consume ( ) == ConsumeResult . SUCCESS ) { return Collections . singletonList ( rule ) ; } } return Collections . emptyList ( ) ; |
public class Cache { /** * synchronize so that each access gets the same item instance ( protect
* against overlapping calls ) note that expiration logic of cache element
* changes the element ' s state - - elements are never removed from cache or
* replaced */
private final synchronized CacheElement getCacheElement ( String userid ) { } } | String m = getCacheAbbrev ( ) + " getCacheElement() " ; CacheElement cacheElement = null ; String key = getKey ( userid ) ; logger . debug ( m + "key==" + key ) ; if ( cache . containsKey ( key ) ) { logger . debug ( m + "cache already has element" ) ; } else { logger . debug ( m + "cache does not have element; create and put" ) ; CacheElement itemtemp = new CacheElement ( userid , getCacheId ( ) , getCacheAbbrev ( ) ) ; cache . put ( key , itemtemp ) ; } cacheElement = ( CacheElement ) cache . get ( key ) ; if ( cacheElement == null ) { logger . error ( m + "cache does not contain element" ) ; } else { logger . debug ( m + "element retrieved from cache successfully" ) ; } return cacheElement ; |
public class SimpleHttpSender { /** * Send a single event to the collector
* @ param eventPayload Event to sent , created by the EventBuilder
* @ return true on success ( collector got the event ) , false otherwise ( event was lost ) */
public Future < Boolean > send ( final String eventPayload ) { } } | if ( client == null || client . isClosed ( ) ) { client = new AsyncHttpClient ( clientConfig ) ; } try { final AsyncHttpClient . BoundRequestBuilder requestBuilder = client . prepareGet ( collectorURI + eventPayload ) ; log . debug ( "Sending event to collector: {}" , eventPayload ) ; activeRequests . incrementAndGet ( ) ; return client . executeRequest ( requestBuilder . build ( ) , new AsyncCompletionHandler < Boolean > ( ) { @ Override public Boolean onCompleted ( final Response response ) { activeRequests . decrementAndGet ( ) ; if ( response . getStatusCode ( ) == 202 ) { return true ; } else { log . warn ( "Received response from collector {}: {}" , response . getStatusCode ( ) , response . getStatusText ( ) ) ; return false ; } } @ Override public void onThrowable ( final Throwable t ) { activeRequests . decrementAndGet ( ) ; } } ) ; } catch ( IOException e ) { // Recycle the client
client . close ( ) ; return null ; } |
public class Script { /** * / / / / / Interface used during verification of transactions / blocks / / / / / */
private static int getSigOpCount ( List < ScriptChunk > chunks , boolean accurate ) throws ScriptException { } } | int sigOps = 0 ; int lastOpCode = OP_INVALIDOPCODE ; for ( ScriptChunk chunk : chunks ) { if ( chunk . isOpCode ( ) ) { switch ( chunk . opcode ) { case OP_CHECKSIG : case OP_CHECKSIGVERIFY : sigOps ++ ; break ; case OP_CHECKMULTISIG : case OP_CHECKMULTISIGVERIFY : if ( accurate && lastOpCode >= OP_1 && lastOpCode <= OP_16 ) sigOps += decodeFromOpN ( lastOpCode ) ; else sigOps += 20 ; break ; default : break ; } lastOpCode = chunk . opcode ; } } return sigOps ; |
public class IBAN { /** * Removes any spaces contained in the String thereby converting the input into a plain IBAN
* @ param input
* possibly pretty printed IBAN
* @ return plain IBAN */
public static String toPlain ( String input ) { } } | Matcher matcher = SPACE_PATTERN . matcher ( input ) ; if ( matcher . find ( ) ) { return matcher . replaceAll ( "" ) ; } else { return input ; } |
public class SchedulerUtils { /** * construct heron scheduler response basing on the given result
* @ param isOK whether the request successful */
public static Scheduler . SchedulerResponse constructSchedulerResponse ( boolean isOK ) { } } | Common . Status . Builder status = Common . Status . newBuilder ( ) ; if ( isOK ) { status . setStatus ( Common . StatusCode . OK ) ; } else { status . setStatus ( Common . StatusCode . NOTOK ) ; } return Scheduler . SchedulerResponse . newBuilder ( ) . setStatus ( status ) . build ( ) ; |
public class DataSourceCreator { /** * 扩展功能 , 可以自定义一些自己实现的 dataSource */
private DataSource preCreate ( Long pipelineId , DbMediaSource dbMediaSource ) { } } | if ( CollectionUtils . isEmpty ( dataSourceHandlers ) ) { return null ; } DataSource dataSource = null ; for ( DataSourceHanlder handler : dataSourceHandlers ) { if ( handler . support ( dbMediaSource ) ) { dataSource = handler . create ( pipelineId , dbMediaSource ) ; if ( dataSource != null ) { return dataSource ; } } } return null ; |
public class TasksInner { /** * Updates a task with the specified parameters .
* @ param resourceGroupName The name of the resource group to which the container registry belongs .
* @ param registryName The name of the container registry .
* @ param taskName The name of the container registry task .
* @ param taskUpdateParameters The parameters for updating a task .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable for the request */
public Observable < TaskInner > updateAsync ( String resourceGroupName , String registryName , String taskName , TaskUpdateParameters taskUpdateParameters ) { } } | return updateWithServiceResponseAsync ( resourceGroupName , registryName , taskName , taskUpdateParameters ) . map ( new Func1 < ServiceResponse < TaskInner > , TaskInner > ( ) { @ Override public TaskInner call ( ServiceResponse < TaskInner > response ) { return response . body ( ) ; } } ) ; |
public class ModbusSerialTransport { /** * In microseconds
* @ return Delay between frames */
int getInterFrameDelay ( ) { } } | if ( commPort . getBaudRate ( ) > 19200 ) { return 1750 ; } else { return Math . max ( getCharInterval ( Modbus . INTER_MESSAGE_GAP ) , Modbus . MINIMUM_TRANSMIT_DELAY ) ; } |
public class GreetingResource { @ POST @ Path ( "/{id}" ) @ Produces ( "application/json" ) @ Consumes ( "application/json" ) public GreetingResponse greet ( @ Context HttpServletRequest httpRequest , @ PathParam ( "id" ) String id , @ QueryParam ( "opt" ) String opt , GreetingRequest request ) { } } | GreetingResponse response = greet ( httpRequest , request ) ; response . setGreeting ( "Hello #" + id + ", " + request . getName ( ) + "!" ) ; return response ; |
public class JenkinsHashFunction { /** * Convert 4 bytes from the buffer at offset into a long value . */
private long fourByteToLong ( byte [ ] bytes , int offset ) { } } | return ( byteToLong ( bytes [ offset + 0 ] ) + ( byteToLong ( bytes [ offset + 1 ] ) << 8 ) + ( byteToLong ( bytes [ offset + 2 ] ) << 16 ) + ( byteToLong ( bytes [ offset + 3 ] ) << 24 ) ) ; |
public class Actions { /** * { @ link # NOTHING } with type
* @ param < E > Type of Action to be returned
* @ return */
public static < E > Action < E > nothing ( ) { } } | @ SuppressWarnings ( "unchecked" ) Action < E > result = ( Action < E > ) NOTHING ; return result ; |
public class LocalContainerRegistry { /** * / * ( non - Javadoc )
* @ see org . jboss . arquillian . impl . domain . ContainerRegistryA # create ( org . jboss . arquillian . impl . configuration . api . ContainerDef , org . jboss . arquillian . core . spi . ServiceLoader ) */
@ Override public Container create ( ContainerDef definition , ServiceLoader loader ) { } } | Validate . notNull ( definition , "Definition must be specified" ) ; try { // TODO : this whole Classloading thing is a HACK and does not work . Need to split out into multiple JVMs for multi container testing
// ClassLoader containerClassLoader ;
// if ( definition . getDependencies ( ) . size ( ) > 0)
// final MavenDependencyResolver resolver = DependencyResolvers . use ( MavenDependencyResolver . class ) . artifacts (
// definition . getDependencies ( ) . toArray ( new String [ 0 ] ) ) ;
// URL [ ] resolvedURLs = MapObject . convert ( resolver . resolveAsFiles ( ) ) ;
// containerClassLoader = new FilteredURLClassLoader ( resolvedURLs , " org . jboss . ( arquillian | shrinkwrap ) . . * " ) ;
// else
// containerClassLoader = LocalContainerRegistry . class . getClassLoader ( ) ;
return addContainer ( // before a Container is added to a collection of containers , inject into its injection point
injector . inject ( new ContainerImpl ( definition . getContainerName ( ) , loader . onlyOne ( DeployableContainer . class ) , definition ) ) ) ; } catch ( Exception e ) { throw new ContainerCreationException ( "Could not create Container " + definition . getContainerName ( ) , e ) ; } |
public class MultiRowIterator { /** * returns expected row count */
private int setNoLastRow ( int firstComponentCount ) { } } | Arrays . fill ( lastRow , Integer . MAX_VALUE ) ; return firstComponentCount * generator . clusteringDescendantAverages [ 0 ] ; |
public class LocalDate { /** * Returns a copy of this date with different local millis .
* The returned object will be a new instance of the same type .
* Only the millis will change , the chronology is kept .
* The returned object will be either be a new instance or < code > this < / code > .
* @ param newMillis the new millis , from 1970-01-01T00:00:00
* @ return a copy of this date with different millis */
LocalDate withLocalMillis ( long newMillis ) { } } | newMillis = iChronology . dayOfMonth ( ) . roundFloor ( newMillis ) ; return ( newMillis == getLocalMillis ( ) ? this : new LocalDate ( newMillis , getChronology ( ) ) ) ; |
public class AppServiceEnvironmentsInner { /** * Get all App Service plans in an App Service Environment .
* Get all App Service plans in an App Service Environment .
* @ param nextPageLink The NextLink from the previous successful call to List operation .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the PagedList & lt ; AppServicePlanInner & gt ; object */
public Observable < ServiceResponse < Page < AppServicePlanInner > > > listAppServicePlansNextWithServiceResponseAsync ( final String nextPageLink ) { } } | return listAppServicePlansNextSinglePageAsync ( nextPageLink ) . concatMap ( new Func1 < ServiceResponse < Page < AppServicePlanInner > > , Observable < ServiceResponse < Page < AppServicePlanInner > > > > ( ) { @ Override public Observable < ServiceResponse < Page < AppServicePlanInner > > > call ( ServiceResponse < Page < AppServicePlanInner > > page ) { String nextPageLink = page . body ( ) . nextPageLink ( ) ; if ( nextPageLink == null ) { return Observable . just ( page ) ; } return Observable . just ( page ) . concatWith ( listAppServicePlansNextWithServiceResponseAsync ( nextPageLink ) ) ; } } ) ; |
public class CPSpecificationOptionPersistenceImpl { /** * Clears the cache for the cp specification option .
* The { @ link EntityCache } and { @ link FinderCache } are both cleared by this method . */
@ Override public void clearCache ( CPSpecificationOption cpSpecificationOption ) { } } | entityCache . removeResult ( CPSpecificationOptionModelImpl . ENTITY_CACHE_ENABLED , CPSpecificationOptionImpl . class , cpSpecificationOption . getPrimaryKey ( ) ) ; finderCache . clearCache ( FINDER_CLASS_NAME_LIST_WITH_PAGINATION ) ; finderCache . clearCache ( FINDER_CLASS_NAME_LIST_WITHOUT_PAGINATION ) ; clearUniqueFindersCache ( ( CPSpecificationOptionModelImpl ) cpSpecificationOption , true ) ; |
public class DirPattern { /** * Simple regexp filter mechanism for controlling which directories and / or files are included in the zipfile .
* Include Patterns override Exclude Patterns .
* @ param file
* @ param excludePattern
* @ param includePattern
* @ return if the file should be included . */
static boolean includePreference ( File file , Set < Pattern > excludePattern , Set < Pattern > includePattern , boolean includeByDefault ) { } } | boolean include = includeByDefault ; // Iterate over exclude patterns , if there is any match , exclude
if ( include ) { for ( Pattern pattern : excludePattern ) { Matcher excludeMatcher = pattern . matcher ( file . getAbsolutePath ( ) ) ; if ( excludeMatcher . find ( ) ) { include = false ; break ; } } } // Iterate over include patterns , if there is any match , include
if ( ! include ) { for ( Pattern pattern : includePattern ) { Matcher includeMatcher = pattern . matcher ( file . getAbsolutePath ( ) ) ; if ( includeMatcher . find ( ) ) { include = true ; // If we are here , we are overriding an exclude
break ; } } } return include ; |
public class QueryCriteriaUtil { /** * Depending on whether or not the given { @ link QueryCriteria } is a group criteria ( which then contains a { @ link List } < { @ link QueryCriteria } > )
* or a single { @ link QueryCriteria } , the correct method to process the given { @ link QueryCriteria } is called .
* @ param query The { @ link CriteriaQuery } that is being built
* @ param criteria The { @ link QueryCriteria } instance
* @ param builder The { @ link CriteriaBuilder } builder instance
* @ param queryType The ( persistent { @ link Entity } ) { @ link Class } that we are querying on
* @ return A { @ link Predicate } created on the basis of the given { @ link QueryCriteria } instance */
private < R , T > Predicate createPredicateFromSingleOrGroupCriteria ( CriteriaQuery < R > query , CriteriaBuilder builder , Class < T > queryType , QueryCriteria criteria , QueryWhere queryWhere ) { } } | Predicate predicate ; if ( criteria . isGroupCriteria ( ) ) { assert ! criteria . hasValues ( ) : "Criteria has both subcriteria (group criteria) and values! [" + criteria . toString ( ) + "]" ; predicate = createPredicateFromCriteriaList ( query , builder , queryType , criteria . getCriteria ( ) , queryWhere ) ; } else { assert ! criteria . hasCriteria ( ) || Integer . parseInt ( criteria . getListId ( ) ) < 0 : "Criteria has both values and subcriteria (group criteria)! [" + criteria . toString ( ) + "]" ; predicate = createPredicateFromSingleCriteria ( query , builder , queryType , criteria , queryWhere ) ; } return predicate ; |
public class SerializedFormBuilder { /** * Build the field sub header .
* @ param node the XML element that specifies which components to document
* @ param fieldsContentTree content tree to which the documentation will be added */
public void buildFieldSubHeader ( XMLNode node , Content fieldsContentTree ) { } } | if ( ! currentClass . definesSerializableFields ( ) ) { FieldDoc field = ( FieldDoc ) currentMember ; fieldWriter . addMemberHeader ( field . type ( ) . asClassDoc ( ) , field . type ( ) . typeName ( ) , field . type ( ) . dimension ( ) , field . name ( ) , fieldsContentTree ) ; } |
public class DescribePointSurfPlanar { /** * Specifies input image shapes .
* @ param grayII integral image of gray scale image
* @ param colorII integral image of color image */
public void setImage ( II grayII , Planar < II > colorII ) { } } | InputSanityCheck . checkSameShape ( grayII , colorII ) ; if ( colorII . getNumBands ( ) != numBands ) throw new IllegalArgumentException ( "Expected planar images to have " + numBands + " not " + colorII . getNumBands ( ) ) ; this . grayII = grayII ; this . colorII = colorII ; |
public class PortletRendererImpl { /** * Replay the cached content inside the { @ link CachedPortletData } as the response to a doRender . */
protected PortletRenderResult doRenderReplayCachedContent ( IPortletWindow portletWindow , HttpServletRequest httpServletRequest , CacheState < CachedPortletData < PortletRenderResult > , PortletRenderResult > cacheState , PortletOutputHandler portletOutputHandler , RenderPart renderPart , long baseExecutionTime ) throws IOException { } } | enforceConfigPermission ( httpServletRequest , portletWindow ) ; logger . debug ( "Replaying cached content for Render {} request to {}" , renderPart , portletWindow ) ; final long renderStartTime = System . nanoTime ( ) ; final CachedPortletData < PortletRenderResult > cachedPortletData = cacheState . getCachedPortletData ( ) ; cachedPortletData . replay ( portletOutputHandler ) ; final long executionTime = baseExecutionTime + ( System . nanoTime ( ) - renderStartTime ) ; publishRenderEvent ( portletWindow , httpServletRequest , renderPart , executionTime , true ) ; final PortletRenderResult portletResult = cachedPortletData . getPortletResult ( ) ; return new PortletRenderResult ( portletResult , executionTime ) ; |
public class RC4 { /** * 交换指定两个位置的值
* @ param i 位置1
* @ param j 位置2
* @ param sbox 数组 */
private void swap ( int i , int j , int [ ] sbox ) { } } | int temp = sbox [ i ] ; sbox [ i ] = sbox [ j ] ; sbox [ j ] = temp ; |
public class AddMojo { /** * region Entry Point */
@ Override protected void doExecute ( ) throws Exception { } } | final List < FunctionTemplate > templates = loadAllFunctionTemplates ( ) ; final FunctionTemplate template = getFunctionTemplate ( templates ) ; final BindingTemplate bindingTemplate = loadBindingTemplate ( template . getTriggerType ( ) ) ; final Map params = prepareRequiredParameters ( template , bindingTemplate ) ; final String newFunctionClass = substituteParametersInTemplate ( template , params ) ; saveNewFunctionToFile ( newFunctionClass ) ; |
public class JsonConfig { /** * Registers a JsonValueProcessor . < br >
* [ Java - & gt ; JSON ]
* @ param beanClass the class to use as key
* @ param propertyType the property type to use as key
* @ param jsonValueProcessor the processor to register */
public void registerJsonValueProcessor ( Class beanClass , Class propertyType , JsonValueProcessor jsonValueProcessor ) { } } | if ( beanClass != null && propertyType != null && jsonValueProcessor != null ) { beanTypeMap . put ( beanClass , propertyType , jsonValueProcessor ) ; } |
public class EphemeralKafkaBroker { /** * Create a minimal consumer configuration . Offset is set to " earliest " .
* @ param enableAutoCommit Enable auto commit
* @ return Properties */
public Properties consumerConfig ( boolean enableAutoCommit ) { } } | Properties props = new Properties ( ) ; props . put ( "bootstrap.servers" , LOCALHOST + ":" + kafkaPort ) ; props . put ( "group.id" , "kafka-junit-consumer" ) ; props . put ( "enable.auto.commit" , String . valueOf ( enableAutoCommit ) ) ; props . put ( "auto.commit.interval.ms" , "10" ) ; props . put ( "auto.offset.reset" , "earliest" ) ; props . put ( "heartbeat.interval.ms" , "100" ) ; props . put ( "session.timeout.ms" , "200" ) ; props . put ( "fetch.max.wait.ms" , "200" ) ; props . put ( "metadata.max.age.ms" , "100" ) ; return props ; |
public class SQLFunctions { /** * cast ( year as int ) */
private static String convertType ( SQLExpr script ) { } } | String [ ] variance = Util . expr2Object ( script ) . toString ( ) . split ( ";" ) ; String newScript = variance [ variance . length - 1 ] ; if ( newScript . trim ( ) . startsWith ( "def " ) ) { // for now , if variant is string , then change to double .
String temp = newScript . trim ( ) . substring ( 4 ) . split ( "=" ) [ 0 ] . trim ( ) ; return " if( " + temp + " instanceof String) " + temp + "= Double.parseDouble(" + temp . trim ( ) + "); " ; } else return "" ; |
public class NetworkServiceDescriptorAgent { /** * Add a new VNFDependency to a specific NetworkServiceDescriptor .
* @ param idNSD the ID of the NetworkServiceDescriptor
* @ param vnfDependency the new VNFDependency
* @ return the new VNFDependency
* @ throws SDKException if the request fails */
@ Help ( help = "Create the VirtualNetworkFunctionDescriptor dependency of a NetworkServiceDescriptor with specific id" ) public VNFDependency createVNFDependency ( final String idNSD , final VNFDependency vnfDependency ) throws SDKException { } } | String url = idNSD + "/vnfdependencies" + "/" ; return ( VNFDependency ) requestPost ( url , vnfDependency ) ; |
public class CmsAfterPublishStaticExportHandler { /** * Creates a list of < code > { @ link org . opencms . db . CmsPublishedResource } < / code > objects containing all related resources of the VFS tree . < p >
* If the static export has been triggered by the OpenCms workplace , publishedResources is null and all resources in the VFS tree are returned . < p >
* If really an after publish static export is triggered , then only the related resources are returned . < p >
* @ param cms the current cms object
* @ param publishedResources the list of published resources
* @ return list of CmsPulishedResource objects containing all resources of the VFS tree
* @ throws CmsException in case of errors accessing the VFS */
protected List < CmsPublishedResource > getRelatedResources ( CmsObject cms , List < CmsPublishedResource > publishedResources ) throws CmsException { } } | String storedSiteRoot = cms . getRequestContext ( ) . getSiteRoot ( ) ; try { // switch to root site
cms . getRequestContext ( ) . setSiteRoot ( "/" ) ; if ( publishedResources == null ) { // full static export
return getAllResources ( cms ) ; } else { // after publish export
Map < String , CmsPublishedResource > resourceMap = new HashMap < String , CmsPublishedResource > ( ) ; Iterator < CmsPublishedResource > itPubRes = publishedResources . iterator ( ) ; while ( itPubRes . hasNext ( ) ) { CmsPublishedResource pubResource = itPubRes . next ( ) ; // check the internal flag if the resource does still exist
// we cannot export with an internal flag
if ( cms . existsResource ( pubResource . getRootPath ( ) ) ) { CmsResource vfsResource = cms . readResource ( pubResource . getRootPath ( ) ) ; if ( ! vfsResource . isInternal ( ) ) { // add only if not internal
// additionally , add all siblings of the resource
Iterator < CmsPublishedResource > itSiblings = getSiblings ( cms , pubResource ) . iterator ( ) ; while ( itSiblings . hasNext ( ) ) { CmsPublishedResource sibling = itSiblings . next ( ) ; resourceMap . put ( sibling . getRootPath ( ) , sibling ) ; } } } else { // the resource does not exist , so add them for deletion in the static export
resourceMap . put ( pubResource . getRootPath ( ) , pubResource ) ; } boolean match = false ; Iterator < CmsStaticExportExportRule > itExportRules = OpenCms . getStaticExportManager ( ) . getExportRules ( ) . iterator ( ) ; while ( itExportRules . hasNext ( ) ) { CmsStaticExportExportRule rule = itExportRules . next ( ) ; Set < CmsPublishedResource > relatedResources = rule . getRelatedResources ( cms , pubResource ) ; if ( relatedResources != null ) { Iterator < CmsPublishedResource > itRelatedRes = relatedResources . iterator ( ) ; while ( itRelatedRes . hasNext ( ) ) { CmsPublishedResource relatedRes = itRelatedRes . next ( ) ; resourceMap . put ( relatedRes . getRootPath ( ) , relatedRes ) ; } match = true ; } } // if one res does not match any rule , then export all files
if ( ! match ) { return getAllResources ( cms ) ; } } return new ArrayList < CmsPublishedResource > ( resourceMap . values ( ) ) ; } } finally { cms . getRequestContext ( ) . setSiteRoot ( storedSiteRoot ) ; } |
public class Bean { /** * Return the list of property names which are references .
* @ return list of property names . */
public List < String > getReferenceNames ( ) { } } | ArrayList < String > names = new ArrayList < > ( references . keySet ( ) ) ; Collections . sort ( names ) ; return names ; |
public class BrowserApplicationCache { /** * Adds the browser application list to the browser applications for the account .
* @ param browserApplications The browser applications to add */
public void add ( Collection < BrowserApplication > browserApplications ) { } } | for ( BrowserApplication browserApplication : browserApplications ) this . browserApplications . put ( browserApplication . getId ( ) , browserApplication ) ; |
public class PluggableMethodArgCoercer { /**
 * Replaces the registered coercer map with the given one.
 * Note: this is replace semantics, not merge — any previously registered
 * convertors are discarded before the new entries are copied in.
 *
 * @param convertors map of target class to (coercer, priority) tuple
 */
public void setConvertors(Map<Class<?>, Tuple2<ObjectCoercer<Object, Object, Exception>, Integer>> convertors) { } } |
// Clear first so stale entries from a previous configuration cannot linger.
this.convertors.clear();
this.convertors.putAll(convertors);
public class PropertyUtil { /**
 * Returns the named property interpreted as a boolean value.
 * The property is true only if its value equals "true" (case-insensitive).
 *
 * @param name the property name to look up
 * @return true if the property exists and equals "true" (ignoring case),
 *         false otherwise (including when the property is not found)
 */
public static boolean getPropertyAsBoolean(String name) { } } |
// Null-safe: a missing property simply yields false.
String value = getProperty(name);
return value != null && value.equalsIgnoreCase("true");
public class LabAccountsInner { /** * Get lab account .
* @ param resourceGroupName The name of the resource group .
* @ param labAccountName The name of the lab Account .
* @ param serviceCallback the async ServiceCallback to handle successful and failed responses .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the { @ link ServiceFuture } object */
public ServiceFuture < LabAccountInner > getByResourceGroupAsync ( String resourceGroupName , String labAccountName , final ServiceCallback < LabAccountInner > serviceCallback ) { } } | return ServiceFuture . fromResponse ( getByResourceGroupWithServiceResponseAsync ( resourceGroupName , labAccountName ) , serviceCallback ) ; |
public class UnpackingSoSource { /**
 * Prepares this SoSource by re-extracting a corrupted library.
 *
 * @param soName the name of the corrupted shared library to recover
 * @throws IOException if extraction fails
 */
protected synchronized void prepare(String soName) throws IOException { } } |
// Only one thread at a time may recover a given corrupted lib from this source;
// a per-library lock serializes recovery attempts for the same soName.
Object lock = getLibraryLock(soName);
synchronized (lock) {
    // While recovering, record which lib is corrupted so concurrent loads of the
    // same lib from other threads are blocked/redirected.
    mCorruptedLib = soName;
    // Force a full refresh of the extracted artifacts.
    prepare(SoSource.PREPARE_FLAG_FORCE_REFRESH);
}
public class AndroidCryptoUtils { /**
 * Decrypts a region of a byte array with AES in CFB mode (no padding),
 * using the "ZBC" provider.
 *
 * @param data buffer containing the ciphertext
 * @param offset start of the ciphertext within {@code data}
 * @param length number of ciphertext bytes to decrypt
 * @param key raw AES key bytes
 * @param initVector initialization vector
 * @return the decrypted bytes
 * @throws CryptoException if any step of the decryption fails
 * @see zorg.platform.CryptoUtils#aesDecrypt(byte[], int, int, byte[], byte[])
 */
@Override public byte[] aesDecrypt(byte[] data, int offset, int length, byte[] key, byte[] initVector) throws CryptoException { } } |
try {
    SecretKeySpec scs = new SecretKeySpec(key, "AES");
    Cipher cipher = Cipher.getInstance("AES/CFB/NoPadding", "ZBC");
    cipher.init(Cipher.DECRYPT_MODE, scs, new IvParameterSpec(initVector));
    ByteArrayOutputStream baos = new ByteArrayOutputStream(length);
    // try-with-resources guarantees the cipher stream is closed (flushing the
    // final block) even if write() throws — the original leaked both streams
    // on failure.
    try (CipherOutputStream out = new CipherOutputStream(baos, cipher)) {
        out.write(data, offset, length);
    }
    // closing a ByteArrayOutputStream is a no-op; safe to read after close above
    return baos.toByteArray();
} catch (Exception e) {
    // Wrap every provider/IO failure in the platform CryptoException.
    throw new CryptoException(e);
}
public class Types { /**
 * Returns a boolean interpretation of an object instance, resembling the
 * JavaScript cast of an object to a boolean value. Tested conditions:
 * <ul>
 * <li>if {@link #isBoolean(Object)} returns the boolean value,
 * <li>if {@link #isNumber(Object)} returns true if the number is non-zero (and not NaN),
 * <li>if the instance is a string returns true if the string is not empty,
 * <li>if {@link #isArray(Object)}, {@link #isCollection(Object)} or {@link #isMap(Object)}
 *     returns true if it is not empty,
 * <li>if {@link #isCharacter(Object)} returns true if the character is defined.
 * </ul>
 * If the object instance is null returns false.
 *
 * @param o object instance to interpret as boolean.
 * @return boolean representation of object instance.
 */
public static boolean asBoolean(Object o) { } } |
if (o == null) {
    return false;
}
if (isBoolean(o)) {
    return (Boolean) o;
}
if (isNumber(o)) {
    // BUGFIX: byteValue() truncates — e.g. 256 or 0.5 became 0 (false),
    // contradicting the documented "true if number is not zero".
    // Compare the full double value instead; NaN stays false, matching
    // both JavaScript semantics and the old byteValue(NaN)==0 behavior.
    double d = ((Number) o).doubleValue();
    return d != 0 && !Double.isNaN(d);
}
if (o instanceof String) {
    return !((String) o).isEmpty();
}
if (isArray(o)) {
    return Array.getLength(o) > 0;
}
if (isCollection(o)) {
    return !((Collection<?>) o).isEmpty();
}
if (isMap(o)) {
    return !((Map<?, ?>) o).isEmpty();
}
if (isCharacter(o)) {
    return Character.isDefined((Character) o);
}
// Any other non-null object is truthy, as in JavaScript.
return true;
public class PeerSocketHandler { /**
 * Sends the given message to the peer. Due to the asynchronous nature of network
 * programming, there is no guarantee the peer will have received it.
 *
 * @param message the message to serialize and send
 * @throws NotYetConnectedException if we are not yet connected to the remote peer
 *         (TODO: maybe use something other than the unchecked NotYetConnectedException here)
 */
public void sendMessage(Message message) throws NotYetConnectedException { } } |
// Check connectivity under the lock ...
lock.lock();
try {
    if (writeTarget == null)
        throw new NotYetConnectedException();
} finally {
    lock.unlock();
}
// ... but note the write below happens OUTSIDE the lock — writeTarget is
// re-read unguarded after release. NOTE(review): this appears to assume
// writeTarget is never reset to null once set; confirm against the class's
// connection lifecycle.
// TODO: Some round-tripping could be avoided here (serialize straight into
// the write target instead of an intermediate byte array).
ByteArrayOutputStream out = new ByteArrayOutputStream();
try {
    serializer.serialize(message, out);
    writeTarget.writeBytes(out.toByteArray());
} catch (IOException e) {
    // Serialization/write failures are routed to the standard exception handler
    // rather than propagated to the caller.
    exceptionCaught(e);
}
public class FullSegmentation { /**
 * Walks backwards from each leaf node to produce the full-segmentation results
 * (one word list per leaf path through the segmentation tree).
 *
 * @param leaf the collection of leaf nodes
 * @return an array of word lists, one entry per leaf, in iteration order
 */
private List<Word>[] toWords(List<Node> leaf) { } } |
// Generic array creation is not possible in Java; the raw ArrayList[] here
// triggers an unchecked warning by design.
List<Word>[] result = new ArrayList[leaf.size()];
int i = 0;
if (LOGGER.isDebugEnabled()) {
    LOGGER.debug("全切分结果:");
}
for (Node node : leaf) {
    // Delegate the actual back-traversal of a single leaf to the overload.
    result[i++] = toWords(node);
    if (LOGGER.isDebugEnabled()) {
        // i was already incremented, so log result[i - 1] (the entry just filled)
        LOGGER.debug("\t" + i + ":" + result[i - 1]);
    }
}
return result;
public class RRBudgetV1_0Generator { /**
 * Builds the Equipment element for the RRBudget: EquipmentItem, FundsRequested,
 * TotalFundForAttachedEquipment, TotalFund and AdditionalEquipmentsAttachment,
 * all derived from the given BudgetPeriodInfo.
 *
 * @param periodInfo (BudgetPeriodDto) budget period entry.
 * @return Equipment costs corresponding to the BudgetPeriodInfo object.
 */
private Equipment getEquipment(BudgetPeriodDto periodInfo) { } } |
Equipment equipment = Equipment.Factory.newInstance();
NarrativeContract extraEquipmentNarr = null;
if (periodInfo != null && periodInfo.getEquipment() != null && periodInfo.getEquipment().size() > 0) {
    // Evaluating Equipments. Only the first equipment group is consulted.
    List<EquipmentList> equipmentArrayList = new ArrayList<>();
    ScaleTwoDecimal totalFund = ScaleTwoDecimal.ZERO;
    for (CostDto costInfo : periodInfo.getEquipment().get(0).getEquipmentList()) {
        EquipmentList equipmentList = EquipmentList.Factory.newInstance();
        equipmentList.setEquipmentItem(costInfo.getDescription());
        if (costInfo.getCost() != null) {
            equipmentList.setFundsRequested(costInfo.getCost().bigDecimalValue());
        }
        totalFund = totalFund.add(costInfo.getCost());
        equipmentArrayList.add(equipmentList);
    }
    // Evaluating Extra Equipments: collected separately, summed into their own total.
    List<CostDto> extraEquipmentArrayList = new ArrayList<>();
    ScaleTwoDecimal totalExtraEquipFund = ScaleTwoDecimal.ZERO;
    for (CostDto costInfo : periodInfo.getEquipment().get(0).getExtraEquipmentList()) {
        extraEquipmentArrayList.add(costInfo);
        totalExtraEquipFund = totalExtraEquipFund.add(costInfo.getCost());
    }
    EquipmentList[] equipmentArray = new EquipmentList[0];
    equipmentArray = equipmentArrayList.toArray(equipmentArray);
    equipment.setEquipmentListArray(equipmentArray);
    TotalFundForAttachedEquipment totalFundForAttachedEquipment = TotalFundForAttachedEquipment.Factory.newInstance();
    totalFundForAttachedEquipment.setTotalFundForAttachedEquipmentExist(YesNoDataType.YES);
    totalFundForAttachedEquipment.setBigDecimalValue(periodInfo.getEquipment().get(0).getTotalExtraFund().bigDecimalValue());
    // Grand total = itemized equipment + extra equipment.
    totalFund = totalFund.add(totalExtraEquipFund);
    equipment.setTotalFundForAttachedEquipment(totalFundForAttachedEquipment);
    equipment.setTotalFund(totalFund.bigDecimalValue());
    // Persist the extra-equipment narrative; non-null result drives the attachment below.
    extraEquipmentNarr = saveAdditionalEquipments(periodInfo, extraEquipmentArrayList);
}
if (extraEquipmentNarr != null) {
    AdditionalEquipmentsAttachment equipmentAttachment = AdditionalEquipmentsAttachment.Factory.newInstance();
    FileLocation fileLocation = FileLocation.Factory.newInstance();
    equipmentAttachment.setFileLocation(fileLocation);
    String contentId = createContentId(extraEquipmentNarr);
    fileLocation.setHref(contentId);
    // NOTE(review): setFileLocation is called twice with the same object —
    // the second call is redundant (the href mutation above is visible through
    // the shared reference); candidate for cleanup.
    equipmentAttachment.setFileLocation(fileLocation);
    equipmentAttachment.setFileName(extraEquipmentNarr.getNarrativeAttachment().getName());
    equipmentAttachment.setMimeType(InfastructureConstants.CONTENT_TYPE_OCTET_STREAM);
    equipmentAttachment.setHashValue(getHashValue(extraEquipmentNarr.getNarrativeAttachment().getData()));
    // Register the raw attachment bytes under the same contentId referenced by the href.
    AttachmentData attachmentData = new AttachmentData();
    attachmentData.setContent(extraEquipmentNarr.getNarrativeAttachment().getData());
    attachmentData.setContentId(contentId);
    attachmentData.setContentType(InfastructureConstants.CONTENT_TYPE_OCTET_STREAM);
    attachmentData.setFileName(extraEquipmentNarr.getNarrativeAttachment().getName());
    addAttachment(attachmentData);
    equipmentAttachment.setTotalFundForAttachedEquipmentExist(YesNoDataType.YES);
    equipment.setAdditionalEquipmentsAttachment(equipmentAttachment);
}
return equipment;
public class TiffReader { /**
 * Gets the value of the given tag field.
 *
 * @param tagtype the tag type
 * @param n the cardinality
 * @param id the tag id
 * @param beginOffset the offset position of the tag value
 * @param parentIFD the parent ifd
 * @param nifd the ifd number
 * @return the tag value object
 */
protected TagValue getValue(int tagtype, int n, int id, int beginOffset, IFD parentIFD, int nifd) { } } |
int type = tagtype;
// Tag 330 (SubIFDs) is forced to type 13 (IFD) regardless of declared type.
if (id == 330 && type != 13)
    type = 13;
// Create TagValue object; tag offset points at the directory entry itself (8 bytes back).
TagValue tv = new TagValue(id, type);
tv.setTagOffset(beginOffset - 8);
// Defined tags
int offset = beginOffset;
// Get type Size
int typeSize = TiffTags.getTypeSize(type);
boolean ok = true;
// Check if the tag value fits in the directory entry value field, and get offset if not
if (typeSize * n > tagValueSize) {
    try {
        offset = data.readLong(offset).toInt();
        if (offset % 2 != 0) {
            // NOTE(review): these first error locations use "IFD" + n (the cardinality),
            // while later ones use "IFD" + nifd — possibly a bug; confirm intent.
            validation.addErrorLoc("Bad word alignment in the offset of tag " + id, "IFD" + n);
        }
    } catch (Exception ex) {
        validation.addErrorLoc("Parse error getting tag " + id + " value", "IFD" + n);
        ok = false;
    }
}
tv.setReadOffset(offset);
tv.setReadLength(n);
if (ok) {
    try {
        for (int i = 0; i < n; i++) {
            // Get N tag values; case labels follow the TIFF 6.0 field type codes.
            switch (type) {
                case 1: tv.add(data.readByte(offset)); break;
                case 2: tv.add(data.readAscii(offset)); break;
                case 6: tv.add(data.readSByte(offset)); break;
                case 7: tv.add(data.readUndefined(offset)); break;
                case 3: tv.add(data.readShort(offset)); break;
                case 8: tv.add(data.readSShort(offset)); break;
                case 4: tv.add(data.readLong(offset)); break;
                case 9: tv.add(data.readSLong(offset)); break;
                case 5: tv.add(data.readRational(offset)); break;
                case 10: tv.add(data.readSRational(offset)); break;
                case 11: tv.add(data.readFloat(offset)); break;
                case 12: tv.add(data.readDouble(offset)); break;
                case 13:
                    // Sub-IFD: recurse and link parent/child both ways.
                    int ifdOffset = data.readLong(offset).toInt();
                    if (ifdOffset % 2 != 0) {
                        validation.addErrorLoc("Bad word alignment in the offset of the sub IFD", "IFD" + n);
                    }
                    IfdReader ifd = readIFD(ifdOffset, true, -nifd);
                    IFD subIfd = ifd.getIfd();
                    subIfd.setParent(parentIFD);
                    parentIFD.setsubIFD(subIfd);
                    tv.add(subIfd);
                    break;
            }
            offset += typeSize;
        }
    } catch (Exception ex) {
        validation.addErrorLoc("Parse error getting tag " + id + " value", "IFD" + nifd);
        ok = false;
    }
}
// ASCII values are collapsed into a single string after reading.
if (type == 2) {
    tv.readString();
}
if (ok && TiffTags.hasTag(id)) {
    Tag t = TiffTags.getTag(id);
    if (t.hasTypedef() && !t.getTypedef().equals("SubIFD")) {
        // Known tags with a typedef get decoded via reflection into their model class.
        String tagclass = t.getTypedef();
        try {
            abstractTiffType instanceOfMyClass = (abstractTiffType) Class.forName("com.easyinnova.tiff.model.types." + tagclass).getConstructor().newInstance();
            if (instanceOfMyClass.isIFD()) {
                // IFD-typed tags (e.g. Exif pointer): replace the raw offset with the parsed IFD.
                long ifdOffset = tv.getFirstNumericValue();
                try {
                    if (ifdOffset % 2 != 0) {
                        validation.addErrorLoc("Bad word alignment in the offset of Exif", "IFD" + n);
                    }
                    IfdReader ifd = readIFD((int) ifdOffset, false, -1);
                    IFD exifIfd = ifd.getIfd();
                    exifIfd.setIsIFD(true);
                    tv.clear();
                    tv.add(exifIfd);
                } catch (Exception ex) {
                    validation.addErrorLoc("Parse error in Exif", "IFD" + nifd);
                }
            } else {
                // Tag 33723 (IPTC) additionally needs the file path for its decoder.
                if (tv.getId() == 33723)
                    instanceOfMyClass.read(tv, data.getFilePath());
                else
                    instanceOfMyClass.read(tv);
            }
        // NOTE(review): these catch arms are identical; a Java 7 multi-catch would
        // collapse them, but the chain is kept byte-identical here.
        } catch (ClassNotFoundException e) {
            validation.addErrorLoc("Parse error getting tag " + id + " value", "IFD" + nifd);
        } catch (NoSuchMethodException e) {
            validation.addErrorLoc("Parse error getting tag " + id + " value", "IFD" + nifd);
        } catch (SecurityException e) {
            validation.addErrorLoc("Parse error getting tag " + id + " value", "IFD" + nifd);
        } catch (InstantiationException e) {
            validation.addErrorLoc("Parse error getting tag " + id + " value", "IFD" + nifd);
        } catch (IllegalAccessException e) {
            validation.addErrorLoc("Parse error getting tag " + id + " value", "IFD" + nifd);
        } catch (IllegalArgumentException e) {
            validation.addErrorLoc("Parse error getting tag " + id + " value", "IFD" + nifd);
        } catch (InvocationTargetException e) {
            validation.addErrorLoc("Parse error getting tag " + id + " value", "IFD" + nifd);
        } catch (Exception e) {
            validation.addErrorLoc("Parse error getting tag " + id + " value", "IFD" + nifd);
        }
    }
}
if (ok)
    tv.setReadValue();
return tv;
public class QueueConsumerGroup { /**
 * Stops all the working threads one by one; this method will not return until
 * all threads are stopped.
 *
 * @param afterQueueEmpty true if working threads should keep processing until the
 *        queue is empty; false if working threads should stop after finishing their
 *        current work.
 */
public void stop(boolean afterQueueEmpty) { } } |
// Phase 1: signal every consumer to begin shutting down, so they wind down in parallel.
for (final QueueConsumer<E> worker : consumers.values()) {
    worker.preStop(afterQueueEmpty);
}
// Phase 2: wait for each consumer to actually finish.
for (final QueueConsumer<E> worker : consumers.values()) {
    worker.stop(afterQueueEmpty);
}
public class FastStr { /** * Wrapper of { @ link String # toUpperCase ( java . util . Locale ) } but return FastStr type instance
* @ param locale the locale
* @ return a FastStr instance with all characters from this FastStr
* be converted into uppercase based on the locale specified */
@ Override public FastStr toUpperCase ( Locale locale ) { } } | String s = toString ( ) ; return unsafeOf ( s . toUpperCase ( locale ) ) ; |
public class PropertiesAdapter { /** * Gets the assigned value of the named property as an instance of the specified { @ link Class } type .
* @ param < T > { @ link Class } type of the return value .
* @ param propertyName the name of the property to get .
* @ param type Class type of the value to return for the specified property .
* @ return the assigned value of the named property as an instance of the specified { @ link Class } type .
* @ see # getAsType ( String , Class , Object ) */
public < T > T getAsType ( String propertyName , Class < T > type ) { } } | return getAsType ( propertyName , type , null ) ; |
public class EntryStream { /**
 * Creates an EntryStream over the element/occurrence-count entries of the given
 * multiset. A null multiset yields an empty stream.
 *
 * @param multiset the multiset to stream, may be null
 * @return an entry stream of element-to-count pairs
 */
public static <E> EntryStream<E, Integer> of(final Multiset<E> multiset) { } } |
if (multiset == null) {
    return EntryStream.<E, Integer> empty();
}
return multiset.entryStream();
public class EsMarshalling { /**
 * Marshals the given user bean into a JSON content builder.
 *
 * @param bean the user bean to marshal
 * @return the content builder holding the serialized user fields
 * @throws StorageException when a storage problem occurs while storing a bean
 */
public static XContentBuilder marshall(UserBean bean) throws StorageException { } } |
// try-with-resources closes the builder on failure; on success the (closed)
// builder is still returned — XContentBuilder contents remain readable after close.
try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
    preMarshall(bean);
    builder.startObject()
        .field("username", bean.getUsername())
        .field("email", bean.getEmail())
        .field("fullName", bean.getFullName())
        // Dates are stored as epoch millis; null joinedOn stays null.
        .field("joinedOn", bean.getJoinedOn() == null ? null : bean.getJoinedOn().getTime())
        .endObject();
    postMarshall(bean);
    return builder;
} catch (IOException e) {
    throw new StorageException(e);
}
public class AbstractHttpWriter { /**
 * Default implementation blocks on the future, relying on HttpClient's socket
 * timeout (elapsed time between the last packet sent by the client and the
 * server's response).
 * {@inheritDoc}
 *
 * @param responseFuture the pending HTTP response
 * @return the completed HTTP response
 * @throws RuntimeException wrapping any interruption or execution failure
 * @see org.apache.gobblin.writer.http.HttpWriterDecoration#waitForResponse(com.google.common.util.concurrent.ListenableFuture)
 */
@Override public CloseableHttpResponse waitForResponse(ListenableFuture<CloseableHttpResponse> responseFuture) { } } |
try {
    return responseFuture.get();
} catch (InterruptedException e) {
    // BUGFIX: restore the interrupt status so callers further up the stack can
    // still observe the interruption (the original swallowed it).
    Thread.currentThread().interrupt();
    throw new RuntimeException(e);
} catch (ExecutionException e) {
    throw new RuntimeException(e);
}
public class AsyncUpdateThread { /**
 * Enqueues an AsyncUpdate for batched execution. If the updater thread is not
 * currently executing and the batch threshold is exceeded, execution is kicked off.
 *
 * @param unit the AsyncUpdate to enqueue
 * @throws ClosedException if this update thread has been closed
 */
public void enqueueWork(AsyncUpdate unit) throws ClosedException { } } |
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
    SibTr.entry(tc, "enqueueWork", unit);
// All queue/flag state is guarded by this object's monitor.
synchronized (this) {
    if (closed) {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "enqueueWork", "ClosedException");
        throw new ClosedException();
    }
    if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
        SibTr.debug(tc, "Enqueueing update: " + unit);
    enqueuedUnits.add(unit);
    // If an execution pass is already running it will pick this unit up; done.
    if (executing) {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "enqueueWork", "AsyncUpdateThread executing");
        return;
    }
    // not executing enqueued updates — start a pass once the batch threshold is crossed
    if (enqueuedUnits.size() > batchThreshold) {
        executeSinceExpiry = true;
        try {
            startExecutingUpdates();
        } catch (ClosedException e) {
            // No FFDC code needed
            if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
                SibTr.exit(tc, "enqueueWork", e);
            throw e;
        }
    }
}
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
    SibTr.exit(tc, "enqueueWork");
public class EJSContainer { /**
 * Returns a reference to the EJBHome associated with a specified J2EEName.
 *
 * @param name the J2EEName of the EJB's home object.
 * @return the remote EJBHome stub for the named EJB.
 * @exception ContainerEJBException if any Throwable occurs that prevented this
 *            method from returning the EJBHome. Use the getCause method to
 *            recover the Throwable that occurred.
 */
public EJBHome getEJBHome(J2EEName name) throws ContainerEJBException { } } |
try {
    EJSWrapperCommon wrapperCommon = getHomeWrapperCommon(name);
    EJSWrapper wrapper = wrapperCommon.getRemoteWrapper();
    // toStub converts the server-side wrapper into a client-invocable RMI-IIOP stub. // PM35068
    return (EJBHome) PortableRemoteObject.toStub(wrapper);
} catch (RemoteException re) {
    FFDCFilter.processException(re, CLASS_NAME + ".getEJBHome", "2955", this);
    // Prefer the nested cause (legacy RemoteException.detail) when present so the
    // caller's getCause() yields the root failure.
    if (re.detail == null) {
        throw new ContainerEJBException("Could not get EJBHome", re);
    } else {
        throw new ContainerEJBException("Could not get EJBHome", re.detail);
    }
} catch (Throwable t) {
    FFDCFilter.processException(t, CLASS_NAME + ".getEJBHome", "2967", this);
    throw new ContainerEJBException("Could not get EJBHome", t);
}
public class ST_Svf { /**
 * Computes the Sky View Factor (SVF) at a point: the fraction of sky visible
 * when obstruction geometries are taken into account, sampled with a fan of rays.
 *
 * @param pt the observation point (z treated as 0 when NaN)
 * @param geoms obstruction geometries (lines/polygons with z values)
 * @param distance maximum ray length; must be &gt; 0
 * @param rayCount number of rays; must be &gt;= 4
 * @param stepRayLength length of each sub-ray, used to limit the number of
 *        geometries fetched per spatial query; must be &gt; 0
 * @return the SVF in [0, 1], or -1 when pt/geoms is null or geoms has dimension 0
 */
public static double computeSvf(Point pt, Geometry geoms, double distance, int rayCount, int stepRayLength) { } } |
double svf = -1;
if (pt == null) {
    return svf;
}
if (geoms == null) {
    return svf;
}
if (distance <= 0) {
    throw new IllegalArgumentException("The distance value must be greater than 0");
}
if (rayCount < 4) {
    throw new IllegalArgumentException("The number of rays must be greater than or equal to 4");
}
if (stepRayLength <= 0) {
    throw new IllegalArgumentException("The ray length parameter must be greater than 0");
}
// NOTE(review): mutating the static RAY_STEP_LENGTH makes this method
// non-reentrant across threads — confirm single-threaded use.
RAY_STEP_LENGTH = stepRayLength;
if (geoms.getDimension() > 0) {
    GeometryFactory factory = pt.getFactory();
    // Convert input geoms to a set of line segments indexed in an STR-tree.
    STRtree sTRtree = new STRtree();
    int nbGeoms = geoms.getNumGeometries();
    for (int i = 0; i < nbGeoms; i++) {
        Geometry subGeom = geoms.getGeometryN(i);
        if (subGeom instanceof LineString) {
            addSegments(subGeom.getCoordinates(), factory, sTRtree);
        } else if (subGeom instanceof Polygon) {
            // Index the shell and every hole of the polygon.
            Polygon p = (Polygon) subGeom;
            addSegments(p.getExteriorRing().getCoordinates(), factory, sTRtree);
            int nbInterior = p.getNumInteriorRing();
            for (int j = 0; j < nbInterior; j++) {
                addSegments(p.getInteriorRingN(j).getCoordinates(), factory, sTRtree);
            }
        }
    }
    Coordinate startCoordinate = pt.getCoordinate();
    double startZ = Double.isNaN(startCoordinate.z) ? 0 : startCoordinate.z;
    // Start from the full hemisphere solid-angle budget and subtract per ray.
    double sumArea = 2 * Math.PI;
    double elementaryAngle = sumArea / rayCount;
    int stepCount = (int) Math.round(distance / RAY_STEP_LENGTH);
    double stepLength = distance / stepCount;
    // Compute the SVF contribution for each ray direction.
    for (int i = 0; i < rayCount; i += 1) {
        // To limit the number of geometries per spatial query, the ray is walked
        // progressively in stepCount sub-segments.
        Vector2D vStart = new Vector2D(startCoordinate);
        double angleRad = elementaryAngle * i;
        Vector2D v = Vector2D.create(Math.cos(angleRad), Math.sin(angleRad));
        // This is the per-step translation vector.
        v = v.multiply(stepLength);
        // max tracks the largest elevation ratio (tan of the obstruction angle).
        double max = 0;
        for (int j = 0; j < stepCount; j++) {
            LineSegment stepLine = new LineSegment(
                vStart.add(v.multiply(j)).toCoordinate(),
                vStart.add(v.multiply(j + 1)).toCoordinate());
            LineString rayStep = stepLine.toGeometry(factory);
            List<LineString> interEnv = sTRtree.query(rayStep.getEnvelopeInternal());
            if (!interEnv.isEmpty()) {
                for (LineString lineGeoms : interEnv) {
                    Coordinate[] coords = lineGeoms.getCoordinates();
                    Coordinate coordsStart = coords[0];
                    Coordinate coordsEnd = coords[1];
                    // Quick reject: segment too low to beat current max at this distance.
                    // NOTE(review): threshold "max * j * stepLength" ignores startZ — verify.
                    if (Math.max(coordsStart.z, coordsEnd.z) > max * j * stepLength) {
                        Geometry ptsIntersect = lineGeoms.intersection(rayStep);
                        // NOTE(review): the "!= null" term after instanceof is redundant —
                        // instanceof is already false for null.
                        if (ptsIntersect instanceof Point && ptsIntersect != null) {
                            // Interpolate the z at the intersection and update the max slope.
                            double coordWithZ = CoordinateUtils.interpolate(
                                lineGeoms.getCoordinateN(0), lineGeoms.getCoordinateN(1),
                                ptsIntersect.getCoordinate());
                            double distancePoint = ptsIntersect.distance(pt);
                            double ratio = (coordWithZ - startZ) / distancePoint;
                            if (ratio > max) {
                                max = ratio;
                            }
                        }
                    }
                }
            }
        }
        // Subtract the solid angle hidden by the highest obstruction along this ray.
        double sinTheta = Math.sin(Math.atan(max));
        sumArea -= elementaryAngle * sinTheta * sinTheta;
    }
    // Normalize back to [0, 1].
    svf = sumArea / (2 * Math.PI);
}
return svf;
public class CreateNotebookInstanceRequest { /**
 * A list of Elastic Inference (EI) instance types to associate with this notebook
 * instance. Currently, only one instance type can be associated with a notebook
 * instance. For more information, see
 * <a href="http://docs.aws.amazon.com/sagemaker/latest/dg/ei.html">Using Elastic
 * Inference in Amazon SageMaker</a>.
 * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
 * {@link #setAcceleratorTypes(java.util.Collection)} or
 * {@link #withAcceleratorTypes(java.util.Collection)} if you want to override the
 * existing values.
 *
 * @param acceleratorTypes
 *        A list of Elastic Inference (EI) instance types to associate with this
 *        notebook instance. Currently, only one instance type can be associated
 *        with a notebook instance.
 * @return Returns a reference to this object so that method calls can be chained together.
 * @see NotebookInstanceAcceleratorType
 */
public CreateNotebookInstanceRequest withAcceleratorTypes(String... acceleratorTypes) { } } |
// Lazily create the backing list sized to the incoming varargs.
if (this.acceleratorTypes == null) {
    setAcceleratorTypes(new java.util.ArrayList<String>(acceleratorTypes.length));
}
// Append (never replace) — documented contract of the with* varargs overload.
java.util.Collections.addAll(this.acceleratorTypes, acceleratorTypes);
return this;
public class TagGroup { /**
 * Returns the INPUT tag view in this group.
 *
 * @return the tag view in INPUT state, or null if there is none (or the
 *         group is not in append mode)
 */
protected TagView getInputTag() { } } |
// Only append-mode groups have an input tag, and it is always the last child.
if (!isAppendMode) {
    return null;
}
final TagView lastChild = getTagAt(getChildCount() - 1);
if (lastChild == null || lastChild.mState != TagView.STATE_INPUT) {
    return null;
}
return lastChild;
public class EnvLoader { /**
 * Adds an environment listener, attaching it to the nearest
 * EnvironmentClassLoader in the given loader's parent chain, or to the global
 * listener list if none is found.
 *
 * @param listener object to listen for environment create/destroy
 * @param loader the context class loader to start the search from
 */
public static void addEnvironmentListener(EnvLoaderListener listener, ClassLoader loader) { } } |
// Walk up the class-loader chain looking for an EnvironmentClassLoader to own
// the listener; the first match wins.
for (; loader != null; loader = loader.getParent()) {
    if (loader instanceof EnvironmentClassLoader) {
        ((EnvironmentClassLoader) loader).addListener(listener);
        return;
    }
}
// Dead code kept from an earlier revision (system-classloader dispatch):
/* if ( _ envSystemClassLoader ! = null ) {
_ envSystemClassLoader . addNotificationListener ( listener ) ;
return ; */
// Fallback: no environment loader found, register globally.
_globalEnvironmentListeners.add(listener);
public class CmsTinyMCEWidget { /**
 * Returns the string representation of the tinyMCE options object.<p>
 *
 * @param cms the OpenCms context
 * @param param the widget parameter
 * @return the string representation of the tinyMCE options object
 */
private String getTinyMceConfiguration(CmsObject cms, I_CmsWidgetParameter param) { } } |
JSONObject result = new JSONObject();
CmsEditorDisplayOptions options = OpenCms.getWorkplaceManager().getEditorDisplayOptions();
Properties displayOptions = options.getDisplayOptions(cms);
try {
    // The editor is attached to the textarea whose id is "ta_" + the widget parameter id.
    result.put("elements", "ta_" + param.getId());
    String editorHeight = getHtmlWidgetOption().getEditorHeight();
    if (CmsStringUtil.isNotEmptyOrWhitespaceOnly(editorHeight)) {
        // tinyMCE expects a bare number, so strip any "px" suffix.
        editorHeight = editorHeight.replaceAll("px", "");
        result.put("height", editorHeight);
    }
    if (options.showElement("gallery.enhancedoptions", displayOptions)) {
        result.put("cmsGalleryEnhancedOptions", true);
    }
    if (options.showElement("gallery.usethickbox", displayOptions)) {
        result.put("cmsGalleryUseThickbox", true);
    }
    Boolean pasteText = Boolean.valueOf(
        OpenCms.getWorkplaceManager().getWorkplaceEditorManager().getEditorParameter(cms, "tinymce", "paste_text"));
    result.put("paste_as_text", pasteText);
    result.put("fullpage", getHtmlWidgetOption().isFullPage());
    result.merge(getToolbarJson(), true, false);
    result.put("language", OpenCms.getWorkplaceManager().getWorkplaceLocale(cms).getLanguage());
    // Set the CSS style sheet for the current editor widget if configured.
    boolean cssConfigured = false;
    String cssPath = "";
    if (getHtmlWidgetOption().useCss()) {
        cssPath = getHtmlWidgetOption().getCssPath();
        // Set the CSS path to null (the created configuration String passed to JS
        // will not include this path then).
        getHtmlWidgetOption().setCssPath(null);
        cssConfigured = true;
    } else if (OpenCms.getWorkplaceManager().getEditorCssHandlers().size() > 0) {
        Iterator<I_CmsEditorCssHandler> i = OpenCms.getWorkplaceManager().getEditorCssHandlers().iterator();
        try {
            // Cast parameter to I_CmsXmlContentValue to find the edited resource.
            I_CmsXmlContentValue contentValue = (I_CmsXmlContentValue) param;
            // Now extract the absolute path of the edited resource.
            CmsFile editedResource = contentValue.getDocument().getFile();
            String editedResourceSitePath = editedResource == null ? null : cms.getSitePath(editedResource);
            // First matching handler supplies the style sheet.
            while (i.hasNext()) {
                I_CmsEditorCssHandler handler = i.next();
                if (handler.matches(cms, editedResourceSitePath)) {
                    cssPath = handler.getUriStyleSheet(cms, editedResourceSitePath);
                    if (CmsStringUtil.isNotEmptyOrWhitespaceOnly(cssPath)) {
                        cssConfigured = true;
                    }
                    break;
                }
            }
        } catch (Exception e) {
            // Ignore, CSS could not be set — best effort only.
            LOG.debug(e.getLocalizedMessage(), e);
        }
    }
    // The base content CSS always comes first; a configured sheet is appended.
    List<String> contentCssLinks = new ArrayList<String>();
    contentCssLinks.add(OpenCms.getLinkManager().substituteLink(cms, BASE_CONTENT_CSS));
    if (cssConfigured) {
        contentCssLinks.add(OpenCms.getLinkManager().substituteLink(cms, cssPath));
    }
    result.put("content_css", CmsStringUtil.listAsString(contentCssLinks, ","));
    if (getHtmlWidgetOption().showStylesFormat()) {
        try {
            CmsFile file = cms.readFile(getHtmlWidgetOption().getStylesFormatPath());
            String characterEncoding = OpenCms.getSystemInfo().getDefaultEncoding();
            result.put("style_formats", new String(file.getContents(), characterEncoding));
        } catch (CmsException cmsException) {
            LOG.error("Can not open file:" + getHtmlWidgetOption().getStylesFormatPath(), cmsException);
        } catch (UnsupportedEncodingException ex) {
            LOG.error(ex);
        }
    }
    String formatSelectOptions = getHtmlWidgetOption().getFormatSelectOptions();
    if (!CmsStringUtil.isEmpty(formatSelectOptions)
        && !getHtmlWidgetOption().isButtonHidden(CmsHtmlWidgetOption.OPTION_FORMATSELECT)) {
        result.put("block_formats", CmsHtmlWidget.getTinyMceBlockFormats(formatSelectOptions));
    }
    result.put("entity_encoding", "named");
    result.put("entities", "160,nbsp");
} catch (JSONException e) {
    // Configuration building is best effort; partial result is still returned.
    LOG.error(e.getLocalizedMessage(), e);
}
return result.toString();
public class SipParser { /**
 * Consumes a token, which according to RFC 3261 section 25.1 Basic Rules is:
 * token = 1*(alphanum / "-" / "." / "!" / "%" / "*" / "_" / "+" / "`" / "'"
 *
 * @param buffer the buffer to consume from
 * @return a buffer containing the token we consumed, or null if nothing was consumed
 * @throws IOException
 * @throws IndexOutOfBoundsException
 */
public static Buffer consumeToken(final Buffer buffer) throws IndexOutOfBoundsException, IOException { } } |
// Measure the run of token characters first; an empty run means no token.
final int tokenLength = getTokenCount(buffer);
return tokenLength == 0 ? null : buffer.readBytes(tokenLength);
public class PushUpBooleanExpressionOptimizerImpl {

    /**
     * Explores the tree upwards from the node providing the expression, looking for a recipient node,
     * and optionally keeping track of projections to extend on the path from provider to recipient.
     * May optionally force propagation through the first encountered UnionNode ancestor.
     *
     * @param providerNode node currently carrying the boolean expression
     * @param propagatedExpression the expression to push upwards
     * @param nonPropagatedExpression optional expression to be left behind at the provider
     * @param query the intermediate query being traversed
     * @param propagateThroughNextUnionNodeAncestor when true, the climb passes through the first
     *        UnionNode encountered (consumed after one use)
     * @return a push-up proposal, or empty when no effective propagation is possible
     */
    private Optional<PushUpBooleanExpressionProposal> makeNodeCentricProposal(CommutativeJoinOrFilterNode providerNode, ImmutableExpression propagatedExpression, Optional<ImmutableExpression> nonPropagatedExpression, IntermediateQuery query, boolean propagateThroughNextUnionNodeAncestor) {
        Optional<JoinOrFilterNode> recipient;
        // Projection nodes seen on the path; their projections may need extending.
        ImmutableSet.Builder<ExplicitVariableProjectionNode> inbetweenProjectorsBuilder = ImmutableSet.builder();
        QueryNode currentChildNode;
        QueryNode currentParentNode = providerNode;
        // Climb parent links starting at the provider; the loop exits via break/return.
        do {
            currentChildNode = currentParentNode;
            currentParentNode = query.getParent(currentParentNode).orElseThrow(() -> new InvalidIntermediateQueryException("This node must have a parent node"));
            // Stop just below the root: currentChildNode is then the highest reachable node.
            if (currentParentNode == query.getRootNode()) {
                break;
            }
            if (currentParentNode instanceof ConstructionNode) {
                /* keep track of Construction nodes on the path between provider and recipient */
                inbetweenProjectorsBuilder.add((ConstructionNode) currentParentNode);
                continue;
            }
            if (currentParentNode instanceof UnionNode) {
                /* optionally propagate the expression through the first encountered UnionNode */
                if (propagateThroughNextUnionNodeAncestor) {
                    // One-shot flag: only the first union ancestor is traversed.
                    propagateThroughNextUnionNodeAncestor = false;
                    /* keep track of it as an inbetween projector */
                    inbetweenProjectorsBuilder.add((ExplicitVariableProjectionNode) currentParentNode);
                    continue;
                }
                break;
            }
            if (currentParentNode instanceof LeftJoinNode && (query.getOptionalPosition(currentChildNode).orElseThrow(() -> new InvalidIntermediateQueryException("The child of a LeftJoin node must have a position")) == RIGHT)) {
                /* Stop propagation when reaching a LeftJoinNode from its right branch,
                   and select the leftJoinNode as recipient */
                return Optional.of(new PushUpBooleanExpressionProposalImpl(propagatedExpression, ImmutableMap.of(providerNode, nonPropagatedExpression), currentChildNode, Optional.of((JoinOrFilterNode) currentParentNode), inbetweenProjectorsBuilder.build()));
            }
        } while (true);
        // If no effective propagation (we never moved above the provider)
        if (currentChildNode == providerNode) {
            return Optional.empty();
        }
        // If we reach this point, the upward propagation must have been blocked by a union or by the root.
        // The blocked node itself only becomes the recipient when it can hold a filter condition.
        recipient = currentChildNode instanceof CommutativeJoinOrFilterNode ? Optional.of((CommutativeJoinOrFilterNode) currentChildNode) : Optional.empty();
        PushUpBooleanExpressionProposal proposal = new PushUpBooleanExpressionProposalImpl(propagatedExpression, ImmutableMap.of(providerNode, nonPropagatedExpression), currentChildNode, recipient, inbetweenProjectorsBuilder.build());
        // Possibly adjust the proposal, to enforce that the second exception (see the class comments) holds
        return adjustProposal(proposal, query);
    }
}
public class authenticationvserver_authenticationcertpolicy_binding { /** * Use this API to fetch authenticationvserver _ authenticationcertpolicy _ binding resources of given name . */
public static authenticationvserver_authenticationcertpolicy_binding [ ] get ( nitro_service service , String name ) throws Exception { } } | authenticationvserver_authenticationcertpolicy_binding obj = new authenticationvserver_authenticationcertpolicy_binding ( ) ; obj . set_name ( name ) ; authenticationvserver_authenticationcertpolicy_binding response [ ] = ( authenticationvserver_authenticationcertpolicy_binding [ ] ) obj . get_resources ( service ) ; return response ; |
public class BccClient { /** * Modifying the password of the instance .
* You can change the instance password only when the instance is Running or Stopped ,
* otherwise , it ' s will get < code > 409 < / code > errorCode .
* This is an asynchronous interface ,
* you can get the latest status by invoke { @ link # getInstance ( GetInstanceRequest ) }
* @ param instanceId The id of the instance .
* @ param adminPass The new password to update .
* The adminPass will be encrypted in AES - 128 algorithm
* with the substring of the former 16 characters of user SecretKey .
* @ throws BceClientException */
public void modifyInstancePassword ( String instanceId , String adminPass ) throws BceClientException { } } | this . modifyInstancePassword ( new ModifyInstancePasswordRequest ( ) . withInstanceId ( instanceId ) . withAdminPass ( adminPass ) ) ; |
public class AbstractAlpineQueryManager {

    /**
     * Given a query, this method will decorate that query with pagination, ordering,
     * and sorting direction. Specific checks are performed to ensure the execution
     * of the query is capable of being paged and that ordering can be securely performed.
     *
     * @param query the JDO Query object to execute
     * @return the same query instance, decorated
     * @since 1.0.0
     */
    public Query decorate(final Query query) {
        // Clear the result to fetch if previously specified (i.e. by getting count)
        query.setResult(null);
        if (pagination != null && pagination.isPaginated()) {
            final long begin = pagination.getOffset();
            final long end = begin + pagination.getLimit();
            // JDO ranges are half-open: [begin, end)
            query.setRange(begin, end);
        }
        // Only order when the field name is strictly alphanumeric (prevents JDOQL
        // injection through the orderBy string) and a direction was requested.
        if (orderBy != null && RegexSequence.Pattern.ALPHA_NUMERIC.matcher(orderBy).matches() && orderDirection != OrderDirection.UNSPECIFIED) {
            // Check to see if the specified orderBy field is defined in the class being queried.
            boolean found = false;
            final org.datanucleus.store.query.Query iq = ((JDOQuery) query).getInternalQuery();
            // NOTE(review): getDeclaredFields() excludes inherited fields, so ordering on a
            // superclass field is silently ignored here — confirm this is intended.
            for (final Field field : iq.getCandidateClass().getDeclaredFields()) {
                if (orderBy.equals(field.getName())) {
                    found = true;
                    break;
                }
            }
            if (found) {
                query.setOrdering(orderBy + " " + orderDirection.name().toLowerCase());
            }
        }
        return query;
    }
}
public class Chain { /** * Compares two comparable objects as specified by { @ link
* Comparable # compareTo } , < i > if < / i > the result of this comparison chain
* has not already been determined .
* @ param left
* @ param right
* @ return { @ code ComparisonChain } */
public static < T extends Comparable < ? super T > > ComparisonChain compare ( T left , T right ) { } } | return new ComparisonChain ( ) . compare ( left , right ) ; |
public class MavenProjectUtil { /** * Get manifest file from plugin configuration
* @ param proj
* @ param pluginArtifactId
* @ return the manifest file */
public static File getManifestFile ( MavenProject proj , String pluginArtifactId ) { } } | Xpp3Dom dom = proj . getGoalConfiguration ( "org.apache.maven.plugins" , pluginArtifactId , null , null ) ; if ( dom != null ) { Xpp3Dom archive = dom . getChild ( "archive" ) ; if ( archive != null ) { Xpp3Dom val = archive . getChild ( "manifestFile" ) ; if ( val != null ) { return new File ( proj . getBasedir ( ) . getAbsolutePath ( ) + "/" + val . getValue ( ) ) ; } } } return null ; |
public class BoundednessFeature { /** * Calculates the radius to a given boundedness value
* @ param D Diffusion coefficient
* @ param N Number of steps
* @ param timelag Timelag
* @ param B Boundedeness
* @ return Confinement radius */
public static double getRadiusToBoundedness ( double D , int N , double timelag , double B ) { } } | double cov_area = a ( N ) * D * timelag ; double radius = Math . sqrt ( cov_area / ( 4 * B ) ) ; return radius ; |
public class OkRequest { /** * Set header to have given entry ' s key as the name and value as the value
* @ param header
* @ return this request */
public OkRequest < T > header ( final Map . Entry < String , String > header ) { } } | return header ( header . getKey ( ) , header . getValue ( ) ) ; |
public class HalFormsDocument { /** * Adds the given { @ link HalFormsTemplate } to the current document .
* @ param name must not be { @ literal null } or empty .
* @ param template must not be { @ literal null } .
* @ return */
public HalFormsDocument < T > andTemplate ( String name , HalFormsTemplate template ) { } } | Assert . hasText ( name , "Template name must not be null or empty!" ) ; Assert . notNull ( template , "Template must not be null!" ) ; Map < String , HalFormsTemplate > templates = new HashMap < > ( this . templates ) ; templates . put ( name , template ) ; return new HalFormsDocument < > ( resource , resources , embedded , pageMetadata , links , templates ) ; |
public class StringUtility { /** * Splits a string on the given delimiter over the given range .
* Does include all empty elements on the split .
* @ param words the words will be added to this collection .
* @ return the collection provided in words parameter */
public static < C extends Collection < String > > C splitString ( String line , int begin , int end , char delim , C words ) { } } | int pos = begin ; while ( pos < end ) { int start = pos ; pos = line . indexOf ( delim , pos ) ; if ( pos == - 1 || pos > end ) pos = end ; words . add ( line . substring ( start , pos ) ) ; pos ++ ; } // If ending in a delimeter , add the empty string
if ( end > begin && line . charAt ( end - 1 ) == delim ) words . add ( "" ) ; return words ; |
public class ns_vserver_appflow_config { /** * < pre >
* delete virtual server policy .
* < / pre > */
public static ns_vserver_appflow_config delete ( nitro_service client , ns_vserver_appflow_config resource ) throws Exception { } } | resource . validate ( "delete" ) ; return ( ( ns_vserver_appflow_config [ ] ) resource . delete_resource ( client ) ) [ 0 ] ; |
public class UpdateDeploymentGroupResult { /** * If the output contains no data , and the corresponding deployment group contained at least one Auto Scaling group ,
* AWS CodeDeploy successfully removed all corresponding Auto Scaling lifecycle event hooks from the AWS account . If
* the output contains data , AWS CodeDeploy could not remove some Auto Scaling lifecycle event hooks from the AWS
* account .
* @ return If the output contains no data , and the corresponding deployment group contained at least one Auto
* Scaling group , AWS CodeDeploy successfully removed all corresponding Auto Scaling lifecycle event hooks
* from the AWS account . If the output contains data , AWS CodeDeploy could not remove some Auto Scaling
* lifecycle event hooks from the AWS account . */
public java . util . List < AutoScalingGroup > getHooksNotCleanedUp ( ) { } } | if ( hooksNotCleanedUp == null ) { hooksNotCleanedUp = new com . amazonaws . internal . SdkInternalList < AutoScalingGroup > ( ) ; } return hooksNotCleanedUp ; |
public class SelectStatement { /** * Returns true if a non - frozen collection is selected , false otherwise . */
private boolean selectACollection ( ) { } } | if ( ! cfm . comparator . hasCollections ( ) ) return false ; for ( ColumnDefinition def : selection . getColumns ( ) ) { if ( def . type . isCollection ( ) && def . type . isMultiCell ( ) ) return true ; } return false ; |
public class SubscriptionRegistrar {

    /**
     * Method findCandidateSubsForWildcardExpr
     * Used where a new consumer set monitor is being added on a wildcarded expression.
     * Each category of subscription is checked to determine potential matches. The style of
     * matching might be through direct MatchSpace evaluation, if possible, or by string matching.
     *
     * @param topicExpression the wildcarded topic expression being monitored
     * @param topicStem the non-wildcarded stem of the topic expression
     * @param consumerSet set collecting the candidate subscriptions (updated in place)
     * @throws SIDiscriminatorSyntaxException if the discriminator expression is invalid
     */
    public void findCandidateSubsForWildcardExpr(String topicExpression, String topicStem, Set consumerSet) throws SIDiscriminatorSyntaxException {
        if (tc.isEntryEnabled())
            SibTr.entry(tc, "findCandidateSubsForWildcardExpr", new Object[] { topicExpression, topicStem });
        // Inspect non-wildcarded and non-selector consumers
        if (_areExactNonSelectorSubs) {
            // Use MatchSpace direct evaluation code once we've isolated candidate
            // expressions through string matching
            evaluateCandidateExpression(topicExpression, _exactNonSelectorSubs, topicStem, consumerSet);
        }
        // Inspect non-wildcarded, selector consumers
        if (_areExactSelectorSubs) {
            // Use MatchSpace direct evaluation code once we've isolated candidate
            // expressions through string matching
            evaluateCandidateExpression(topicExpression, _exactSelectorSubs, topicStem, consumerSet);
        }
        // Inspect wildcarded and non-selector consumers
        if (_areWildcardNonSelectorSubs) {
            // Use string matching
            isolateCandidateWildcardSubs(topicExpression, topicStem, _wildcardNonSelectorSubs, consumerSet);
        }
        // Inspect wildcarded and selector consumers
        if (_areWildcardSelectorSubs) {
            // Use string matching
            isolateCandidateWildcardSubs(topicExpression, topicStem, _wildcardSelectorSubs, consumerSet);
        }
        if (tc.isEntryEnabled())
            SibTr.exit(tc, "findCandidateSubsForWildcardExpr");
    }
}
public class Validation { /** * method to check if the given connection is specific
* @ param connectionNotation
* input ConnectionNotation
* @ return true if the described connection is specific , false otherwise */
private static boolean isConnectionSpecific ( ConnectionNotation connectionNotation ) { } } | String connection = connectionNotation . getSourceUnit ( ) + ":" + connectionNotation . getrGroupSource ( ) + "-" + connectionNotation . getTargetUnit ( ) + ":" + connectionNotation . getrGroupTarget ( ) ; /* check for specific interaction */
if ( connection . matches ( "\\d+:R\\d-\\d+:R\\d|\\d+:pair-\\d+:pair" ) ) { return true ; } return false ; |
public class ParseUtil {

    /**
     * Match "inf", "infinity" in a number of different capitalizations.
     *
     * @param str String to match
     * @param firstchar First character
     * @param start Interval begin
     * @param end Interval end
     * @return {@code true} when infinity was recognized.
     */
    private static boolean matchInf(byte[] str, byte firstchar, int start, int end) {
        final int len = end - start;
        // The wonders of unicode. The infinity symbol \u221E is three bytes:
        // UTF-8 encodes U+221E as 0xE2 0x88 0x9E, which as signed Java bytes
        // are -0x1E, -0x78, -0x62 — matched explicitly here.
        if (len == 3 && firstchar == -0x1E && str[start + 1] == -0x78 && str[start + 2] == -0x62) {
            return true;
        }
        // Only "inf" (3 chars) or the full word (INFINITY_LENGTH chars) can match,
        // and it must start with 'I' or 'i'.
        if ((len != 3 && len != INFINITY_LENGTH) || (firstchar != 'I' && firstchar != 'i')) {
            return false;
        }
        // Compare remaining characters case-insensitively.
        // NOTE(review): assumes INFINITY_PATTERN holds one case variant at offsets
        // [1, INFINITY_LENGTH) and the other at [INFINITY_LENGTH + 1, ...) — confirm
        // against the constant's definition.
        for (int i = 1, j = INFINITY_LENGTH + 1; i < INFINITY_LENGTH; i++, j++) {
            final byte c = str[start + i];
            if (c != INFINITY_PATTERN[i] && c != INFINITY_PATTERN[j]) {
                return false;
            }
            // Short form "inf" is fully matched after its third character.
            if (i == 2 && len == 3) {
                return true;
            }
        }
        return true;
    }
}
public class Popover {

    /**
     * Create the popover: stores the body text in the element's "data-content"
     * attribute and then initializes the popover plugin on the element via jQuery.
     *
     * @param e the element the popover is attached to
     * @param content the popover body text
     */
    private void popover(Element e, String content) {
        // The popover plugin reads its body from the "data-content" attribute,
        // so it must be set before popover() is invoked.
        e.setAttribute("data-content", content);
        JQuery.jQuery(e).popover();
    }
}
public class SyncMembersInner { /** * Lists sync members in the given sync group .
* @ param nextPageLink The NextLink from the previous successful call to List operation .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the PagedList & lt ; SyncMemberInner & gt ; object */
public Observable < Page < SyncMemberInner > > listBySyncGroupNextAsync ( final String nextPageLink ) { } } | return listBySyncGroupNextWithServiceResponseAsync ( nextPageLink ) . map ( new Func1 < ServiceResponse < Page < SyncMemberInner > > , Page < SyncMemberInner > > ( ) { @ Override public Page < SyncMemberInner > call ( ServiceResponse < Page < SyncMemberInner > > response ) { return response . body ( ) ; } } ) ; |
public class KiteConnect { /** * Retrieves individual mutualfunds order .
* @ param orderId is the order id of a mutualfunds scrip .
* @ return returns a single mutualfunds object with all the parameters .
* @ throws KiteException is thrown for all Kite trade related errors .
* @ throws IOException is thrown when there is connection related error . */
public MFOrder getMFOrder ( String orderId ) throws KiteException , IOException , JSONException { } } | JSONObject response = new KiteRequestHandler ( proxy ) . getRequest ( routes . get ( "mutualfunds.order" ) . replace ( ":order_id" , orderId ) , apiKey , accessToken ) ; return gson . fromJson ( response . get ( "data" ) . toString ( ) , MFOrder . class ) ; |
public class Grid { /** * This method appends a new item to the list of failed model parameters .
* < p > The failed parameters object represents a point in hyper space which cannot be used for
* model building . < / p >
* < p > Should be used only from < code > GridSearch < / code > job . < / p >
* @ param params model parameters which caused model builder failure
* @ params e exception causing a failure */
void appendFailedModelParameters ( MP params , Exception e ) { } } | assert params != null : "Model parameters should be always != null !" ; String [ ] rawParams = ArrayUtils . toString ( getHyperValues ( params ) ) ; appendFailedModelParameters ( params , rawParams , e . getMessage ( ) , StringUtils . toString ( e ) ) ; |
public class UsageAwareDatastore {

    /**
     * Returns the shared datastore connection, creating it when necessary.
     * Reuse order: (1) a strongly-held connection, (2) a still-live weakly-referenced
     * connection whose usage can be re-requested, (3) a freshly created connection.
     * Synchronized so concurrent callers cannot create duplicate connections.
     */
    private synchronized final UsageAwareDatastoreConnection<E> getDatastoreConnection() {
        // Strongly-held connection takes precedence.
        if (_datastoreConnection != null) {
            return _datastoreConnection;
        }
        UsageAwareDatastoreConnection<E> datastoreConnection;
        if (_datastoreConnectionRef != null) {
            datastoreConnection = _datastoreConnectionRef.get();
            // The weak referent may have been collected; requestUsage() must also
            // succeed before the connection can be handed out again.
            if (datastoreConnection != null && datastoreConnection.requestUsage()) {
                // reuse existing data context provider
                logger.debug("Reusing existing DatastoreConnection: {}", datastoreConnection);
                return datastoreConnection;
            }
        }
        datastoreConnection = createDatastoreConnection();
        if (datastoreConnection == null) {
            throw new IllegalStateException("createDatastoreConnection() returned null");
        }
        // Held weakly so an unused connection can be garbage collected.
        _datastoreConnectionRef = new WeakReference<UsageAwareDatastoreConnection<E>>(datastoreConnection);
        return datastoreConnection;
    }
}
public class PosTagUtil { /** * 转为863标注集 < br >
* 863词性标注集 , 其各个词性含义如下表 :
* TagDescriptionExampleTagDescriptionExample
* aadjective美丽niorganization name保险公司
* bother noun - modifier大型 , 西式nllocation noun城郊
* cconjunction和 , 虽然nsgeographical name北京
* dadverb很nttemporal noun近日 , 明代
* eexclamation哎nzother proper noun诺贝尔奖
* gmorpheme茨 , 甥oonomatopoeia哗啦
* hprefix阿 , 伪ppreposition在 , 把
* iidiom百花齐放qquantity个
* jabbreviation公检法rpronoun我们
* ksuffix界 , 率uauxiliary的 , 地
* mnumber一 , 第一vverb跑 , 学习
* ngeneral noun苹果wppunctuation , 。 !
* nddirection noun右侧wsforeign wordsCPU
* nhperson name杜甫 , 汤姆xnon - lexeme萄 , 翱
* @ param termList
* @ return */
public static List < String > to863 ( List < Term > termList ) { } } | List < String > posTagList = new ArrayList < String > ( termList . size ( ) ) ; for ( Term term : termList ) { String posTag = posConverter . get ( term . nature . toString ( ) ) ; if ( posTag == null ) posTag = term . nature . toString ( ) ; posTagList . add ( posTag ) ; } return posTagList ; |
public class StaticCATConsumer {

    /**
     * Register an asynchronous consumer for this consumer session.
     * Wire format of the request, read in this exact order:
     *   BIT16 ConnectionObjectId
     *   BIT16 SyncConsumerSessionId
     *   BIT16 MessageOrderContextId
     *   BIT16 ClientSessionId
     *   BIT32 Max active messages
     *   BIT64 Message lock expiry
     *   BIT32 Max batch size
     *   [stoppable only] BIT32 Max sequential failures, BIT64 hidden message delay
     *
     * @param request the request buffer to parse
     * @param conversation the conversation the request arrived on
     * @param requestNumber the request number for correlation
     * @param allocatedFromBufferPool whether the buffer should be returned to the pool
     * @param partOfExchange whether the request is part of an exchange
     * @param stoppable whether the consumer is stoppable (extra fields present)
     */
    static void rcvRegisterAsyncConsumer(CommsByteBuffer request, Conversation conversation, int requestNumber, boolean allocatedFromBufferPool, boolean partOfExchange, boolean stoppable) // SIB0115d.comms
    {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(tc, "rcvRegisterAsyncConsumer", new Object[] { request, conversation, requestNumber, allocatedFromBufferPool, stoppable // SIB0115d.comms
            });
        // Reads must follow the wire format exactly; connectionObjectId is consumed
        // to advance the buffer even though only the consumer id is used below.
        short connectionObjectId = request.getShort(); // BIT16 ConnectionObjectId
        short consumerObjectId = request.getShort(); // BIT16 SyncConsumerSessionId
        short orderContextId = request.getShort(); // BIT16 OrderContextId
        short clientSessionId = request.getShort(); // BIT16 ClientSessionId
        int maxActiveMessages = request.getInt(); // BIT32 Max active messages
        long messageLockExpiry = request.getLong(); // BIT64 Message lock expiry
        int maxBatchSize = request.getInt(); // BIT32 Maximum batch size
        int maxSequentialFailures = 0; // BIT32 Max Sequental Failures SIB0115d.comms
        long hiddenMessageDelay = 0; // BIT64 hidden message delay
        // If stoppable get the maxSequentialFailures value
        if (stoppable) { // SIB0115d.comms
            maxSequentialFailures = request.getInt(); // SIB0115d.comms
            hiddenMessageDelay = request.getLong();
        } // SIB0115d.comms
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
            SibTr.debug(tc, "connectionObjectID=" + connectionObjectId);
            SibTr.debug(tc, "consumerObjectID=" + consumerObjectId);
            SibTr.debug(tc, "orderContextID=" + orderContextId);
            SibTr.debug(tc, "clientSessionID=" + clientSessionId);
            SibTr.debug(tc, "maxActiveMessages=" + maxActiveMessages);
            SibTr.debug(tc, "messageLockExpiry=" + messageLockExpiry);
            SibTr.debug(tc, "maxBatchSize=" + maxBatchSize);
            SibTr.debug(tc, "maxSequentialFailures=" + maxSequentialFailures); // SIB0115d.comms
            SibTr.debug(tc, "hiddenMesageDelay=" + hiddenMessageDelay);
        }
        // Resolve the consumer registered earlier on this conversation.
        ConversationState convState = (ConversationState) conversation.getAttachment();
        CATMainConsumer mainConsumer = ((CATMainConsumer) convState.getObject(consumerObjectId));
        OrderingContext orderContext = null;
        // Get the message order context if there was one passed up
        if (orderContextId != CommsConstants.NO_ORDER_CONTEXT) {
            orderContext = ((CATOrderingContext) convState.getObject(orderContextId)).getOrderingContext(); // F201521
        }
        mainConsumer.setClientSessionId(clientSessionId);
        mainConsumer.setAsynchConsumerCallback(requestNumber, maxActiveMessages, messageLockExpiry, maxBatchSize, orderContext, stoppable, // SIB0115d.comms
                maxSequentialFailures, hiddenMessageDelay); // SIB0115d.comms
        // Return the buffer to the pool if it was allocated from one.
        request.release(allocatedFromBufferPool);
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "rcvRegisterAsyncConsumer");
    }
}
public class ServiceExtensionLoader { /** * { @ inheritDoc }
* @ see org . jboss . shrinkwrap . api . ExtensionLoader # load ( java . lang . Class , org . jboss . shrinkwrap . api . Archive ) */
@ Override public < T extends Assignable > T load ( Class < T > extensionClass , Archive < ? > baseArchive ) throws UnknownExtensionTypeException { } } | if ( isCached ( extensionClass ) ) { return createFromCache ( extensionClass , baseArchive ) ; } T object = createFromLoadExtension ( extensionClass , baseArchive ) ; addToCache ( extensionClass , object . getClass ( ) ) ; return object ; |
public class JvmExecutableImpl {

    /**
     * Sets the varArgs flag, notifying registered EMF adapters of the change.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @param newVarArgs the new value of the varArgs attribute
     * @generated
     */
    public void setVarArgs(boolean newVarArgs) {
        boolean oldVarArgs = varArgs;
        varArgs = newVarArgs;
        // Standard EMF generated pattern: emit a SET notification (old -> new)
        // only when adapters are attached and delivery is required.
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, TypesPackage.JVM_EXECUTABLE__VAR_ARGS, oldVarArgs, varArgs));
    }
}
public class Response {

    /**
     * Sends an HTTP error to the servlet container.
     *
     * @param errorCode HTTP status code to send
     * @param errorContent error message attached to the response (non-dev mode only)
     */
    public static void sendError(int errorCode, String errorContent) {
        HttpServletResponse response = Response.getServletResponse();
        try {
            if (HuluSetting.isDevMode) {
                // Dev mode: send only the status code (container default error page).
                // NOTE(review): this branch looks inverted — one would usually expect the
                // detailed errorContent in dev mode and the bare code in production;
                // confirm the intent before relying on it.
                response.sendError(errorCode);
            } else {
                response.sendError(errorCode, errorContent);
            }
        } catch (IOException e) {
            // The response may already be committed; log and continue.
            log.error(e, "Error when sendError!");
        }
    }
}
public class InjectorBuilder { /** * Iterate through all elements of the current module and pass the output of the
* ElementVisitor to the provided consumer . ' null ' responses from the visitor are ignored .
* This call will not modify any bindings
* @ param visitor */
public < T > InjectorBuilder forEachElement ( ElementVisitor < T > visitor , Consumer < T > consumer ) { } } | Elements . getElements ( module ) . forEach ( element -> Optional . ofNullable ( element . acceptVisitor ( visitor ) ) . ifPresent ( consumer ) ) ; return this ; |
public class KmeansCalculator {

    /**
     * Builds the mapping between base centroids and merge-target centroids.
     * Pairs are taken greedily from the distance list, skipping any pair whose base
     * or target centroid has already been assigned, until centroidNum mappings exist.
     *
     * @param centroidNum number of centroids (maximum number of mappings to create)
     * @param allDistance list of Euclidean distance entities
     * @return mapping from base centroid index to target centroid index
     */
    protected static Map<Integer, Integer> createCentroidMappings(int centroidNum, List<CentroidMapping> allDistance) {
        Set<Integer> baseSet = new HashSet<>();
        Set<Integer> targetSet = new HashSet<>();
        Map<Integer, Integer> resultMapping = new TreeMap<>();
        int mappingNum = 0;
        // Walk the Euclidean distance list in its given order.
        // NOTE(review): the original comment says a sorted list is used, but this method
        // does not sort allDistance itself — confirm the caller sorts it by distance.
        for (CentroidMapping targetDistance : allDistance) {
            // Skip pairs whose base or target centroid has already been used in a mapping.
            if (baseSet.contains(targetDistance.getBaseIndex()) || targetSet.contains(targetDistance.getTargetIndex())) {
                continue;
            }
            baseSet.add(targetDistance.getBaseIndex());
            targetSet.add(targetDistance.getTargetIndex());
            resultMapping.put(targetDistance.getBaseIndex(), targetDistance.getTargetIndex());
            mappingNum++;
            // Stop once the required number of mappings has been collected.
            if (mappingNum >= centroidNum) {
                break;
            }
        }
        return resultMapping;
    }
}
public class CmsResourceTypeConfig {

    /**
     * Creates a new element.<p>
     *
     * @param userCms the CMS context to use
     * @param modelResource the model resource to use
     * @param pageFolderRootPath the root path of the folder containing the current container page
     * @return the created resource
     * @throws CmsException if something goes wrong
     */
    public CmsResource createNewElement(CmsObject userCms, CmsResource modelResource, String pageFolderRootPath) throws CmsException {
        checkOffline(userCms);
        checkInitialized();
        CmsObject rootCms = rootCms(userCms);
        String folderPath = getFolderPath(userCms, pageFolderRootPath);
        // Make sure the target folder exists before generating a file name inside it.
        CmsVfsUtil.createFolder(userCms, folderPath);
        String destination = CmsStringUtil.joinPaths(folderPath, getNamePattern(true));
        // Generate an unused file name from the name pattern.
        // NOTE(review): meaning of the constant 5 is not visible here — presumably the
        // number width used by the name generator; confirm against its API.
        String creationPath = OpenCms.getResourceManager().getNameGenerator().getNewFileName(rootCms, destination, 5);
        // set the content locale
        rootCms.getRequestContext().setAttribute(CmsRequestContext.ATTRIBUTE_NEW_RESOURCE_LOCALE, userCms.getRequestContext().getLocale());
        if (modelResource != null) {
            // set the model resource
            rootCms.getRequestContext().setAttribute(CmsRequestContext.ATTRIBUTE_MODEL, modelResource.getRootPath());
        }
        CmsResource createdResource = rootCms.createResource(creationPath, getType(), null, new ArrayList<CmsProperty>(0));
        if (modelResource != null) {
            // copy the model resource's categories onto the newly created resource
            CmsCategoryService.getInstance().copyCategories(rootCms, modelResource, creationPath);
        }
        try {
            rootCms.unlockResource(creationPath);
        } catch (CmsLockException e) {
            // probably the parent folder is locked
            LOG.info(e.getLocalizedMessage(), e);
        }
        return createdResource;
    }
}
public class KeyVaultClientBaseImpl { /** * Lists the specified certificate issuer .
* The GetCertificateIssuer operation returns the specified certificate issuer resources in the specified key vault . This operation requires the certificates / manageissuers / getissuers permission .
* @ param vaultBaseUrl The vault name , for example https : / / myvault . vault . azure . net .
* @ param issuerName The name of the issuer .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the IssuerBundle object */
public Observable < IssuerBundle > getCertificateIssuerAsync ( String vaultBaseUrl , String issuerName ) { } } | return getCertificateIssuerWithServiceResponseAsync ( vaultBaseUrl , issuerName ) . map ( new Func1 < ServiceResponse < IssuerBundle > , IssuerBundle > ( ) { @ Override public IssuerBundle call ( ServiceResponse < IssuerBundle > response ) { return response . body ( ) ; } } ) ; |
public class Messages { /** * Get a specific room information
* @ param company Company ID
* @ param roomId Room ID
* @ param params Parameters
* @ throwsJSONException If error occurred
* @ return { @ link JSONObject } */
public JSONObject getRoomDetails ( String company , String roomId , HashMap < String , String > params ) throws JSONException { } } | return oClient . get ( "/messages/v3/" + company + "/rooms/" + roomId , params ) ; |
public class SmartHandle {

    /**
     * Adds an argument of the given name and type to this handle's signature, inserted
     * immediately before the named existing argument, and returns a new SmartHandle.
     * The underlying method handle ignores the new argument (it is "dropped" on
     * invocation, in the sense of {@link java.lang.invoke.MethodHandles#dropArguments}),
     * which is why this operation is called {@code drop} despite widening the signature.
     *
     * @param beforeName name of the existing argument before which the new one is inserted
     * @param newName name of the new (ignored) argument
     * @param type type of the new argument
     * @return a new SmartHandle whose signature includes the additional argument
     */
    public SmartHandle drop(String beforeName, String newName, Class<?> type) {
        // signature.insertArg widens the logical signature; MethodHandles.dropArguments
        // widens the real handle at the matching offset while discarding the value.
        return new SmartHandle(signature.insertArg(beforeName, newName, type), MethodHandles.dropArguments(handle, signature.argOffset(beforeName), type));
    }
}
public class SoftTFIDFDictionary { /** * subroutine of lookup */
private void storeUpperBound ( Token tok , Token simTok , List usefulTokens , Map upperBoundOnWeight , double sim ) { } } | double upperBound = tfidfDistance . getWeight ( tok ) * maxTFIDFScore [ simTok . getIndex ( ) ] * sim ; if ( DEBUG ) System . out . println ( "upper-bounding tok " + simTok + " sim=" + sim + " to " + tok + " upperBound " + upperBound ) ; if ( DEBUG ) System . out . println ( "upperBound = " + tfidfDistance . getWeight ( tok ) + "*" + maxTFIDFScore [ simTok . getIndex ( ) ] + "*" + sim ) ; usefulTokens . add ( simTok ) ; upperBoundOnWeight . put ( simTok , new Double ( upperBound ) ) ; |
public class ManipulationUtils {

    /**
     * Adds the retrieveInternalModelId method to the class, along with the companion
     * retrieveInternalModelIdName method. Both are generated from the field annotated
     * with @OpenEngSBModelId, searched through the class and its superclasses; when no
     * such field exists, both generated methods return null.
     */
    private static void addRetrieveInternalModelId(CtClass clazz) throws NotFoundException, CannotCompileException {
        // Locate the @OpenEngSBModelId-annotated field, walking up the class hierarchy.
        // NOTE(review): the inner break only exits the for loop; the while loop keeps
        // scanning superclasses, so an annotated superclass field overwrites one found
        // in a subclass — confirm that "last (topmost) wins" is intended.
        CtField modelIdField = null;
        CtClass temp = clazz;
        while (temp != null) {
            for (CtField field : temp.getDeclaredFields()) {
                if (JavassistUtils.hasAnnotation(field, OpenEngSBModelId.class.getName())) {
                    modelIdField = field;
                    break;
                }
            }
            temp = temp.getSuperclass();
        }
        CtClass[] params = generateClassField();
        // Generate: Object retrieveInternalModelId()
        CtMethod valueMethod = new CtMethod(cp.get(Object.class.getName()), "retrieveInternalModelId", params, clazz);
        StringBuilder builder = new StringBuilder();
        builder.append(createTrace("Called retrieveInternalModelId"));
        // NOTE(review): getFieldGetter is invoked before the null check on modelIdField —
        // presumably it tolerates a null field; confirm, otherwise this can NPE when no
        // annotated field exists.
        CtMethod idFieldGetter = getFieldGetter(modelIdField, clazz);
        if (modelIdField == null || idFieldGetter == null) {
            builder.append("return null;");
        } else {
            // Delegate to the id field's getter.
            builder.append(String.format("return %s();", idFieldGetter.getName()));
        }
        valueMethod.setBody(createMethodBody(builder.toString()));
        clazz.addMethod(valueMethod);
        // Generate: String retrieveInternalModelIdName() — returns the id field's name.
        CtMethod nameMethod = new CtMethod(cp.get(String.class.getName()), "retrieveInternalModelIdName", generateClassField(), clazz);
        if (modelIdField == null) {
            nameMethod.setBody(createMethodBody("return null;"));
        } else {
            nameMethod.setBody(createMethodBody("return \"" + modelIdField.getName() + "\";"));
        }
        clazz.addMethod(nameMethod);
    }
}
public class DeterminismHash {

    /**
     * Serialize the running hashes to an array and complete the overall
     * hash for the first int value in the array.
     *
     * Layout of the returned array (header occupies the first HEADER_OFFSET slots;
     * presumably HEADER_OFFSET >= 3 since indices 0-2 are written — confirm):
     *   [0] finished CRC over the inputs, folded with hash count and catalog version
     *   [1] catalog version
     *   [2] total number of hashes recorded (may exceed the number actually included)
     *   [HEADER_OFFSET..] up to MAX_HASHES_COUNT recorded hashes
     *
     * @return the serialized hash array
     */
    public int[] get() {
        // Cap the number of per-statement hashes copied into the result.
        int includedHashes = Math.min(m_hashCount, MAX_HASHES_COUNT);
        int[] retval = new int[includedHashes + HEADER_OFFSET];
        System.arraycopy(m_hashes, 0, retval, HEADER_OFFSET, includedHashes);
        // Fold the count and catalog version into the CRC before reading its value,
        // so the final digest covers them too. Order matters here.
        m_inputCRC.update(m_hashCount);
        m_inputCRC.update(m_catalogVersion);
        retval[0] = (int) m_inputCRC.getValue();
        retval[1] = m_catalogVersion;
        retval[2] = m_hashCount;
        return retval;
    }
}
public class DiscreteInterval { /** * Returns an interval representing the addition of the
* given interval with this one .
* @ param other interval to add to this one
* @ return interval sum */
public DiscreteInterval plus ( DiscreteInterval other ) { } } | return new DiscreteInterval ( this . min + other . min , this . max + other . max ) ; |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.