signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class NDArrayIndex { /** * Creates an index covering the given shape * ( for each dimension 0 , shape [ i ] ) * @ param shape the shape to cover * @ return the ndarray indexes to cover */ public static INDArrayIndex [ ] createCoveringShape ( int [ ] shape ) { } }
INDArrayIndex [ ] ret = new INDArrayIndex [ shape . length ] ; for ( int i = 0 ; i < ret . length ; i ++ ) { ret [ i ] = NDArrayIndex . interval ( 0 , shape [ i ] ) ; } return ret ;
public class VTensor { /** * Gets the value of the entry corresponding to the given indices . * @ param indices The indices of the multi - dimensional array . * @ return The current value . */ public double getFast ( int i0 , int i1 , int i2 ) { } }
int c = offset ; c += strides [ 0 ] * i0 ; c += strides [ 1 ] * i1 ; c += strides [ 2 ] * i2 ; return values . get ( c ) ;
public class GeometryPath { /** * Add a coordinate to the path . * @ param coordinate */ public void addCoordinate ( Coordinate coordinate ) { } }
Coordinate [ ] newCoords = new Coordinate [ coordinates . length + 1 ] ; System . arraycopy ( coordinates , 0 , newCoords , 0 , coordinates . length ) ; newCoords [ coordinates . length ] = coordinate ; setCoordinates ( newCoords ) ;
public class RobotExclusionFilter {

    /**
     * Resolves the robots.txt rules that govern the host of the given capture result.
     * <p>
     * Candidate robots.txt URLs are derived from the result's host and scheme and tried in
     * order. The first successful (or cached) answer is stored in the cache under the FIRST
     * candidate URL so subsequent lookups short-circuit. If no candidate yields rules, the
     * shared {@code emptyRules} instance is cached and returned instead.
     *
     * @param result the capture whose host/scheme determine the robots.txt candidates
     * @return the applicable rules, {@code emptyRules} when none could be fetched, or
     *         {@code null} on hard failures (bad host, malformed URL, I/O error, live-web
     *         cache unavailable or timed out)
     */
    private RobotRules getRules(CaptureSearchResult result) {
        RobotRules rules = null;
        RobotRules tmpRules = null;
        String host;
        try {
            host = result.getOriginalHost();
        } catch (Exception e) {
            // Cannot even determine the host: treat as a hard failure.
            LOGGER.warning("ROBOT: Failed to get host from(" + result.getOriginalUrl() + ")");
            return null;
        }
        String scheme = UrlOperations.urlToScheme(result.getOriginalUrl());
        List<String> urlStrings = searchResultToRobotUrlStrings(host, scheme);
        Iterator<String> itr = urlStrings.iterator();
        String firstUrlString = null;
        // Loop through them all. As soon as we get a response, store that
        // in the cache for the FIRST url we tried and return it.
        // If we get no responses for any of the robot URLs, use "empty" rules,
        // and record that in the cache, too.
        while (rules == null && itr.hasNext()) {
            String urlString = (String) itr.next();
            if (firstUrlString == null) {
                firstUrlString = urlString;
            }
            if (rulesCache.containsKey(urlString)) {
                LOGGER.fine("ROBOT: Cached(" + urlString + ")");
                rules = rulesCache.get(urlString);
                if (!urlString.equals(firstUrlString)) {
                    // Alias the first candidate to the cached entry so later requests hit on it directly.
                    LOGGER.fine("Adding extra url(" + firstUrlString + ") for prev cached rules(" + urlString + ")");
                    rulesCache.put(firstUrlString, rules);
                }
            } else {
                // long start = System.currentTimeMillis();
                Resource resource = null;
                try {
                    PerfStats.timeStart(PerfStat.RobotsFetchTotal);
                    if (LOGGER.isLoggable(Level.FINE)) {
                        LOGGER.fine("ROBOT: NotCached - Downloading(" + urlString + ")");
                    }
                    tmpRules = new RobotRules();
                    resource = webCache.getCachedResource(new URL(urlString), maxCacheMS, true);
                    // long elapsed = System.currentTimeMillis() - start;
                    // PerformanceLogger.noteElapsed("RobotRequest", elapsed, urlString);
                    if (resource.getStatusCode() != 200) {
                        // Non-200 is handled like an unavailable document (caught below, next URL is tried).
                        LOGGER.info("ROBOT: NotAvailable(" + urlString + ")");
                        throw new LiveDocumentNotAvailableException(urlString);
                    }
                    tmpRules.parse(resource);
                    rulesCache.put(firstUrlString, tmpRules);
                    rules = tmpRules;
                    if (LOGGER.isLoggable(Level.FINE)) {
                        LOGGER.fine("ROBOT: Downloaded(" + urlString + ")");
                    }
                } catch (LiveDocumentNotAvailableException e) {
                    // Soft failure: fall through and try the next candidate URL.
                    LOGGER.info("ROBOT: LiveDocumentNotAvailableException(" + urlString + ")");
                } catch (MalformedURLException e) {
                    // e.printStackTrace();
                    LOGGER.warning("ROBOT: MalformedURLException(" + urlString + ")");
                    return null;
                } catch (IOException e) {
                    LOGGER.warning("ROBOT: IOException(" + urlString + "):" + e.getLocalizedMessage());
                    return null;
                } catch (LiveWebCacheUnavailableException e) {
                    LOGGER.severe("ROBOT: LiveWebCacheUnavailableException(" + urlString + ")");
                    if (filterGroup != null) {
                        filterGroup.setLiveWebGone();
                    }
                    return null;
                } catch (LiveWebTimeoutException e) {
                    LOGGER.severe("ROBOT: LiveDocumentTimedOutException(" + urlString + ")");
                    if (filterGroup != null) {
                        filterGroup.setRobotTimedOut();
                    }
                    return null;
                } finally {
                    if (resource != null) {
                        try {
                            resource.close();
                        } catch (IOException e) {
                        }
                        resource = null;
                    }
                    // long elapsed = System.currentTimeMillis() - start;
                    // PerformanceLogger.noteElapsed("RobotRequest", elapsed, urlString);
                    PerfStats.timeEnd(PerfStat.RobotsFetchTotal);
                }
            }
        }
        if (rules == null) {
            // Special-case: allow empty rules if no longer available.
            rulesCache.put(firstUrlString, emptyRules);
            rules = emptyRules;
            LOGGER.fine("No rules available, using emptyRules for:" + firstUrlString);
        }
        return rules;
    }
}
public class Matrix4f { /** * Set the values of this matrix by reading 16 float values from off - heap memory in column - major order , * starting at the given address . * This method will throw an { @ link UnsupportedOperationException } when JOML is used with ` - Djoml . nounsafe ` . * < em > This method is unsafe as it can result in a crash of the JVM process when the specified address range does not belong to this process . < / em > * @ param address * the off - heap memory address to read the matrix values from in column - major order * @ return this */ public Matrix4f setFromAddress ( long address ) { } }
if ( Options . NO_UNSAFE ) throw new UnsupportedOperationException ( "Not supported when using joml.nounsafe" ) ; MemUtil . MemUtilUnsafe unsafe = ( MemUtil . MemUtilUnsafe ) MemUtil . INSTANCE ; unsafe . get ( this , address ) ; _properties ( 0 ) ; return this ;
public class ByteSerializer { /** * = = = = = These methods apply to all whole numbers with minor modifications = = = = = * = = = = = boolean , byte , short , int , long = = = = = */ @ Override public Byte convert ( Object value ) { } }
if ( value instanceof Number ) { double d = ( ( Number ) value ) . doubleValue ( ) ; if ( Double . isNaN ( d ) || Math . round ( d ) != d ) throw new IllegalArgumentException ( "Not a valid byte: " + value ) ; long l = ( ( Number ) value ) . longValue ( ) ; if ( l >= Byte . MIN_VALUE && l <= Byte . MAX_VALUE ) return Byte . valueOf ( ( byte ) l ) ; else throw new IllegalArgumentException ( "Value too large for byte: " + value ) ; } else if ( value instanceof String ) { return Byte . parseByte ( ( String ) value ) ; } else return null ;
public class ModelsImpl { /** * Adds a batch of sublists to an existing closedlist . * @ param appId The application ID . * @ param versionId The version ID . * @ param clEntityId The closed list model ID . * @ param patchClosedListOptionalParameter the object representing the optional parameters to be set before calling this API * @ throws IllegalArgumentException thrown if parameters fail the validation * @ throws ErrorResponseException thrown if the request is rejected by server * @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @ return the OperationStatus object if successful . */ public OperationStatus patchClosedList ( UUID appId , String versionId , UUID clEntityId , PatchClosedListOptionalParameter patchClosedListOptionalParameter ) { } }
return patchClosedListWithServiceResponseAsync ( appId , versionId , clEntityId , patchClosedListOptionalParameter ) . toBlocking ( ) . single ( ) . body ( ) ;
public class DescriptorImporterBase { /** * { @ inheritDoc } * @ see org . jboss . shrinkwrap . descriptor . api . DescriptorImporter # fromStream ( java . io . InputStream ) */ @ Override public T fromStream ( final InputStream in ) throws IllegalArgumentException , DescriptorImportException { } }
return fromStream ( in , true ) ;
public class WVideo { /** * Handles a request for the poster . */ private void handlePosterRequest ( ) { } }
Image poster = getComponentModel ( ) . poster ; if ( poster != null ) { ContentEscape escape = new ContentEscape ( poster ) ; escape . setCacheable ( ! Util . empty ( getCacheKey ( ) ) ) ; throw escape ; } else { LOG . warn ( "Client requested non-existant poster" ) ; }
public class PullerInternal { /** * Process a bunch of remote revisions from the _ changes feed at once */ @ Override @ InterfaceAudience . Private protected void processInbox ( RevisionList inbox ) { } }
Log . d ( TAG , "processInbox called" ) ; if ( db == null || ! db . isOpen ( ) ) { Log . w ( Log . TAG_SYNC , "%s: Database is null or closed. Unable to continue. db name is %s." , this , db . getName ( ) ) ; return ; } if ( canBulkGet == null ) { canBulkGet = serverIsSyncGatewayVersion ( "0.81" ) ; } // Ask the local database which of the revs are not known to it : String lastInboxSequence = ( ( PulledRevision ) inbox . get ( inbox . size ( ) - 1 ) ) . getRemoteSequenceID ( ) ; int numRevisionsRemoved = 0 ; try { // findMissingRevisions is the local equivalent of _ revs _ diff . it looks at the // array of revisions in " inbox " and removes the ones that already exist . // So whatever ' s left in ' inbox ' // afterwards are the revisions that need to be downloaded . numRevisionsRemoved = db . findMissingRevisions ( inbox ) ; } catch ( SQLException e ) { Log . e ( TAG , String . format ( Locale . ENGLISH , "%s failed to look up local revs" , this ) , e ) ; inbox = null ; } // introducing this to java version since inbox may now be null everywhere int inboxCount = 0 ; if ( inbox != null ) { inboxCount = inbox . size ( ) ; } if ( numRevisionsRemoved > 0 ) { Log . v ( TAG , "%s: processInbox() setting changesCount to: %s" , this , getChangesCount ( ) . get ( ) - numRevisionsRemoved ) ; // May decrease the changesCount , to account for the revisions we just found out we don ' t need to get . addToChangesCount ( - 1 * numRevisionsRemoved ) ; } if ( inboxCount == 0 ) { // Nothing to do . Just bump the lastSequence . Log . d ( TAG , "%s no new remote revisions to fetch. add lastInboxSequence (%s) to pendingSequences (%s)" , this , lastInboxSequence , pendingSequences ) ; long seq = pendingSequences . addValue ( lastInboxSequence ) ; pendingSequences . removeSequence ( seq ) ; setLastSequence ( pendingSequences . getCheckpointedValue ( ) ) ; pauseOrResume ( ) ; return ; } Log . v ( TAG , "%s: fetching %s remote revisions..." 
, this , inboxCount ) ; // Dump the revs into the queue of revs to pull from the remote db : for ( int i = 0 ; i < inbox . size ( ) ; i ++ ) { PulledRevision rev = ( PulledRevision ) inbox . get ( i ) ; if ( canBulkGet || ( rev . getGeneration ( ) == 1 && ! rev . isDeleted ( ) && ! rev . isConflicted ( ) ) ) { bulkRevsToPull . add ( rev ) ; } else { queueRemoteRevision ( rev ) ; } rev . setSequence ( pendingSequences . addValue ( rev . getRemoteSequenceID ( ) ) ) ; } pullRemoteRevisions ( ) ; pauseOrResume ( ) ;
public class ApiOvhMsServices { /** * Get this object properties * REST : GET / msServices / { serviceName } / account / { userPrincipalName } / sync * @ param serviceName [ required ] The internal name of your Active Directory organization * @ param userPrincipalName [ required ] User Principal Name * API beta */ public OvhSyncInformation serviceName_account_userPrincipalName_sync_GET ( String serviceName , String userPrincipalName ) throws IOException { } }
String qPath = "/msServices/{serviceName}/account/{userPrincipalName}/sync" ; StringBuilder sb = path ( qPath , serviceName , userPrincipalName ) ; String resp = exec ( qPath , "GET" , sb . toString ( ) , null ) ; return convertTo ( resp , OvhSyncInformation . class ) ;
public class JSMessageImpl { /** * Locking : Needs to lock to ensure map etc don ' t change in the middle . */ private int getCase ( int varIndex ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) JmfTr . entry ( this , tc , "getCase" , new Object [ ] { Integer . valueOf ( varIndex ) } ) ; int result ; synchronized ( getMessageLockArtefact ( ) ) { if ( map != null ) { result = map . choiceCodes [ varIndex ] ; } else if ( choiceCache == null ) { result = - 1 ; } else { result = choiceCache [ varIndex ] ; } } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) JmfTr . exit ( this , tc , "getCase" , Integer . valueOf ( result ) ) ; return result ;
public class ExceptionLogger { /** * Returns a consumer that can for example be used in the { @ link TextChannel # typeContinuously ( Consumer ) } method . * It unwraps { @ link CompletionException CompletionExceptions } , * { @ link InvocationTargetException InvocationTargetExceptions } and { @ link ExecutionException ExecutionExceptions } * first , and then adds a fresh { @ code CompletionException } as wrapper with the stacktrace of the caller of this * method and logs it afterwards . * The rewrapped exception is only logged if it is not in the { @ code ignoredThrowableTypes } . * @ param ignoredThrowableTypes The throwable types that should never be logged . * @ return A consumer which logs the given throwable . */ @ SafeVarargs public static Consumer < Throwable > getConsumer ( Class < ? extends Throwable > ... ignoredThrowableTypes ) { } }
return getConsumer ( null , ignoredThrowableTypes ) ;
public class JdbcTarget { /** * A list of glob patterns used to exclude from the crawl . For more information , see < a * href = " http : / / docs . aws . amazon . com / glue / latest / dg / add - crawler . html " > Catalog Tables with a Crawler < / a > . * @ param exclusions * A list of glob patterns used to exclude from the crawl . For more information , see < a * href = " http : / / docs . aws . amazon . com / glue / latest / dg / add - crawler . html " > Catalog Tables with a Crawler < / a > . */ public void setExclusions ( java . util . Collection < String > exclusions ) { } }
if ( exclusions == null ) { this . exclusions = null ; return ; } this . exclusions = new java . util . ArrayList < String > ( exclusions ) ;
public class A_CmsXmlDocument {

    /**
     * Removes the bookmark for an element with the given name and locale.<p>
     *
     * @param path the lookup path to use for the bookmark
     * @param locale the locale of the element
     * @return the element removed from the bookmarks or null
     */
    protected I_CmsXmlContentValue removeBookmark(String path, Locale locale) {
        // Drop the path -> locales mapping entry for this locale.
        Set<Locale> localesForPath = m_elementLocales.get(path);
        if (localesForPath != null) {
            localesForPath.remove(locale);
        }
        // Drop the locale -> paths mapping entry for this path.
        Set<String> pathsForLocale = m_elementNames.get(locale);
        if (pathsForLocale != null) {
            pathsForLocale.remove(path);
        }
        // Finally remove the bookmark itself and hand back whatever was stored there.
        return m_bookmarks.remove(getBookmarkName(path, locale));
    }
}
public class RoboGraphics { /** * called when our view appears */ void viewDidInit ( CGRect bounds ) { } }
defaultFramebuffer = gl . glGetInteger ( GL20 . GL_FRAMEBUFFER_BINDING ) ; if ( defaultFramebuffer == 0 ) throw new IllegalStateException ( "Failed to determine defaultFramebuffer" ) ; boundsChanged ( bounds ) ;
public class HighlightOptions { /** * Add names of fields to highlight on * @ param fieldnames * @ return */ public HighlightOptions addFields ( Collection < String > fieldnames ) { } }
Assert . notNull ( fieldnames , "Fieldnames must not be null!" ) ; for ( String fieldname : fieldnames ) { addField ( fieldname ) ; } return this ;
public class LineSeriesData { /** * Gets the size of the data . * @ return the data */ public int size ( ) { } }
int ret = 0 ; for ( Collection < LineSeriesItem < I , V > > series : data ) { if ( series . size ( ) > 0 ) ++ ret ; } return ret ;
public class Instant { /** * Obtains an instance of { @ code Instant } from a text string such as * { @ code 2007-12-03T10:15:30.00Z } . * The string must represent a valid instant in UTC and is parsed using * { @ link DateTimeFormatter # ISO _ INSTANT } . * @ param text the text to parse , not null * @ return the parsed instant , not null * @ throws DateTimeParseException if the text cannot be parsed */ public static Instant parse ( final CharSequence text ) { } }
return DateTimeFormatter . ISO_INSTANT . parse ( text , Instant :: from ) ;
public class PngOptimizer { private PngByteArrayOutputStream serialize ( List < byte [ ] > scanlines ) { } }
final int scanlineLength = scanlines . get ( 0 ) . length ; final byte [ ] imageData = new byte [ scanlineLength * scanlines . size ( ) ] ; for ( int i = 0 ; i < scanlines . size ( ) ; i ++ ) { final int offset = i * scanlineLength ; final byte [ ] scanline = scanlines . get ( i ) ; System . arraycopy ( scanline , 0 , imageData , offset , scanlineLength ) ; } return new PngByteArrayOutputStream ( imageData ) ;
public class AbstrCFMLExprTransformer {

    /**
     * Parses a unary sign expression: negation ({@code -x}), unary plus ({@code +x}),
     * pre-decrement ({@code --x}) or pre-increment ({@code ++x}). Falls through to
     * {@code clip} when no sign operator is present.
     *
     * @return CFXD Element
     * @throws TemplateException
     */
    private Expression negatePlusMinusOp(Data data) throws TemplateException {
        // And Operation: remember the position before consuming any operator characters.
        Position line = data.srcCode.getPosition();
        if (data.srcCode.forwardIfCurrent('-')) {
            // pre decrement: a second '-' makes this "--x"
            if (data.srcCode.forwardIfCurrent('-')) {
                comments(data);
                Expression expr = clip(data);
                return data.factory.opUnary((Variable) expr, data.factory.DOUBLE_ONE(), Factory.OP_UNARY_PRE, Factory.OP_UNARY_MINUS, line, data.srcCode.getPosition());
                // ExprDouble res = OpDouble.toExprDouble(expr, LitDouble.toExprDouble(1D), OpDouble.MINUS);
                // return new OpVariable((Variable) expr, res, data.cfml.getPosition());
            }
            // single '-': arithmetic negation
            comments(data);
            return data.factory.opNegateNumber(clip(data), Factory.OP_NEG_NBR_MINUS, line, data.srcCode.getPosition());
        } else if (data.srcCode.forwardIfCurrent('+')) {
            // a second '+' makes this a pre increment "++x"
            if (data.srcCode.forwardIfCurrent('+')) {
                comments(data);
                Expression expr = clip(data);
                return data.factory.opUnary((Variable) expr, data.factory.DOUBLE_ONE(), Factory.OP_UNARY_PRE, Factory.OP_UNARY_PLUS, line, data.srcCode.getPosition());
            }
            // single '+': unary plus only coerces the operand to a numeric expression
            comments(data);
            return data.factory.toExprDouble(clip(data));
            // OpNegateNumber.toExprDouble(clip(), OpNegateNumber.PLUS, line);
        }
        // no sign operator present
        return clip(data);
    }
}
public class RedisImpl { /** * 初始化默认连接数量 . */ protected void initPool ( ) { } }
// System . err . println ( " initPool server : " + this . server + // " initialPoolSize : " + initialPoolSize + " start " ) ; if ( this . initialPoolSize <= 0 ) { return ; } // windows环境关闭初始化默认redis连接功能 ? if ( System . getProperty ( "os.name" ) . startsWith ( "Windows" ) ) { return ; } int size ; if ( this . initialPoolSize > this . maxActive ) { size = this . maxActive ; } else { size = this . initialPoolSize ; } Jedis [ ] jedisArr = new Jedis [ size ] ; for ( int i = 0 ; i < jedisArr . length ; i ++ ) { jedisArr [ i ] = this . getResource ( ) ; } // int numActive = pool . getInternalPool ( ) . getNumActive ( ) ; for ( int i = 0 ; i < jedisArr . length ; i ++ ) { this . returnResource ( jedisArr [ i ] ) ; } // int numActive2 = pool . getInternalPool ( ) . getNumActive ( ) ; // System . err . println ( " initPool server : " + this . server + // " initialPoolSize : " + initialPoolSize + " numActive : " + numActive + // " numActive2 : " + numActive2 + " end " ) ;
public class SymbolManager { /** * Set the IconRotationAlignment property * In combination with { @ link Property . SYMBOL _ PLACEMENT } , determines the rotation behavior of icons . * @ param value property wrapper value around String */ public void setIconRotationAlignment ( @ Property . ICON_ROTATION_ALIGNMENT String value ) { } }
PropertyValue propertyValue = iconRotationAlignment ( value ) ; constantPropertyUsageMap . put ( PROPERTY_ICON_ROTATION_ALIGNMENT , propertyValue ) ; layer . setProperties ( propertyValue ) ;
public class AT_Context { /** * Sets the top and bottom frame margin . * @ param frameTop margin * @ param frameBottom margin * @ return this to allow chaining */ public AT_Context setFrameTopBottomMargin ( int frameTop , int frameBottom ) { } }
if ( frameTop > - 1 && frameBottom > - 1 ) { this . frameTopMargin = frameTop ; this . frameBottomMargin = frameBottom ; } return this ;
public class AbucoinsAccountServiceRaw { /** * Corresponds to < code > POST deposits / make < / code > * @ param cryptoRequest * @ return * @ throws IOException */ public AbucoinsCryptoDeposit abucoinsDepositMake ( AbucoinsCryptoDepositRequest cryptoRequest ) throws IOException { } }
return abucoinsAuthenticated . depositsMake ( cryptoRequest , exchange . getExchangeSpecification ( ) . getApiKey ( ) , signatureCreator , exchange . getExchangeSpecification ( ) . getPassword ( ) , timestamp ( ) ) ;
public class Fallback { /** * Configures the { @ code fallback } to be executed if execution fails . The { @ code fallback } applies an { @ link * ExecutionAttemptedEvent } . * @ throws NullPointerException if { @ code fallback } is null */ @ SuppressWarnings ( "unchecked" ) public static < R > Fallback < R > of ( CheckedFunction < ExecutionAttemptedEvent < ? extends R > , ? extends R > fallback ) { } }
return new Fallback < > ( Assert . notNull ( ( CheckedFunction ) fallback , "fallback" ) , false ) ;
public class BrSendungsFolgenDeserializer { /** * Resolves the Sendung ids which are needed to get the Sendung details . < br > * The data has these two structures : * < code > data - > viewer - > series - > clipsOnly - > edges [ ] - > node - > id < / code > < br > * < code > data - > viewer - > series - > previousEpisodes - > edges [ ] - > node - > id < / code > */ @ Override public BrIdsDTO deserialize ( final JsonElement aElement , final Type aType , final JsonDeserializationContext aContext ) { } }
final BrIdsDTO results = new BrIdsDTO ( ) ; final JsonObject baseObject = aElement . getAsJsonObject ( ) ; final Optional < JsonObject > series = getSeries ( baseObject ) ; if ( series . isPresent ( ) ) { final Optional < JsonArray > clipsEdges = getClipsEdges ( series . get ( ) ) ; addToResult ( results , clipsEdges ) ; final Optional < JsonArray > previosEpisodesEdges = getPreviousEpisodesEdges ( series . get ( ) ) ; addToResult ( results , previosEpisodesEdges ) ; } return results ;
public class TemporalProperty { /** * Gets the value effective at the specified instant in time . * Returns null if the property had no value at the specified time . * @ param effectivAt * @ return the value of the property at the specified time . */ public T get ( Instant effectivAt ) { } }
Entry < Instant , T > entry = values . floorEntry ( effectivAt ) ; if ( entry == null ) return null ; return entry . getValue ( ) ;
public class IntegrationAccountAssembliesInner { /** * Create or update an assembly for an integration account . * @ param resourceGroupName The resource group name . * @ param integrationAccountName The integration account name . * @ param assemblyArtifactName The assembly artifact name . * @ param assemblyArtifact The assembly artifact . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the AssemblyDefinitionInner object */ public Observable < AssemblyDefinitionInner > createOrUpdateAsync ( String resourceGroupName , String integrationAccountName , String assemblyArtifactName , AssemblyDefinitionInner assemblyArtifact ) { } }
return createOrUpdateWithServiceResponseAsync ( resourceGroupName , integrationAccountName , assemblyArtifactName , assemblyArtifact ) . map ( new Func1 < ServiceResponse < AssemblyDefinitionInner > , AssemblyDefinitionInner > ( ) { @ Override public AssemblyDefinitionInner call ( ServiceResponse < AssemblyDefinitionInner > response ) { return response . body ( ) ; } } ) ;
public class AbstractController { @ Override public Coordinate getLocation ( HumanInputEvent < ? > event , RenderSpace renderSpace ) { } }
return eventParser . getLocation ( event , renderSpace ) ;
public class FloatingDecimal {

    /**
     * Extracts a hexadecimal digit from position <code>position</code>
     * of string <code>s</code>.
     *
     * @param s the string to read from
     * @param position the character index to interpret as a hex digit
     * @return the digit value in the range 0..15
     * @throws AssertionError if the character is not a valid hexadecimal digit
     */
    static int getHexDigit(String s, int position) {
        final int digit = Character.digit(s.charAt(position), 16);
        // Character.digit returns -1 for non-hex characters and is bounded above by the radix.
        if (digit <= -1 || digit >= 16) {
            throw new AssertionError("Unexpected failure of digit conversion of " + s.charAt(position));
        }
        return digit;
    }
}
public class JBBPBitInputStream { /** * Read number of short items from the input stream . * @ param items number of items to be read from the input stream , if less than * zero then all stream till the end will be read * @ param byteOrder the order of bytes to be used to decode short values * @ return read items as a short array * @ throws IOException it will be thrown for any transport problem during the * operation * @ see JBBPByteOrder # BIG _ ENDIAN * @ see JBBPByteOrder # LITTLE _ ENDIAN */ public short [ ] readShortArray ( final int items , final JBBPByteOrder byteOrder ) throws IOException { } }
int pos = 0 ; if ( items < 0 ) { short [ ] buffer = new short [ INITIAL_ARRAY_BUFFER_SIZE ] ; // till end while ( hasAvailableData ( ) ) { final int next = readUnsignedShort ( byteOrder ) ; if ( buffer . length == pos ) { final short [ ] newbuffer = new short [ buffer . length << 1 ] ; System . arraycopy ( buffer , 0 , newbuffer , 0 , buffer . length ) ; buffer = newbuffer ; } buffer [ pos ++ ] = ( short ) next ; } if ( buffer . length == pos ) { return buffer ; } final short [ ] result = new short [ pos ] ; System . arraycopy ( buffer , 0 , result , 0 , pos ) ; return result ; } else { // number final short [ ] buffer = new short [ items ] ; for ( int i = 0 ; i < items ; i ++ ) { buffer [ i ] = ( short ) readUnsignedShort ( byteOrder ) ; } return buffer ; }
public class RuntimeDataServiceImpl { /** * start * process definition methods */ public Collection < ProcessDefinition > getProcessesByDeploymentId ( String deploymentId , QueryContext queryContext ) { } }
deploymentId = getLatestDeploymentId ( requireNonNull ( deploymentId , DEPLOYMENT_ID_MUST_NOT_BE_NULL ) ) ; List < ProcessDefinition > outputCollection = new ArrayList < ProcessDefinition > ( ) ; CollectionUtils . select ( availableProcesses , new ByDeploymentIdPredicate ( deploymentId , identityProvider . getRoles ( ) ) , outputCollection ) ; applySorting ( outputCollection , queryContext ) ; return applyPaginition ( outputCollection , queryContext ) ;
public class JavadocClassFinder { /** * Override extraFileActions to check for package documentation */ @ Override protected void extraFileActions ( PackageSymbol pack , JavaFileObject fo ) { } }
if ( fo . isNameCompatible ( "package" , JavaFileObject . Kind . HTML ) ) docenv . getPackageDoc ( pack ) . setDocPath ( fo ) ;
public class Transaction { /** * Starts a transaction and obtains the transaction id from the server . */ ApiFuture < Void > begin ( ) { } }
BeginTransactionRequest . Builder beginTransaction = BeginTransactionRequest . newBuilder ( ) ; beginTransaction . setDatabase ( firestore . getDatabaseName ( ) ) ; if ( previousTransactionId != null ) { beginTransaction . getOptionsBuilder ( ) . getReadWriteBuilder ( ) . setRetryTransaction ( previousTransactionId ) ; } ApiFuture < BeginTransactionResponse > transactionBeginFuture = firestore . sendRequest ( beginTransaction . build ( ) , firestore . getClient ( ) . beginTransactionCallable ( ) ) ; return ApiFutures . transform ( transactionBeginFuture , new ApiFunction < BeginTransactionResponse , Void > ( ) { @ Override public Void apply ( BeginTransactionResponse beginTransactionResponse ) { transactionId = beginTransactionResponse . getTransaction ( ) ; pending = true ; return null ; } } ) ;
public class RequestInvoker { /** * 创建请求代理 * @ param proxyInterface 被代理的接口 * @ param classLoader 类加载器 */ @ SuppressWarnings ( "unchecked" ) public static < T > T create ( Class < T > proxyInterface , ClassLoader classLoader ) { } }
Objects . requireNonNull ( proxyInterface , "proxyInterface 不能为null" ) ; if ( ! proxyInterface . isInterface ( ) ) { throw new IllegalArgumentException ( proxyInterface . getName ( ) + "不是一个接口" ) ; } if ( ! proxyInterface . isAnnotationPresent ( Rest . class ) ) { throw new IllegalArgumentException ( proxyInterface . getName ( ) + "必须添加@Rest注解" ) ; } classLoader = classLoader == null ? Thread . currentThread ( ) . getContextClassLoader ( ) : classLoader ; // 当Thread . currentThread . getContextClassLoader ( ) 为null时重新赋值 classLoader = classLoader == null ? RequestInvoker . class . getClassLoader ( ) : classLoader ; return ( T ) Proxy . newProxyInstance ( classLoader , new Class [ ] { proxyInterface , HttpClientConfig . class } , new RequestInvoker ( proxyInterface ) ) ;
public class ServerDnsAliasesInner { /** * Creates a server dns alias . * @ param resourceGroupName The name of the resource group that contains the resource . You can obtain this value from the Azure Resource Manager API or the portal . * @ param serverName The name of the server that the alias is pointing to . * @ param dnsAliasName The name of the server DNS alias . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the ServerDnsAliasInner object */ public Observable < ServerDnsAliasInner > beginCreateOrUpdateAsync ( String resourceGroupName , String serverName , String dnsAliasName ) { } }
return beginCreateOrUpdateWithServiceResponseAsync ( resourceGroupName , serverName , dnsAliasName ) . map ( new Func1 < ServiceResponse < ServerDnsAliasInner > , ServerDnsAliasInner > ( ) { @ Override public ServerDnsAliasInner call ( ServiceResponse < ServerDnsAliasInner > response ) { return response . body ( ) ; } } ) ;
public class ExecutionEntityImpl { /** * Static factory method : to be used when a new execution is created for the very first time / * Calling this will make sure no extra db fetches are needed later on , as all collections * will be populated with empty collections . If they would be null , it would trigger * a database fetch for those relationship entities . */ public static ExecutionEntityImpl createWithEmptyRelationshipCollections ( ) { } }
ExecutionEntityImpl execution = new ExecutionEntityImpl ( ) ; execution . executions = new ArrayList < ExecutionEntityImpl > ( 1 ) ; execution . tasks = new ArrayList < TaskEntity > ( 1 ) ; execution . variableInstances = new HashMap < String , VariableInstanceEntity > ( 1 ) ; execution . jobs = new ArrayList < JobEntity > ( 1 ) ; execution . timerJobs = new ArrayList < TimerJobEntity > ( 1 ) ; execution . eventSubscriptions = new ArrayList < EventSubscriptionEntity > ( 1 ) ; execution . identityLinks = new ArrayList < IdentityLinkEntity > ( 1 ) ; return execution ;
public class UserInputInlineMenuButton {

    /**
     * Initiates a conversation with the chat awaiting text input.
     * On input, executes the registered text callback.
     *
     * @param query the callback query of the press; used to obtain the bot instance
     * @see Conversation
     * @see TextPrompt
     */
    @Override
    public void handlePress(CallbackQuery query) {
        // Run the button's regular press callback first.
        executeCallback();
        // Only start a new conversation when a text callback is registered and the previous
        // prompt (if any) has completed — inputGiven acts as a re-entrancy guard:
        // cleared while awaiting input, set again once the prompt has processed a message.
        if (textCallback != null && inputGiven) {
            inputGiven = false;
            Conversation.builder(query.getBotInstance())
                    .forWhom(owner.getBaseMessage().getChat())
                    .silent(true)
                    .prompts()
                    .last(new TextPrompt() {
                        @Override
                        public boolean process(ConversationContext context, TextContent input) {
                            // Forward the user's text to the callback and re-arm the guard.
                            textCallback.accept(UserInputInlineMenuButton.this, input.getContent());
                            inputGiven = true;
                            // false: the conversation ends after this single prompt.
                            return false;
                        }

                        @Override
                        public SendableMessage promptMessage(ConversationContext context) {
                            // No prompt message is sent before awaiting input.
                            return null;
                        }
                    })
                    .build()
                    .begin();
        }
    }
}
public class AtomicDoubleCastExtensions { /** * Convert the given value to { @ code AtomicInteger } . This function is not null - safe . * @ param number a number of { @ code AtomicDouble } type . * @ return the equivalent value to { @ code number } of { @ code AtomicInteger } type . */ @ Pure @ Inline ( value = "new $2($1.intValue())" , imported = AtomicInteger . class ) public static AtomicInteger toAtomicInteger ( AtomicDouble number ) { } }
return new AtomicInteger ( number . intValue ( ) ) ;
public class JaspiServiceImpl { /** * TODO find a good home somewhere */ private static Object getPrivateAttributes ( HttpServletRequest req , String key ) { } }
HttpServletRequest sr = req ; if ( sr instanceof HttpServletRequestWrapper ) { HttpServletRequestWrapper w = ( HttpServletRequestWrapper ) sr ; sr = ( HttpServletRequest ) w . getRequest ( ) ; while ( sr != null && sr instanceof HttpServletRequestWrapper ) sr = ( HttpServletRequest ) ( ( HttpServletRequestWrapper ) sr ) . getRequest ( ) ; } if ( sr != null && sr instanceof IPrivateRequestAttributes ) { return ( ( IPrivateRequestAttributes ) sr ) . getPrivateAttribute ( key ) ; } return null ;
public class SessionManager {

    /**
     * not called from XD
     *
     * Resolves (or optionally creates) the session for this request:
     * 1. look up the in-use session id/version from the affinity context,
     * 2. try that id; on a cookie-sourced miss, load all cookie values into the
     *    affinity context so alternate ids can be tried,
     * 3. iterate over any remaining candidate ids via setNextId,
     * 4. if still unresolved and {@code create} is true, create a new session,
     *    possibly reusing the requested id,
     * 5. finally adapt the session and set the response cookie.
     */
    @Override
    public Object getSession(ServletRequest request, ServletResponse response, SessionAffinityContext affinityContext, boolean create) {
        /*
         * Check to see if the request provides a JSESSIONID cookie
         */
        String sessionID = _sam.getInUseSessionID(request, affinityContext);
        int sessionVersion = _sam.getInUseSessionVersion(request, affinityContext); // affinityContext.getRequestedSessionVersion();
        ISession session = null;
        /*
         * If the sessionID is not null, this may be a request with an existing
         * session.
         */
        if (sessionID != null) {
            session = (ISession) getSession(sessionID, sessionVersion, true, null);
            if (session == null && affinityContext.isRequestedSessionIDFromCookie()
                // perform allIds call and reset sac
                && !affinityContext.isAllSessionIdsSetViaSet()) {
                // PM89885 add another check for isAllSessionIdsSetViaSet since we may
                // have done a set in HttpSessionContextImpl.getIHttpSession
                List allSessionIds = _sam.getAllCookieValues(request);
                affinityContext.setAllSessionIds(allSessionIds);
            }
        }
        // Try each remaining candidate id provided by the affinity context until one
        // resolves to an existing session or the candidates are exhausted.
        while ((session == null) && (_sam.setNextId(affinityContext))) {
            sessionID = _sam.getInUseSessionID(request, affinityContext);
            sessionVersion = _sam.getInUseSessionVersion(request, affinityContext); // affinityContext.getRequestedSessionVersion();
            session = (ISession) getSession(sessionID, sessionVersion, true, null);
        }
        /*
         * If the session is null, create a new one if required. The session manager
         * will check to see if an alternate webmodule has a session created using
         * this session id. If not, the session object returned may have a different
         * session id from the one supplied.
         */
        if ((session == null) && create) {
            boolean reuseThisID = false;
            if (_store.getShouldReuseId() || affinityContext.isResponseIdSet() || affinityContext.isRequestedSessionIDFromSSL()) {
                reuseThisID = true;
            }
            session = createISession(sessionID, sessionVersion, reuseThisID);
        }
        // May return null when no session was found and create == false.
        return adaptAndSetCookie(request, response, affinityContext, session);
    }
}
public class LikeRule { /** * Serialize the state of the object . * @ param out object output stream * @ throws IOException if IOException during serialization */ private void writeObject ( final java . io . ObjectOutputStream out ) throws IOException { } }
out . writeObject ( field ) ; out . writeObject ( pattern . pattern ( ) ) ;
public class PutPipelineDefinitionResult { /** * The validation warnings that are associated with the objects defined in < code > pipelineObjects < / code > . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setValidationWarnings ( java . util . Collection ) } or { @ link # withValidationWarnings ( java . util . Collection ) } if * you want to override the existing values . * @ param validationWarnings * The validation warnings that are associated with the objects defined in < code > pipelineObjects < / code > . * @ return Returns a reference to this object so that method calls can be chained together . */ public PutPipelineDefinitionResult withValidationWarnings ( ValidationWarning ... validationWarnings ) { } }
if ( this . validationWarnings == null ) { setValidationWarnings ( new com . amazonaws . internal . SdkInternalList < ValidationWarning > ( validationWarnings . length ) ) ; } for ( ValidationWarning ele : validationWarnings ) { this . validationWarnings . add ( ele ) ; } return this ;
public class IonBinary {

    /**
     * this method computes the output length of this timestamp value
     * in the Ion binary format. It does not include the length of
     * the typedesc byte that preceeds the actual value. The output
     * length of a null value is 0, as a result this this.
     *
     * @param di may be null
     */
    public static int lenIonTimestamp(Timestamp di) {
        if (di == null) return 0;
        int len = 0;
        // NOTE: every case below intentionally FALLS THROUGH to the next one, so the
        // length accumulates one field per precision level (fraction -> second ->
        // minute/hour -> day -> month -> year).
        switch (di.getPrecision()) {
            case FRACTION:
            case SECOND: {
                BigDecimal fraction = di.getFractionalSecond();
                if (fraction != null) {
                    assert fraction.signum() >= 0 && !fraction.equals(BigDecimal.ZERO)
                        : "Bad timestamp fraction: " + fraction;
                    // Since the fraction is not 0d0, at least one subfield of the
                    // exponent and mantissa is non-zero, so this will always write at
                    // least one byte.
                    int fracLen = IonBinary.lenIonDecimal(fraction);
                    assert fracLen > 0;
                    len += fracLen;
                }
                len++; // len of seconds < 60, always one byte
            }
            case MINUTE:
                len += 2; // len of hour and minutes (both < 127)
            case DAY:
                len += 1; // len of day (< 127); month byte is added by the next case
            case MONTH:
                len += 1; // len of month (< 127)
            case YEAR:
                len += IonBinary.lenVarUInt(di.getZYear());
        }
        // The local offset is always encoded: as a single byte for "unknown" (-0)
        // or zero, otherwise as a VarInt of the offset in minutes.
        Integer offset = di.getLocalOffset();
        if (offset == null) {
            len++; // room for the -0 (i.e. offset is "no specified offset")
        } else if (offset == 0) {
            len++;
        } else {
            len += IonBinary.lenVarInt(offset.longValue());
        }
        return len;
    }
}
public class S { /** * Format the number with specified template , pattern , language and locale * @ param number * @ param pattern * @ param locale * @ return the formatted String * @ see DecimalFormatSymbols */ public static String format ( ITemplate template , Number number , String pattern , Locale locale ) { } }
if ( null == number ) { throw new NullPointerException ( ) ; } if ( null == locale ) { locale = I18N . locale ( template ) ; } NumberFormat nf ; if ( null == pattern ) nf = NumberFormat . getNumberInstance ( locale ) ; else { DecimalFormatSymbols symbols = new DecimalFormatSymbols ( locale ) ; nf = new DecimalFormat ( pattern , symbols ) ; } return nf . format ( number ) ;
public class AbstractCacheableLockManager {

    /**
     * {@inheritDoc}
     *
     * Serializes the current lock data into a "CacheLocks" content file inside the
     * given backup directory: the lock count first, then each LockData via its
     * writeExternal method.
     */
    public void backup(File storageDir) throws BackupException {
        LOG.info("Start to backup lock data");
        ObjectOutputStream out = null;
        try {
            File contentFile = new File(storageDir, "CacheLocks" + DBBackup.CONTENT_FILE_SUFFIX);
            out = new ObjectOutputStream(new BufferedOutputStream(PrivilegedFileHelper.fileOutputStream(contentFile)));
            List<LockData> locks = getLockList();
            // Write the count first so restore knows how many entries to read back.
            out.writeInt(locks.size());
            for (LockData lockData : locks) {
                lockData.writeExternal(out);
            }
        } catch (FileNotFoundException e) {
            throw new BackupException(e);
        } catch (IOException e) {
            throw new BackupException(e);
        } finally {
            // Close failures are logged but never mask an earlier BackupException.
            if (out != null) {
                try {
                    out.flush();
                    out.close();
                } catch (IOException e) {
                    LOG.error("Can't close output stream", e);
                }
            }
        }
    }
}
public class Binder { /** * Drop from the end of the argument list a number of arguments . * @ param count the number of arguments to drop * @ return a new Binder */ public Binder dropLast ( int count ) { } }
assert count <= type ( ) . parameterCount ( ) ; return drop ( type ( ) . parameterCount ( ) - count , count ) ;
public class ArrayContext { /** * Create an array of < code > int < / code > values from the given array of * { @ link Integer } values . Each value in the given array will be unwrapped * to its primitive form . If any values within the given array are * < code > null < / code > , then they will be ignored and the resulting array will * be smaller than the given array . * @ param integers The array of integers to convert * @ return The array of primitive ints */ public int [ ] createIntArray ( Integer [ ] integers ) { } }
int [ ] result ; if ( integers != null ) { int count = 0 ; int inputLength = integers . length ; int [ ] tempResult = new int [ inputLength ] ; for ( int i = 0 ; i < inputLength ; i ++ ) { if ( integers [ i ] != null ) { tempResult [ count ] = integers [ i ] . intValue ( ) ; count ++ ; } } result = tempResult ; if ( count != inputLength ) { result = new int [ count ] ; System . arraycopy ( tempResult , 0 , result , 0 , count ) ; } } else { result = null ; } return result ;
public class MetricKeyDataPointsMarshaller { /** * Marshall the given parameter object . */ public void marshall ( MetricKeyDataPoints metricKeyDataPoints , ProtocolMarshaller protocolMarshaller ) { } }
if ( metricKeyDataPoints == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( metricKeyDataPoints . getKey ( ) , KEY_BINDING ) ; protocolMarshaller . marshall ( metricKeyDataPoints . getDataPoints ( ) , DATAPOINTS_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class KunderaQueryUtils { /** * Checks for group by . * @ param jpqlExpression * the jpql expression * @ return true , if successful */ public static boolean hasGroupBy ( JPQLExpression jpqlExpression ) { } }
if ( isSelectStatement ( jpqlExpression ) ) { return ( ( SelectStatement ) jpqlExpression . getQueryStatement ( ) ) . hasGroupByClause ( ) ; } return false ;
public class PathParamDispatch { /** * prepare to resolve path parameters * @ param parameterResolveFactory * @ param param * @ param route * @ param args */ @ Override public void dispatch ( ParameterResolveFactory parameterResolveFactory , ActionParam param , Route route , Object [ ] args ) { } }
if ( ! route . isRegex ( ) ) return ; Matcher matcher = route . getMatcher ( ) ; String [ ] pathParameters = new String [ matcher . groupCount ( ) ] ; for ( int i = 1 , len = matcher . groupCount ( ) ; i <= len ; i ++ ) { pathParameters [ i - 1 ] = matcher . group ( i ) ; } Map < String , String > path = new HashMap < > ( ) ; route . getRegexRoute ( ) . getNames ( ) . forEach ( name -> CollectionKit . MapAdd ( path , name , matcher . group ( name ) ) ) ; param . getRequest ( ) . setPathNamedParameters ( path ) ; param . getRequest ( ) . setPathParameters ( pathParameters ) ;
public class Select { /** * This method will set the order of the options generated in the select . It must contain a * comma separated string listing the order or the stages that the repeating types are processed . * These values are " option " , " dataSource " , " default " , and " null " . * @ param order comma separated ordering of items when there is a repeating select . * @ jsptagref . attributedescription Define the order of options generated for a repeating Select . * It must contain a comma separated string listing the order or the stages that the repeating types * are processed . These values are " option " , " dataSource " , " default " , and " null " . For example , * < pre > repeatingOrder = " dataSource , option " < / pre > * Then a & lt ; netui : selectOption > element could set the repeatingType attribute to " dataSource " * while another is defined for " option " . * @ jsptagref . databindable false * @ jsptagref . attributesyntaxvalue < i > string _ order < / i > * @ netui : attribute required = " false " rtexprvalue = " true " * description = " Define the order of options for a repeating Select " */ public void setRepeatingOrder ( String order ) throws JspException { } }
String [ ] options = order . split ( "," ) ; RepeatingStages [ ] stageOrder = new RepeatingStages [ options . length + 1 ] ; stageOrder [ 0 ] = RepeatingStages . BEFORE ; for ( int i = 0 ; i < options . length ; i ++ ) { String opt = options [ i ] . trim ( ) ; stageOrder [ i + 1 ] = RepeatingStages . parseString ( opt ) ; if ( stageOrder [ i + 1 ] == null ) { String s = Bundle . getString ( "Tags_SelectBadRepeatingStage" , new Object [ ] { opt } ) ; registerTagError ( s , null ) ; } } _order = stageOrder ;
public class Instance {

    /**
     * Detaches a disk from this instance.
     * Delegates directly to the compute service using this instance's id.
     *
     * @param deviceName the device name of the disk to detach
     * @param options operation options forwarded to the compute service
     * @return a zone operation if the detach request was issued correctly, {@code null} if the
     *         instance was not found
     * @throws ComputeException upon failure
     */
    public Operation detachDisk(String deviceName, OperationOption... options) {
        return compute.detachDisk(getInstanceId(), deviceName, options);
    }
}
public class DigestUtils { /** * Calculates digest and returns the value as a hex string . * @ param input input bytes * @ param digest digest algorithm * @ return digest as a hex string */ public static String dgstHex ( byte [ ] input , Digest digest ) { } }
checkNotNull ( input ) ; byte [ ] dgstBytes = dgst ( input , digest ) ; return BaseEncoding . base16 ( ) . encode ( dgstBytes ) ;
public class BdbStorageConfiguration { /** * When a reservation is made , we need to shrink the shared cache * accordingly to guarantee memory foot print of the new store . NOTE : This * is not an instantaneous operation . Changes will take effect only when * traffic is thrown and eviction happens . ( Won ' t happen until Network ports * are opened anyway which is rightfully done after storage service ) . When * changing this dynamically , we might want to block until the shared cache * shrinks enough */ private void adjustCacheSizes ( ) { } }
long newSharedCacheSize = voldemortConfig . getBdbCacheSize ( ) - this . reservedCacheSize ; logger . info ( "Setting the shared cache size to " + newSharedCacheSize ) ; for ( Environment environment : unreservedStores ) { EnvironmentMutableConfig mConfig = environment . getMutableConfig ( ) ; mConfig . setCacheSize ( newSharedCacheSize ) ; environment . setMutableConfig ( mConfig ) ; }
public class SRTServletRequest {

    /**
     * (non-Javadoc)
     *
     * @see javax.servlet.ServletRequest#getProtocol()
     *
     * Delegates to the underlying request; optionally verifies the request object
     * is not being reused and traces the returned protocol when fine logging is on.
     */
    public String getProtocol() {
        if (WCCustomProperties.CHECK_REQUEST_OBJECT_IN_USE) {
            checkRequestObjectInUse();
        }
        // 321485
        String protocol = this._request.getProtocol();
        if (TraceComponent.isAnyTracingEnabled() && logger.isLoggable(Level.FINE)) { // 306998.15
            logger.logp(Level.FINE, CLASS_NAME, "getProtocol", "this->" + this + ": " + " protocol --> " + protocol);
        }
        return protocol;
    }
}
public class AbstractEventSerializer {

    /**
     * Parses the {@link UserIdentity} in CloudTrailEventData.
     * Consumes either a JSON null (stored as a null userIdentity) or a JSON object,
     * mapping each known key to the corresponding UserIdentity field; unknown keys
     * are stored via parseDefaultValue.
     *
     * @param eventData {@link CloudTrailEventData} needs to parse.
     * @throws IOException on JSON read errors
     * @throws JsonParseException if the value is neither null nor an object
     */
    private void parseUserIdentity(CloudTrailEventData eventData) throws IOException {
        JsonToken nextToken = jsonParser.nextToken();
        // A JSON null userIdentity is recorded explicitly as null.
        if (nextToken == JsonToken.VALUE_NULL) {
            eventData.add(CloudTrailEventField.userIdentity.name(), null);
            return;
        }
        if (nextToken != JsonToken.START_OBJECT) {
            throw new JsonParseException("Not a UserIdentity object", jsonParser.getCurrentLocation());
        }
        UserIdentity userIdentity = new UserIdentity();
        // Walk the object's fields until END_OBJECT, dispatching on the key name.
        while (jsonParser.nextToken() != JsonToken.END_OBJECT) {
            String key = jsonParser.getCurrentName();
            switch (key) {
                case "type":
                    userIdentity.add(CloudTrailEventField.type.name(), jsonParser.nextTextValue());
                    break;
                case "principalId":
                    userIdentity.add(CloudTrailEventField.principalId.name(), jsonParser.nextTextValue());
                    break;
                case "arn":
                    userIdentity.add(CloudTrailEventField.arn.name(), jsonParser.nextTextValue());
                    break;
                case "accountId":
                    userIdentity.add(CloudTrailEventField.accountId.name(), jsonParser.nextTextValue());
                    break;
                case "accessKeyId":
                    userIdentity.add(CloudTrailEventField.accessKeyId.name(), jsonParser.nextTextValue());
                    break;
                case "userName":
                    userIdentity.add(CloudTrailEventField.userName.name(), jsonParser.nextTextValue());
                    break;
                case "sessionContext":
                    // Nested object: handled by its own parser.
                    this.parseSessionContext(userIdentity);
                    break;
                case "invokedBy":
                    userIdentity.add(CloudTrailEventField.invokedBy.name(), jsonParser.nextTextValue());
                    break;
                case "identityProvider":
                    userIdentity.add(CloudTrailEventField.identityProvider.name(), jsonParser.nextTextValue());
                    break;
                default:
                    // Forward-compatibility: keep unknown keys rather than dropping them.
                    userIdentity.add(key, parseDefaultValue(key));
                    break;
            }
        }
        eventData.add(CloudTrailEventField.userIdentity.name(), userIdentity);
    }
}
public class DatastoreXmlExternalizer { /** * Determines if the given datastore is externalizable by this object . * @ param datastore * @ return */ public boolean isExternalizable ( final Datastore datastore ) { } }
if ( datastore == null ) { return false ; } if ( datastore instanceof JdbcDatastore ) { return true ; } if ( datastore instanceof CsvDatastore ) { final Resource resource = ( ( CsvDatastore ) datastore ) . getResource ( ) ; if ( resource instanceof FileResource ) { return true ; } } if ( datastore instanceof ExcelDatastore ) { final Resource resource = ( ( ExcelDatastore ) datastore ) . getResource ( ) ; if ( resource instanceof FileResource ) { return true ; } } return false ;
public class ExecutionVertex { /** * Check whether the InputDependencyConstraint is satisfied for this vertex . * @ return whether the input constraint is satisfied */ boolean checkInputDependencyConstraints ( ) { } }
if ( getInputDependencyConstraint ( ) == InputDependencyConstraint . ANY ) { // InputDependencyConstraint = = ANY return IntStream . range ( 0 , inputEdges . length ) . anyMatch ( this :: isInputConsumable ) ; } else { // InputDependencyConstraint = = ALL return IntStream . range ( 0 , inputEdges . length ) . allMatch ( this :: isInputConsumable ) ; }
public class JingleDescription { /** * Adds a audio payload type to the packet . * @ param pt the audio payload type to add . */ public void addPayloadType ( final PayloadType pt ) { } }
synchronized ( payloads ) { if ( pt == null ) { LOGGER . severe ( "Null payload type" ) ; } else { payloads . add ( pt ) ; } }
public class AgentRunner {

    /**
     * Run an {@link Agent}.
     * This method does not return until the run loop is stopped via {@link #close()}.
     */
    public void run() {
        try {
            // Claim this runner for the current thread; if some other thread already
            // claimed it, return without running (isClosed is still set in finally).
            if (!thread.compareAndSet(null, Thread.currentThread())) {
                return;
            }
            final IdleStrategy idleStrategy = this.idleStrategy;
            final Agent agent = this.agent;
            try {
                agent.onStart();
            } catch (final Throwable throwable) {
                errorHandler.onError(throwable);
                // A failed onStart skips the duty-cycle loop entirely.
                isRunning = false;
            }
            while (isRunning) {
                if (doDutyCycle(idleStrategy, agent)) {
                    break;
                }
            }
            // onClose is invoked on this path regardless of how the loop ended,
            // and its failures are routed to the error handler.
            try {
                agent.onClose();
            } catch (final Throwable throwable) {
                errorHandler.onError(throwable);
            }
        } finally {
            isClosed = true;
        }
    }
}
public class ConfigUtils { /** * Translate topology config . * @ param heron the heron config object to receive the results . */ private static void doTopologyLevelTranslation ( Config heronConfig ) { } }
if ( heronConfig . containsKey ( org . apache . storm . Config . TOPOLOGY_ACKER_EXECUTORS ) ) { Integer nAckers = Utils . getInt ( heronConfig . get ( org . apache . storm . Config . TOPOLOGY_ACKER_EXECUTORS ) ) ; if ( nAckers > 0 ) { org . apache . heron . api . Config . setTopologyReliabilityMode ( heronConfig , org . apache . heron . api . Config . TopologyReliabilityMode . ATLEAST_ONCE ) ; } else { org . apache . heron . api . Config . setTopologyReliabilityMode ( heronConfig , org . apache . heron . api . Config . TopologyReliabilityMode . ATMOST_ONCE ) ; } } else { org . apache . heron . api . Config . setTopologyReliabilityMode ( heronConfig , org . apache . heron . api . Config . TopologyReliabilityMode . ATMOST_ONCE ) ; }
public class MetadataServiceListVersion { /** * Gets the version for a given version string , such as " 1.0 " . * @ param version * version string * @ return version for the given string */ public static final MetadataServiceListVersion valueOf ( String version ) { } }
String [ ] components = version . split ( "\\." ) ; return valueOf ( Integer . valueOf ( components [ 0 ] ) , Integer . valueOf ( components [ 1 ] ) ) ;
public class PackageManagerUtils {

    /**
     * Checks if the device has a step counter sensor.
     * Convenience overload that delegates to the PackageManager-based variant.
     *
     * @param context the context.
     * @return {@code true} if the device has a step counter sensor.
     */
    @TargetApi(Build.VERSION_CODES.KITKAT)
    public static boolean hasStepCounterSensorFeature(Context context) {
        return hasStepCounterSensorFeature(context.getPackageManager());
    }
}
public class ParseAnnotator { /** * load mappings of constituent names */ private void loadMappings ( ) { } }
for ( int i = 0 ; i < mappings . length ; i ++ ) { String [ ] pair = mappings [ i ] . split ( ";" ) ; if ( pair . length < 2 ) { try { throw new AnnotatorConfigurationException ( ) ; } catch ( AnnotatorConfigurationException e ) { LOGGER . error ( "[OpenNLP Parser: ]" + e . getMessage ( ) ) ; e . printStackTrace ( ) ; } } else { String consTag = pair [ 0 ] ; String casTag = pair [ 1 ] ; mapTable . put ( consTag , casTag ) ; } }
public class SingleLinkageClusterer { /** * Merge 2 rows / columns of the matrix by the linkage function ( see { @ link # link ( double , double ) } * @ param closestPair */ private void merge ( LinkedPair closestPair ) { } }
int first = closestPair . getFirst ( ) ; int second = closestPair . getSecond ( ) ; for ( int other = 0 ; other < numItems ; other ++ ) { matrix [ Math . min ( first , other ) ] [ Math . max ( first , other ) ] = link ( getDistance ( first , other ) , getDistance ( second , other ) ) ; }
public class Humanize {

    /**
     * Same as {@link #paceFormat(Number, long)}, accepting the interval as a
     * TimeMillis constant and delegating with its millisecond value.
     *
     * @param value
     *            The number of occurrences within the specified interval
     * @param interval
     *            The interval as a TimeMillis value
     * @return an human readable textual representation of the pace
     */
    public static String paceFormat(final Number value, final TimeMillis interval) {
        return paceFormat(value, interval.millis());
    }
}
public class CheckContextGenerator {

    /**
     * Returns predictive context for deciding whether the specified constituents between the specified start and end index
     * can be combined to form a new constituent of the specified type.
     *
     * @param constituents The constituents which have yet to be combined into new constituents.
     * @param type The type of the new constituent proposed.
     * @param start The first constituent of the proposed constituent.
     * @param end The last constituent of the proposed constituent.
     * @return The predictive context for deciding whether a new constituent should be created.
     */
    public String[] getContext(Parse[] constituents, String type, int start, int end) {
        int ps = constituents.length;
        List features = new ArrayList(100);
        // default
        features.add("default");
        // Begin/last features for the first and last constituents of the span.
        Parse pstart = constituents[start];
        Parse pend = constituents[end];
        checkcons(pstart, "begin", type, features);
        checkcons(pend, "last", type, features);
        // Build two production features: one without ("p=") and one with ("pp=")
        // the punctuation that follows each inner constituent.
        StringBuffer production = new StringBuffer(20);
        StringBuffer punctProduction = new StringBuffer(20);
        production.append("p=").append(type).append("->");
        punctProduction.append("pp=").append(type).append("->");
        for (int pi = start; pi < end; pi++) {
            Parse p = constituents[pi];
            checkcons(p, pend, type, features);
            production.append(p.getType()).append(",");
            punctProduction.append(p.getType()).append(",");
            Collection nextPunct = p.getNextPunctuationSet();
            if (nextPunct != null) {
                for (Iterator pit = nextPunct.iterator(); pit.hasNext();) {
                    Parse punct = (Parse) pit.next();
                    punctProduction.append(punct.getType()).append(",");
                }
            }
        }
        production.append(pend.getType());
        punctProduction.append(pend.getType());
        features.add(production.toString());
        features.add(punctProduction.toString());
        // Surrounding context: up to two constituents on each side of the span,
        // plus the punctuation sets adjacent to the span boundaries.
        Parse p_2 = null;
        Parse p_1 = null;
        Parse p1 = null;
        Parse p2 = null;
        Collection p1s = constituents[end].getNextPunctuationSet();
        Collection p2s = null;
        Collection p_1s = constituents[start].getPreviousPunctuationSet();
        Collection p_2s = null;
        if (start - 2 >= 0) {
            p_2 = constituents[start - 2];
        }
        if (start - 1 >= 0) {
            p_1 = constituents[start - 1];
            p_2s = p_1.getPreviousPunctuationSet();
        }
        if (end + 1 < ps) {
            p1 = constituents[end + 1];
            p2s = p1.getNextPunctuationSet();
        }
        if (end + 2 < ps) {
            p2 = constituents[end + 2];
        }
        surround(p_1, -1, type, p_1s, features);
        surround(p_2, -2, type, p_2s, features);
        surround(p1, 1, type, p1s, features);
        surround(p2, 2, type, p2s, features);
        return ((String[]) features.toArray(new String[features.size()]));
    }
}
public class JsonObject { /** * Stores the { @ link Object } value as encrypted identified by the field name . * Note that the value is checked and a { @ link IllegalArgumentException } is thrown if not supported . * Note : Use of the Field Level Encryption functionality provided in the * com . couchbase . client . encryption namespace provided by Couchbase is * subject to the Couchbase Inc . Enterprise Subscription License Agreement * at https : / / www . couchbase . com / ESLA - 11132015. * @ param name the name of the JSON field . * @ param value the value of the JSON field . * @ param providerName Crypto provider name for encryption . * @ return the { @ link JsonObject } . */ public JsonObject putAndEncrypt ( final String name , final Object value , String providerName ) { } }
addValueEncryptionInfo ( name , providerName , true ) ; if ( this == value ) { throw new IllegalArgumentException ( "Cannot put self" ) ; } else if ( value == JsonValue . NULL ) { putNull ( name ) ; } else if ( checkType ( value ) ) { content . put ( name , value ) ; } else { throw new IllegalArgumentException ( "Unsupported type for JsonObject: " + value . getClass ( ) ) ; } return this ;
public class GroovyScriptEngine {

    /**
     * Get the class of the scriptName in question, so that you can instantiate
     * Groovy objects with caching and reloading.
     *
     * @param scriptName resource name pointing to the script
     * @return the loaded scriptName as a compiled class
     * @throws ResourceException if there is a problem accessing the script
     * @throws ScriptException if there is a problem parsing the script
     */
    public Class loadScriptByName(String scriptName) throws ResourceException, ScriptException {
        URLConnection conn = rc.getResourceConnection(scriptName);
        // The cache is keyed by the resource's external URL form, not by scriptName.
        String path = conn.getURL().toExternalForm();
        ScriptCacheEntry entry = scriptCache.get(path);
        Class clazz = null;
        if (entry != null) clazz = entry.scriptClass;
        try {
            // Recompile only when the source is newer than the cached entry.
            if (isSourceNewer(entry)) {
                try {
                    // Prefer the connection's encoding; fall back to the configured one.
                    String encoding = conn.getContentEncoding() != null ? conn.getContentEncoding() : config.getSourceEncoding();
                    String content = IOGroovyMethods.getText(conn.getInputStream(), encoding);
                    clazz = groovyLoader.parseClass(content, path);
                } catch (IOException e) {
                    throw new ResourceException(e);
                }
            }
        } finally {
            // Always release the connection, even when the cached class is reused.
            forceClose(conn);
        }
        return clazz;
    }
}
public class ConverterManager { /** * Gets the best converter for the object specified . * @ param object the object to convert * @ return the converter to use * @ throws IllegalArgumentException if no suitable converter * @ throws IllegalStateException if multiple converters match the type * equally well */ public PartialConverter getPartialConverter ( Object object ) { } }
PartialConverter converter = ( PartialConverter ) iPartialConverters . select ( object == null ? null : object . getClass ( ) ) ; if ( converter != null ) { return converter ; } throw new IllegalArgumentException ( "No partial converter found for type: " + ( object == null ? "null" : object . getClass ( ) . getName ( ) ) ) ;
public class IndexingStrategy {

    /**
     * Determines which {@link Impact}s follow from a set of changes.
     *
     * @param changes The {@link Impact}s of which the impact needs to be determined
     * @param dependencyModel {@link IndexDependencyModel} to determine which entities depend on which
     *        entities
     * @return Set&lt;Impact&gt; containing the impact of the changes
     */
    Set<Impact> determineImpact(Set<Impact> changes, IndexDependencyModel dependencyModel) {
        Stopwatch sw = Stopwatch.createStarted();
        // Partition the changes: key `true` holds whole-repository impacts,
        // key `false` holds entity-level impacts.
        Map<Boolean, List<Impact>> split = changes.stream().collect(partitioningBy(Impact::isWholeRepository));
        ImmutableSet<String> allEntityTypeIds = changes.stream().map(Impact::getEntityTypeId).collect(toImmutableSet());
        // Expand to every entity type that depends on any changed entity type.
        Set<String> dependentEntities = allEntityTypeIds.stream().flatMap(dependencyModel::getEntityTypesDependentOn).collect(toImmutableSet());
        Set<Impact> result = collectResult(split.get(false), split.get(true), dependentEntities);
        if (LOG.isDebugEnabled()) {
            LOG.debug("Determined {} necessary actions in {}", result.size(), sw);
        }
        return result;
    }
}
public class IceAgent { /** * Gets a media stream by name * @ param streamName * The name of the media stream * @ return The media stream . Returns null , if no media stream exists with * such name . */ public IceMediaStream getMediaStream ( String streamName ) { } }
IceMediaStream mediaStream ; synchronized ( mediaStreams ) { mediaStream = this . mediaStreams . get ( streamName ) ; } return mediaStream ;
public class QRDecompositionHouseholderTran_DDRM { /** * To decompose the matrix ' A ' it must have full rank . ' A ' is a ' m ' by ' n ' matrix . * It requires about 2n * m < sup > 2 < / sup > - 2m < sup > 2 < / sup > / 3 flops . * The matrix provided here can be of different * dimension than the one specified in the constructor . It just has to be smaller than or equal * to it . */ @ Override public boolean decompose ( DMatrixRMaj A ) { } }
setExpectedMaxSize ( A . numRows , A . numCols ) ; CommonOps_DDRM . transpose ( A , QR ) ; error = false ; for ( int j = 0 ; j < minLength ; j ++ ) { householder ( j ) ; updateA ( j ) ; } return ! error ;
public class JellyBuilder { /** * Includes the specified adjunct . * This method is useful for including adjunct dynamically on demand . */ public void adjunct ( String name ) throws IOException , SAXException { } }
try { AdjunctsInPage aip = AdjunctsInPage . get ( ) ; aip . generate ( output , name ) ; } catch ( NoSuchAdjunctException e ) { // that ' s OK . }
public class LetterState { /** * Consumes a character and returns the next state for the parser . * @ param ch * next character * @ return the configured nextState if ch is the expected character or the * configure noMatchState otherwise . */ @ Override public AbstractParserState consume ( final char ch ) { } }
if ( ch == this . thisLetter ) { return this . nextState ; } if ( ch == '\n' ) { getParser ( ) . getNewLineState ( ) ; } return this . noMatchState ;
public class MQLinkHandler { /** * Initialize non - persistent fields . These fields are common to both MS * reconstitution of DestinationHandlers and initial creation . * @ param messageProcessor the message processor instance * @ param durableSubscriptionsTable the topicspace durable subscriptions * HashMap from the DestinationManager . * @ param transaction the transaction to use for non persistent * initialization . Can be null , in which case an auto transaction * will be used . * @ throws MessageStoreException if there was an error interacting with the * Message Store . * @ throws SIStoreException if there was a transaction error . */ void initializeNonPersistent ( MessageProcessor messageProcessor , HashMap durableSubscriptionsTable , TransactionCommon transaction ) { } }
// Standard WAS trace entry/exit bracketing around the real work: cache the MQ
// link manager and the (SIB) link manager from the message processor — both
// needed later to route messages — then delegate the remaining non-persistent
// initialization to the superclass.
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "initializeNonPersistent" , new Object [ ] { messageProcessor , durableSubscriptionsTable , transaction } ) ; // Required to pick where to send messages too _mqLinkManager = messageProcessor . getMQLinkManager ( ) ; // Required to pick where to send messages too _linkManager = messageProcessor . getLinkManager ( ) ; super . initializeNonPersistent ( messageProcessor , durableSubscriptionsTable , transaction ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "initializeNonPersistent" ) ;
public class BeanHandler { /** * Creates the parameter ' s value . * @ param argument the argument * @ param context the current HTTP context * @ param engine the converter * @ return the created object */ @ Override public Object create ( ActionParameter argument , Context context , ParameterFactories engine ) { } }
// Instantiate the bean, then populate it through its public setters:
// for every method whose name starts with SETTER_PREFIX and takes exactly one
// parameter, build an ActionParameter from that parameter's annotations and
// (generic) type, resolve its value via Bindings.create (which throws if the
// value cannot be built), and inject non-null values. Setters with the wrong
// arity are logged and skipped rather than failing the whole bean.
Object object = createNewInstance ( argument . getRawType ( ) , context , engine ) ; for ( Method method : argument . getRawType ( ) . getMethods ( ) ) { if ( method . getName ( ) . startsWith ( SETTER_PREFIX ) ) { if ( method . getParameterTypes ( ) . length != 1 ) { LOGGER . warn ( "The class {} has a setter method called {} but with too many parameters to be " + "injected with the 'BeanParameter' annotation" , argument . getRawType ( ) . getName ( ) , method . getName ( ) ) ; continue ; } // Only 1 parameter Annotation [ ] annotation = method . getParameterAnnotations ( ) [ 0 ] ; Class < ? > typesOfParameter = method . getParameterTypes ( ) [ 0 ] ; Type genericTypeOfParameter = method . getGenericParameterTypes ( ) [ 0 ] ; ActionParameter parameter = ActionParameter . from ( method , annotation , typesOfParameter , genericTypeOfParameter ) ; // An exception is thrown if we can ' t build the parameter object . Object value = Bindings . create ( parameter , context , engine ) ; if ( value != null ) { inject ( object , method , value ) ; } } } return object ;
public class AdviceActivity { /** * Puts the aspect advice bean . * @ param aspectId the aspect id * @ param adviceBean the advice bean */ protected void putAspectAdviceBean ( String aspectId , Object adviceBean ) { } }
if ( aspectAdviceResult == null ) { aspectAdviceResult = new AspectAdviceResult ( ) ; } aspectAdviceResult . putAspectAdviceBean ( aspectId , adviceBean ) ;
public class HiveDataset { /** * Resolve { @ value # DATABASE _ TOKEN } and { @ value # TABLE _ TOKEN } in < code > rawString < / code > to { @ link Table # getDbName ( ) } * and { @ link Table # getTableName ( ) } */ public static String resolveTemplate ( String rawString , Table table ) { } }
if ( StringUtils . isBlank ( rawString ) ) { return rawString ; } return StringUtils . replaceEach ( rawString , new String [ ] { DATABASE_TOKEN , TABLE_TOKEN } , new String [ ] { table . getDbName ( ) , table . getTableName ( ) } ) ;
public class SslContext { /** * Creates a new server - side { @ link SslContext } . * @ param provider the { @ link SslContext } implementation to use . * { @ code null } to use the current default one . * @ param certChainFile an X . 509 certificate chain file in PEM format * @ param keyFile a PKCS # 8 private key file in PEM format * @ param keyPassword the password of the { @ code keyFile } . * { @ code null } if it ' s not password - protected . * @ return a new server - side { @ link SslContext } * @ deprecated Replaced by { @ link SslContextBuilder } */ @ Deprecated public static SslContext newServerContext ( SslProvider provider , File certChainFile , File keyFile , String keyPassword ) throws SSLException { } }
// Convenience overload: delegates to the full-argument variant with defaults —
// null cipher suites, the identity cipher-suite filter, null APN config, and
// 0/0 for session cache size and timeout (presumably "use implementation
// defaults"; confirm against the delegate's javadoc).
return newServerContext ( provider , certChainFile , keyFile , keyPassword , null , IdentityCipherSuiteFilter . INSTANCE , null , 0 , 0 ) ;
public class GenerateBomMojo { /** * Collects dependencies , including transitives . * Project dependencies retain their scope , while test only dependencies ( including transitives ) will have test scope . * @ param projectDependencies * @ return */ private Set < Artifact > getDependencies ( final Set < Artifact > projectDependencies ) { } }
// Set algebra, order matters:
//  1. start from the direct project dependencies (insertion order preserved);
//  2. split them into test-scoped vs. everything else;
//  3. resolve transitives for each half separately;
//  4. test-only transitives = (test transitives) minus (non-test transitives),
//     i.e. artifacts reachable only through test dependencies;
//  5. re-add those, rebuilt with SCOPE_TEST forced on;
//  6. finally add all transitives of the full dependency set (already-present
//     artifacts are ignored by the LinkedHashSet).
Set < Artifact > result = new LinkedHashSet < Artifact > ( projectDependencies ) ; Set < Artifact > testDependencies = dependenciesWithScope ( projectDependencies , Artifact . SCOPE_TEST ) ; Set < Artifact > nonTestDependencies = allBut ( projectDependencies , testDependencies ) ; Set < Artifact > testTransitives = resolve ( testDependencies ) ; Set < Artifact > nonTestTransitives = resolve ( nonTestDependencies ) ; Set < Artifact > testOnlyDependencies = allBut ( testTransitives , nonTestTransitives ) ; for ( Artifact testOnly : testOnlyDependencies ) { result . add ( new DefaultArtifact ( testOnly . getGroupId ( ) , testOnly . getArtifactId ( ) , testOnly . getVersion ( ) , Artifact . SCOPE_TEST , testOnly . getType ( ) , testOnly . getClassifier ( ) , testOnly . getArtifactHandler ( ) ) ) ; } result . addAll ( resolve ( projectDependencies ) ) ; return result ;
public class MemtableAllocator { /** * Indicate the memory and resources owned by this allocator are no longer referenced , * and can be reclaimed / reused . */ public void setDiscarded ( ) { } }
// Transition the lifecycle state first (transition presumably validates the
// DISCARDED move — confirm), then release both memory pools; per the inline
// comment, releasing automatically signals any threads waiting on capacity.
state = state . transition ( LifeCycle . DISCARDED ) ; // release any memory owned by this allocator ; automatically signals waiters onHeap . releaseAll ( ) ; offHeap . releaseAll ( ) ;
public class GraphHopper { /** * Does the preparation and creates the location index */ public void postProcessing ( ) { } }
// Post-load pipeline, order-sensitive:
//  1. optionally DFS-sort the graph for locality (refused on an already
//     prepared CH graph — see issue #12 referenced in the message);
//  2. with elevation data, interpolate bridge/tunnel elevations;
//  3. build the location index;
//  4. create CH preparations and run them if not yet prepared;
//  5. create landmark (LM) preparations and load-or-prepare them.
// NOTE(review): prepareCH()/loadOrPrepareLM() run unconditionally after the
// isEnabled() guards — presumably they no-op when their decorator is disabled;
// confirm before reordering.
// Later : move this into the GraphStorage . optimize method // Or : Doing it after preparation to optimize shortcuts too . But not possible yet # 12 if ( sortGraph ) { if ( ghStorage . isCHPossible ( ) && isCHPrepared ( ) ) throw new IllegalArgumentException ( "Sorting a prepared CHGraph is not possible yet. See #12" ) ; GraphHopperStorage newGraph = GHUtility . newStorage ( ghStorage ) ; GHUtility . sortDFS ( ghStorage , newGraph ) ; logger . info ( "graph sorted (" + getMemInfo ( ) + ")" ) ; ghStorage = newGraph ; } if ( hasElevation ( ) ) { interpolateBridgesAndOrTunnels ( ) ; } initLocationIndex ( ) ; if ( chFactoryDecorator . isEnabled ( ) ) chFactoryDecorator . createPreparations ( ghStorage ) ; if ( ! isCHPrepared ( ) ) prepareCH ( ) ; if ( lmFactoryDecorator . isEnabled ( ) ) lmFactoryDecorator . createPreparations ( ghStorage , locationIndex ) ; loadOrPrepareLM ( ) ;
public class JDBCSQLXML { /** * Retrieves a new DOMSource for reading the XML value designated by this * SQLXML instance . < p > * @ param sourceClass The class of the source * @ throws java . sql . SQLException if there is an error processing the XML * value or if the given < tt > sourceClass < / tt > is not supported . * @ return a new DOMSource for reading the XML value designated by this * SQLXML instance */ @ SuppressWarnings ( "unchecked" ) protected < T extends Source > T createDOMSource ( Class < T > sourceClass ) throws SQLException { } }
// 1. Instantiate the requested source class (default DOMSource when null);
//    any reflection failure — including a non-DOMSource class, caught as
//    ClassCastException — is mapped to an SQLException via Exceptions.
// 2. Identity-transform the XML's binary stream into a DOMResult.
// 3. Attach the resulting DOM node to the source and return it.
// NOTE(review): inputStream is never explicitly closed here — presumably the
// transformer consumes it and the underlying buffer is in-memory; confirm.
DOMSource source = null ; try { source = ( sourceClass == null ) ? new DOMSource ( ) : ( DOMSource ) sourceClass . newInstance ( ) ; } catch ( SecurityException ex ) { throw Exceptions . sourceInstantiation ( ex ) ; } catch ( IllegalAccessException ex ) { throw Exceptions . sourceInstantiation ( ex ) ; } catch ( InstantiationException ex ) { throw Exceptions . sourceInstantiation ( ex ) ; } catch ( ClassCastException ex ) { throw Exceptions . sourceInstantiation ( ex ) ; } Transformer transformer = JDBCSQLXML . getIdentityTransformer ( ) ; InputStream inputStream = this . getBinaryStreamImpl ( ) ; StreamSource streamSource = new StreamSource ( ) ; DOMResult domResult = new DOMResult ( ) ; streamSource . setInputStream ( inputStream ) ; try { transformer . transform ( streamSource , domResult ) ; } catch ( TransformerException ex ) { throw Exceptions . transformFailed ( ex ) ; } source . setNode ( domResult . getNode ( ) ) ; return ( T ) source ;
public class KeyInfo { /** * Set up the screen input fields . */ public void setupFields ( ) { } }
FieldInfo field = null ; field = new FieldInfo ( this , ID , Constants . DEFAULT_FIELD_LENGTH , null , null ) ; field . setDataClass ( Integer . class ) ; field . setHidden ( true ) ; field = new FieldInfo ( this , LAST_CHANGED , Constants . DEFAULT_FIELD_LENGTH , null , null ) ; field . setDataClass ( Date . class ) ; field . setHidden ( true ) ; field = new FieldInfo ( this , DELETED , 10 , null , new Boolean ( false ) ) ; field . setDataClass ( Boolean . class ) ; field . setHidden ( true ) ; field = new FieldInfo ( this , KEY_FILENAME , 40 , null , null ) ; field = new FieldInfo ( this , KEY_NUMBER , 2 , null , null ) ; field . setDataClass ( Short . class ) ; field = new FieldInfo ( this , KEY_FIELD_1 , 40 , null , null ) ; field = new FieldInfo ( this , KEY_FIELD_2 , 40 , null , null ) ; field = new FieldInfo ( this , KEY_FIELD_3 , 40 , null , null ) ; field = new FieldInfo ( this , KEY_FIELD_4 , 40 , null , null ) ; field = new FieldInfo ( this , KEY_FIELD_5 , 40 , null , null ) ; field = new FieldInfo ( this , KEY_FIELD_6 , 40 , null , null ) ; field = new FieldInfo ( this , KEY_FIELD_7 , 40 , null , null ) ; field = new FieldInfo ( this , KEY_FIELD_8 , 40 , null , null ) ; field = new FieldInfo ( this , KEY_FIELD_9 , 40 , null , null ) ; field = new FieldInfo ( this , KEY_NAME , 40 , null , null ) ; field = new FieldInfo ( this , KEY_TYPE , 1 , null , null ) ; field = new FieldInfo ( this , INCLUDE_SCOPE , 10 , null , new Integer ( 0x004 ) ) ; field . setDataClass ( Integer . class ) ;
public class UpdateDatasetRequest { /** * A list of " DatasetAction " objects . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setActions ( java . util . Collection ) } or { @ link # withActions ( java . util . Collection ) } if you want to override * the existing values . * @ param actions * A list of " DatasetAction " objects . * @ return Returns a reference to this object so that method calls can be chained together . */ public UpdateDatasetRequest withActions ( DatasetAction ... actions ) { } }
if ( this . actions == null ) { setActions ( new java . util . ArrayList < DatasetAction > ( actions . length ) ) ; } for ( DatasetAction ele : actions ) { this . actions . add ( ele ) ; } return this ;
public class ConsonantUtil { /** * * * * * * BEGINNING OF FUNCTION * * * * * */ / * / public static boolean is_shar ( String str ) { } }
if ( str . equals ( "S" ) || str . equals ( "z" ) || str . equals ( "s" ) ) return true ; return false ;
public class Pager { /** * executes the list call and sets the continue variable for next list call */ private ApiListType executeRequest ( Call call ) throws IOException , ApiException { } }
// Execute the HTTP call and delegate deserialization to the client; per the
// javadoc, continue-token bookkeeping for the next page presumably happens
// inside handleResponse (or a caller) — confirm, it is not visible here.
return client . handleResponse ( call . execute ( ) , listType ) ;
public class ClassGraph { /** * Returns all program element docs that have a visibility greater or * equal than the specified level */ private < T extends ProgramElementDoc > List < T > filterByVisibility ( T [ ] docs , Visibility visibility ) { } }
if ( visibility == Visibility . PRIVATE ) return Arrays . asList ( docs ) ; List < T > filtered = new ArrayList < T > ( ) ; for ( T doc : docs ) { if ( Visibility . get ( doc ) . compareTo ( visibility ) > 0 ) filtered . add ( doc ) ; } return filtered ;
public class ModuleVisitor { /** * Visit an exported package of the current module . * @ param packaze the qualified name of the exported package . * @ param access the access flag of the exported package , * valid values are among { @ code ACC _ SYNTHETIC } and * { @ code ACC _ MANDATED } . * @ param modules the qualified names of the modules that can access to * the public classes of the exported package or * < tt > null < / tt > . */ public void visitExport ( String packaze , int access , String ... modules ) { } }
// Standard visitor-chain delegation: forward to the next ModuleVisitor in the
// chain when one is set; otherwise the event is intentionally dropped.
if ( mv != null ) { mv . visitExport ( packaze , access , modules ) ; }
public class SentenceSet2VectorSet { /** * read parameters */ private void readParameters ( UnZipModel model ) throws IOException { } }
logger . info ( "read parameters" ) ; // get the param model File paramFile = model . get ( "param" ) ; /* LineNumberReader lr = new LineNumberReader ( new FileReader ( paramFile ) ) ; String line = null ; / / read k if ( ( line = lr . readLine ( ) ) ! = null ) kernel = line . trim ( ) ; / / read t if ( ( line = lr . readLine ( ) ) ! = null ) relationType = Integer . parseInt ( line . trim ( ) ) ; logger . debug ( " kernel : " + kernel ) ; logger . debug ( " relationType : " + relationType ) ; */ parameter . load ( new FileInputStream ( paramFile ) ) ;
public class PravegaTablesStoreHelper { /** * Removes a batch of entries from the table store . Ignores data not found exception and treats it as success . * If table store throws dataNotFound for a subset of entries , there is no way for this method to disambiguate . * So it is the responsibility of the caller to use this api if they are guaranteed to always attempt to * remove same batch entries . * @ param tableName table name * @ param keys keys to delete * @ return CompletableFuture which when completed will have entries removed from the table . */ public CompletableFuture < Void > removeEntries ( String tableName , Collection < String > keys ) { } }
// Convert each key to a versionless TableKey over its UTF-8 bytes, then issue
// the batch removal with retries; expectingDataNotFound maps a data-not-found
// failure to success (null), per the contract above. A trace line is logged
// asynchronously on the executor once the removal completes.
log . trace ( "remove entry called for : {} keys : {}" , tableName , keys ) ; List < TableKey < byte [ ] > > listOfKeys = keys . stream ( ) . map ( x -> new TableKeyImpl < > ( x . getBytes ( Charsets . UTF_8 ) , null ) ) . collect ( Collectors . toList ( ) ) ; return expectingDataNotFound ( withRetries ( ( ) -> segmentHelper . removeTableKeys ( tableName , listOfKeys , authToken . get ( ) , RequestTag . NON_EXISTENT_ID ) , ( ) -> String . format ( "remove entries: keys: %s table: %s" , keys . toString ( ) , tableName ) ) , null ) . thenAcceptAsync ( v -> log . trace ( "entry for keys {} removed from table {}" , keys , tableName ) , executor ) ;
public class ResponsePromise { /** * Handle the promise * @ param promise to handle */ protected void handlePromise ( Promise < T > promise ) { } }
if ( ! promise . isSuccess ( ) ) { this . setException ( promise . cause ( ) ) ; } else { this . response = promise . getNow ( ) ; if ( handlers != null ) { for ( IsSimplePromiseResponseHandler < T > h : handlers ) { h . onResponse ( this ) ; } } }
public class AkkaRpcActor { /** * Handle rpc invocations by looking up the rpc method on the rpc endpoint and calling this * method with the provided method arguments . If the method has a return value , it is returned * to the sender of the call . * @ param rpcInvocation Rpc invocation message */ private void handleRpcInvocation ( RpcInvocation rpcInvocation ) { } }
// Phase 1 — method lookup: resolve the target Method from the invocation's
// name and parameter types. Each failure mode (class loading, deserialization,
// missing method) is reported back to the sender as a Status.Failure; note the
// catch blocks do not return, so rpcMethod simply stays null and the guard
// below skips invocation.
// Phase 2 — invocation: setAccessible(true) supports endpoints declared as
// anonymous classes. void methods are fire-and-forget; otherwise the result is
// computed, with an InvocationTargetException unwrapped and its target
// exception sent back. A CompletableFuture result is piped back asynchronously
// (sendAsyncResponse); any other result is sent synchronously. Any remaining
// Throwable is reported to the sender as a failure.
Method rpcMethod = null ; try { String methodName = rpcInvocation . getMethodName ( ) ; Class < ? > [ ] parameterTypes = rpcInvocation . getParameterTypes ( ) ; rpcMethod = lookupRpcMethod ( methodName , parameterTypes ) ; } catch ( ClassNotFoundException e ) { log . error ( "Could not load method arguments." , e ) ; RpcConnectionException rpcException = new RpcConnectionException ( "Could not load method arguments." , e ) ; getSender ( ) . tell ( new Status . Failure ( rpcException ) , getSelf ( ) ) ; } catch ( IOException e ) { log . error ( "Could not deserialize rpc invocation message." , e ) ; RpcConnectionException rpcException = new RpcConnectionException ( "Could not deserialize rpc invocation message." , e ) ; getSender ( ) . tell ( new Status . Failure ( rpcException ) , getSelf ( ) ) ; } catch ( final NoSuchMethodException e ) { log . error ( "Could not find rpc method for rpc invocation." , e ) ; RpcConnectionException rpcException = new RpcConnectionException ( "Could not find rpc method for rpc invocation." , e ) ; getSender ( ) . tell ( new Status . Failure ( rpcException ) , getSelf ( ) ) ; } if ( rpcMethod != null ) { try { // this supports declaration of anonymous classes rpcMethod . setAccessible ( true ) ; if ( rpcMethod . getReturnType ( ) . equals ( Void . TYPE ) ) { // No return value to send back rpcMethod . invoke ( rpcEndpoint , rpcInvocation . getArgs ( ) ) ; } else { final Object result ; try { result = rpcMethod . invoke ( rpcEndpoint , rpcInvocation . getArgs ( ) ) ; } catch ( InvocationTargetException e ) { log . debug ( "Reporting back error thrown in remote procedure {}" , rpcMethod , e ) ; // tell the sender about the failure getSender ( ) . tell ( new Status . Failure ( e . getTargetException ( ) ) , getSelf ( ) ) ; return ; } final String methodName = rpcMethod . getName ( ) ; if ( result instanceof CompletableFuture ) { final CompletableFuture < ? > responseFuture = ( CompletableFuture < ? > ) result ; sendAsyncResponse ( responseFuture , methodName ) ; } else { sendSyncResponse ( result , methodName ) ; } } } catch ( Throwable e ) { log . error ( "Error while executing remote procedure call {}." , rpcMethod , e ) ; // tell the sender about the failure getSender ( ) . tell ( new Status . Failure ( e ) , getSelf ( ) ) ; } }
public class FileBackedCookieStore { /** * each cookie store controls its own directory , we do not need to synchronize */ private void readAll ( ) { } }
final List < CompletableFuture < Void > > futures = new ArrayList < > ( ) ; for ( File file : directory . listFiles ( ) ) { final Runnable loadFile = ( ) -> { if ( file . getName ( ) . endsWith ( SUFFIX ) ) { try ( FileReader reader = new FileReader ( file ) ) { Properties props = new Properties ( ) ; props . load ( reader ) ; Map . Entry < Key , HttpCookie > entry = fromProperties ( props ) ; if ( entry != null ) { add ( entry . getKey ( ) , entry . getValue ( ) ) ; } else { file . delete ( ) ; } } catch ( IOException ioe ) { throw new RuntimeException ( ioe ) ; } } } ; futures . add ( CompletableFuture . runAsync ( loadFile , executor ) ) ; } try { CompletableFuture . allOf ( futures . toArray ( new CompletableFuture [ 0 ] ) ) . get ( ) ; } catch ( InterruptedException | ExecutionException e ) { throw new RuntimeException ( e ) ; }
public class WorkspaceImpl { /** * { @ inheritDoc } */ public ContentHandler getImportContentHandler ( String parentAbsPath , int uuidBehavior ) throws PathNotFoundException , ConstraintViolationException , VersionException , RepositoryException { } }
// JCR import precondition checks, in order: session must be live; the parent
// node is resolved from the absolute path; it must be checked out, must not be
// protected by its node-type definition, and must not be locked. Then an
// import handler is built with RESPECT_PROPERTY_DEFINITIONS_CONSTRAINTS
// enabled, using the workspace-level (transact) item manager for persistence.
session . checkLive ( ) ; NodeImpl node = ( NodeImpl ) session . getItem ( parentAbsPath ) ; // checked - in check if ( ! node . checkedOut ( ) ) { throw new VersionException ( "Node " + node . getPath ( ) + " or its nearest ancestor is checked-in" ) ; } // Check if node is not protected if ( node . getDefinition ( ) . isProtected ( ) ) { throw new ConstraintViolationException ( "Can't add protected node " + node . getName ( ) + " to " + node . getParent ( ) . getPath ( ) ) ; } // Check locking if ( ! node . checkLocking ( ) ) { throw new LockException ( "Node " + node . getPath ( ) + " is locked " ) ; } Map < String , Object > context = new HashMap < String , Object > ( ) ; context . put ( ContentImporter . RESPECT_PROPERTY_DEFINITIONS_CONSTRAINTS , true ) ; return new ExportImportFactory ( ) . getImportHandler ( ( ( NodeData ) node . getData ( ) ) , uuidBehavior , session . getTransientNodesManager ( ) . getTransactManager ( ) , session . getTransientNodesManager ( ) . getTransactManager ( ) , nodeTypeManager , session . getLocationFactory ( ) , session . getValueFactory ( ) , getNamespaceRegistry ( ) , session . getAccessManager ( ) , session . getUserState ( ) , context , ( RepositoryImpl ) session . getRepository ( ) , name ) ;
public class TriggerDef { /** * run method declaration < P > * the trigger JSP is run in its own thread here . Its job is simply to * wait until it is told by the main thread that it should fire the * trigger . */ public void run ( ) { } }
// Trigger worker loop: keep pulling TriggerData until keepGoing is cleared.
// popPair() presumably blocks until data is available or returns null on
// shutdown/timeout — confirm; otherwise this would spin. A null username marks
// a sentinel/no-op entry, so only real rows cause trigger.fire(...) with the
// old and new row images.
while ( keepGoing ) { TriggerData triggerData = popPair ( ) ; if ( triggerData != null ) { if ( triggerData . username != null ) { trigger . fire ( this . vectorIndex , name . name , table . getName ( ) . name , triggerData . oldRow , triggerData . newRow ) ; } } }
public class BookKeeperServiceRunner { /** * Main method that can be used to start BookKeeper out - of - process using BookKeeperServiceRunner . * This is used when invoking this class via ProcessStarter . * @ param args Args . * @ throws Exception If an error occurred . */ public static void main ( String [ ] args ) throws Exception { } }
// Configure the runner from system properties: base port + bookie count
// expand into one port per bookie; ZK port and ledgers path are required;
// startZk/secureBK default to false and the TLS key-store paths fall back to
// the bundled config (note startZk(false) is set eagerly and then overwritten
// from the property inside the try). Any parse failure prints a usage message
// and exits with -1. On success, all services are started and the process
// parks forever (sleep MAX_VALUE) so ProcessStarter controls its lifetime.
val b = BookKeeperServiceRunner . builder ( ) ; b . startZk ( false ) ; try { int bkBasePort = Integer . parseInt ( System . getProperty ( PROPERTY_BASE_PORT ) ) ; int bkCount = Integer . parseInt ( System . getProperty ( PROPERTY_BOOKIE_COUNT ) ) ; val bkPorts = new ArrayList < Integer > ( ) ; for ( int i = 0 ; i < bkCount ; i ++ ) { bkPorts . add ( bkBasePort + i ) ; } b . bookiePorts ( bkPorts ) ; b . zkPort ( Integer . parseInt ( System . getProperty ( PROPERTY_ZK_PORT ) ) ) ; b . ledgersPath ( System . getProperty ( PROPERTY_LEDGERS_PATH ) ) ; b . startZk ( Boolean . parseBoolean ( System . getProperty ( PROPERTY_START_ZK , "false" ) ) ) ; b . tLSKeyStore ( System . getProperty ( TLS_KEY_STORE , "../../../config/bookie.keystore.jks" ) ) ; b . tLSKeyStorePasswordPath ( System . getProperty ( TLS_KEY_STORE_PASSWD , "../../../config/bookie.keystore.jks.passwd" ) ) ; b . secureBK ( Boolean . parseBoolean ( System . getProperty ( PROPERTY_SECURE_BK , "false" ) ) ) ; } catch ( Exception ex ) { System . out . println ( String . format ( "Invalid or missing arguments (via system properties). Expected: %s(int), " + "%s(int), %s(int), %s(String). (%s)." , PROPERTY_BASE_PORT , PROPERTY_BOOKIE_COUNT , PROPERTY_ZK_PORT , PROPERTY_LEDGERS_PATH , ex . getMessage ( ) ) ) ; System . exit ( - 1 ) ; return ; } BookKeeperServiceRunner runner = b . build ( ) ; runner . startAll ( ) ; Thread . sleep ( Long . MAX_VALUE ) ;