signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class WMultiTextField { /** * { @ inheritDoc } */ @ Override public String [ ] getValue ( ) { } }
Object data = getData ( ) ; if ( data == null ) { return null ; } String [ ] array = null ; // Array data if ( data instanceof String [ ] ) { array = ( String [ ] ) data ; } else if ( data instanceof List ) { // List data List < ? > list = ( List < ? > ) data ; array = new String [ list . size ( ) ] ; for ( int i = 0 ; i < list . size ( ) ; i ++ ) { Object item = list . get ( i ) ; array [ i ] = item == null ? "" : item . toString ( ) ; } } else { // Object array = new String [ ] { data . toString ( ) } ; } return removeEmptyStrings ( array ) ;
public class VoiceApi {

    /**
     * Reconnect a call.
     * Reconnect the specified call. This releases the established call and retrieves the held
     * call in one step. This is a quick way to do
     * [/voice/calls/{id}/release](/reference/workspace/Voice/index.html#release) and
     * [/voice/calls/{id}/retrieve](/reference/workspace/Voice/index.html#retrieve).
     *
     * @param id the connection ID of the established call (will be released). (required)
     * @param reconnectData (required)
     * @return ApiResponse&lt;ApiSuccessResponse&gt;
     * @throws ApiException if the API call fails, e.g. server error or cannot deserialize the
     *         response body
     */
    public ApiResponse<ApiSuccessResponse> reconnectWithHttpInfo(String id, ReconnectData reconnectData) throws ApiException {
        // Validates the required arguments and builds the HTTP call
        // (the two nulls are the optional progress listeners).
        com.squareup.okhttp.Call call = reconnectValidateBeforeCall(id, reconnectData, null, null);
        // Deserialize the response body as ApiSuccessResponse.
        Type localVarReturnType = new TypeToken<ApiSuccessResponse>() {}.getType();
        return apiClient.execute(call, localVarReturnType);
    }
}
public class PCA {

    /**
     * Calculates PCA factors of a matrix, for a given number of reduced features,
     * and returns the factors used to scale observations.
     * The return is a factor matrix to reduce (normalized) feature sets.
     *
     * @see pca(INDArray, int, boolean)
     * @param A the array of features; rows are results, columns are features.
     *          NOTE: this matrix is modified in place (mean-centered when
     *          {@code normalize} is true, and overwritten by the SVD routine).
     * @param nDims the number of components on which to project the features
     * @param normalize whether to normalize (adjust each feature to have zero mean)
     * @return the n x nDims projection (factor) matrix
     */
    public static INDArray pca_factor(INDArray A, int nDims, boolean normalize) {
        if (normalize) {
            // Normalize to mean 0 for each feature (each column has 0 mean).
            INDArray mean = A.mean(0);
            A.subiRowVector(mean);
        }

        long m = A.rows();
        long n = A.columns();

        // Prepare the SVD results; we'll decompose A into U x S x V'.
        // s holds min(m, n) singular values; VT is n x n in column-major ('f') order.
        INDArray s = Nd4j.create(A.dataType(), m < n ? m : n);
        INDArray VT = Nd4j.create(A.dataType(), new long[] {n, n}, 'f');

        // Note - we don't care about U, so pass null for it.
        Nd4j.getBlasWrapper().lapack().gesvd(A, s, null, VT);

        // For comparison, k & nDims are the equivalent values in both methods implementing PCA.

        // So now let's rip out the appropriate number of left singular vectors from
        // the V output (note we pull rows since VT is a transpose of V).
        INDArray V = VT.transpose();
        INDArray factor = Nd4j.create(A.dataType(), new long[] {n, nDims}, 'f');
        for (int i = 0; i < nDims; i++) {
            factor.putColumn(i, V.getColumn(i));
        }

        return factor;
    }
}
public class CassandraServerTriggerAspect {

    /**
     * Logs an error message for an unhandled exception thrown from the target method
     * (CassandraServer.doInsert).
     *
     * @param joinPoint the join point cut that contains information about the target
     * @param throwable the cause of the exception from the target method invocation
     */
    @AfterThrowing(pointcut = "execution(* org.apache.cassandra.thrift.CassandraServer.doInsert(..))", throwing = "throwable")
    public void logErrorFromThrownException(final JoinPoint joinPoint, final Throwable throwable) {
        // Identify the failing method from the join point so the log line is self-describing.
        final String className = joinPoint.getTarget().getClass().getName();
        final String methodName = joinPoint.getSignature().getName();
        logger.error("Could not write to cassandra! Method: " + className + "." + methodName + "()", throwable);
    }
}
public class CounterContext {

    /**
     * Determine the count relationship between two contexts.
     * <ul>
     *   <li>EQUAL: Equal set of nodes and every count is equal.</li>
     *   <li>GREATER_THAN: Superset of nodes and every count is equal or greater than its corollary.</li>
     *   <li>LESS_THAN: Subset of nodes and every count is equal or less than its corollary.</li>
     *   <li>DISJOINT: Node sets are not equal and/or counts are not all greater or less than.</li>
     * </ul>
     * Strategy: compare node logical clocks (like a version vector).
     *
     * @param left counter context.
     * @param right counter context.
     * @return the Relationship between the contexts.
     */
    public Relationship diff(ByteBuffer left, ByteBuffer right) {
        Relationship relationship = Relationship.EQUAL;
        ContextState leftState = ContextState.wrap(left);
        ContextState rightState = ContextState.wrap(right);

        // Walk both contexts in lockstep; shards are ordered by id, so a
        // mismatching id means one side has a shard the other lacks.
        while (leftState.hasRemaining() && rightState.hasRemaining()) {
            // compare id bytes
            int compareId = leftState.compareIdTo(rightState);
            if (compareId == 0) {
                long leftClock = leftState.getClock();
                long rightClock = rightState.getClock();
                long leftCount = leftState.getCount();
                long rightCount = rightState.getCount();

                // advance
                leftState.moveToNext();
                rightState.moveToNext();

                // process clock comparisons
                if (leftClock == rightClock) {
                    if (leftCount != rightCount) {
                        // Inconsistent shard (see the corresponding code in merge()). We return DISJOINT in this
                        // case so that it will be treated as a difference, allowing read-repair to work.
                        return Relationship.DISJOINT;
                    }
                } else if ((leftClock >= 0 && rightClock > 0 && leftClock > rightClock)
                        || (leftClock < 0 && (rightClock > 0 || leftClock < rightClock))) {
                    // Left clock dominates: left is at least GREATER_THAN.
                    // If we previously decided LESS_THAN, the contexts disagree in both directions.
                    if (relationship == Relationship.EQUAL)
                        relationship = Relationship.GREATER_THAN;
                    else if (relationship == Relationship.LESS_THAN)
                        return Relationship.DISJOINT;
                    // relationship == Relationship.GREATER_THAN
                } else {
                    // Right clock dominates: mirror of the branch above.
                    if (relationship == Relationship.EQUAL)
                        relationship = Relationship.LESS_THAN;
                    else if (relationship == Relationship.GREATER_THAN)
                        return Relationship.DISJOINT;
                    // relationship == Relationship.LESS_THAN
                }
            } else if (compareId > 0) {
                // Right has a shard left lacks: only advance the right context.
                rightState.moveToNext();

                if (relationship == Relationship.EQUAL)
                    relationship = Relationship.LESS_THAN;
                else if (relationship == Relationship.GREATER_THAN)
                    return Relationship.DISJOINT;
                // relationship == Relationship.LESS_THAN
            } else { // compareId < 0
                // Left has a shard right lacks: only advance the left context.
                leftState.moveToNext();

                if (relationship == Relationship.EQUAL)
                    relationship = Relationship.GREATER_THAN;
                else if (relationship == Relationship.LESS_THAN)
                    return Relationship.DISJOINT;
                // relationship == Relationship.GREATER_THAN
            }
        }

        // check final lengths: leftover shards on one side make it a superset.
        if (leftState.hasRemaining()) {
            if (relationship == Relationship.EQUAL)
                return Relationship.GREATER_THAN;
            else if (relationship == Relationship.LESS_THAN)
                return Relationship.DISJOINT;
        }
        if (rightState.hasRemaining()) {
            if (relationship == Relationship.EQUAL)
                return Relationship.LESS_THAN;
            else if (relationship == Relationship.GREATER_THAN)
                return Relationship.DISJOINT;
        }

        return relationship;
    }
}
public class GroupImpl {

    /**
     * Returns whether the given feature is set to a non-default value.
     * <!-- begin-user-doc -->
     * NOTE(review): EMF-generated method — regenerate rather than hand-edit.
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public boolean eIsSet(int featureID) {
        switch (featureID) {
            case SimpleAntlrPackage.GROUP__ELEMENTS:
                // The elements list is "set" only when it exists and is non-empty.
                return elements != null && !elements.isEmpty();
        }
        return super.eIsSet(featureID);
    }
}
public class RequestCreator {

    /**
     * Add a list of custom transformations to be applied to the image.
     * Custom transformations will always be run after the built-in transformations.
     *
     * @param transformations the transformations to append to this request
     * @return this creator, for call chaining
     */
    @NonNull
    public RequestCreator transform(@NonNull List<? extends Transformation> transformations) {
        // Delegates to the underlying request data; returns this to keep the fluent API.
        data.transform(transformations);
        return this;
    }
}
public class LibertyJtaPlatform {

    /**
     * Invokes our implementation for methods of
     * org.hibernate.engine.transaction.jta.platform.spi.JtaPlatform.
     * Dispatches on the invoked method's name, handling the zero-argument
     * JtaPlatform methods plus Object methods (hashCode/toString/equals) and the
     * one-argument JtaPlatform methods. Unrecognized methods fall through and
     * return null. Entry/exit (and exceptional exit) are traced when enabled.
     *
     * @param proxy the proxy instance the method was invoked on
     * @param method the JtaPlatform (or Object) method being invoked
     * @param args the invocation arguments, possibly null
     * @return the result of the dispatched call, or null for void/unknown methods
     * @throws Throwable whatever the underlying implementation throws (re-thrown after trace exit)
     */
    @Override
    public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
        String methodName = method.getName();

        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            Tr.entry(this, tc, methodName, args);

        Object r = null;
        try {
            if (args == null || args.length == 0) {
                // Zero-argument methods: JtaPlatform queries plus Object.hashCode/toString.
                if ("canRegisterSynchronization".equals(methodName))
                    r = canRegisterSynchronization();
                else if ("getCurrentStatus".equals(methodName))
                    r = getCurrentStatus();
                else if ("hashCode".equals(methodName))
                    r = System.identityHashCode(this);
                else if ("retrieveTransactionManager".equals(methodName))
                    r = retrieveTransactionManager();
                else if ("retrieveUserTransaction".equals(methodName))
                    r = retrieveUserTransaction();
                else if ("toString".equals(methodName))
                    r = new StringBuilder(getClass().getName())
                            .append('@')
                            .append(Integer.toHexString(System.identityHashCode(this)))
                            .toString();
            } else {
                if ("equals".equals(methodName))
                    r = proxy == args[0]; // assumes one proxy per invocation handler
                else if ("getTransactionIdentifier".equals(methodName))
                    r = getTransactionIdentifier((Transaction) args[0]);
                else if ("registerSynchronization".equals(methodName))
                    registerSynchronization((Synchronization) args[0]);
            }
        } catch (Throwable x) {
            // Trace the exceptional exit, then propagate the original throwable unchanged.
            if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
                Tr.exit(this, tc, methodName, x);
            throw x;
        }

        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            Tr.exit(this, tc, methodName, r);
        return r;
    }
}
public class CmsLanguagePreference { /** * Gets the options for the language selector . < p > * @ param setLocale the locale for the select options * @ return the options for the language selector */ private String getOptionsForLanguage ( Locale setLocale ) { } }
// get available locales from the workplace manager List < Locale > locales = OpenCms . getWorkplaceManager ( ) . getLocales ( ) ; StringBuffer resultBuffer = new StringBuffer ( ) ; int counter = 0 ; Iterator < Locale > i = locales . iterator ( ) ; while ( i . hasNext ( ) ) { Locale currentLocale = i . next ( ) ; // add all locales to the select box String language = currentLocale . getDisplayLanguage ( setLocale ) ; if ( CmsStringUtil . isNotEmpty ( currentLocale . getCountry ( ) ) ) { language = language + " (" + currentLocale . getDisplayCountry ( setLocale ) + ")" ; } if ( CmsStringUtil . isNotEmpty ( currentLocale . getVariant ( ) ) ) { language = language + " (" + currentLocale . getDisplayVariant ( setLocale ) + ")" ; } if ( counter != 0 ) { resultBuffer . append ( "|" ) ; } resultBuffer . append ( currentLocale . toString ( ) ) . append ( ":" ) . append ( language ) ; counter ++ ; } return resultBuffer . toString ( ) ;
public class NumberUtil { /** * Returns an integer X such that 2 ^ X = value . Throws an exception * if value is not a power of 2. * @ param value * @ return */ public static int getPowerOf2 ( long value ) { } }
Preconditions . checkArgument ( isPowerOf2 ( value ) ) ; return Long . SIZE - ( Long . numberOfLeadingZeros ( value ) + 1 ) ;
public class ConsistentGuardian {

    /**
     * Eventually consistent processing: replays a transaction's ordered log
     * contents, first running every processor's preLogProcess (which can vote
     * on the master transaction status), then—once the master status is
     * determined—running every processor's logProcess, publishing demi-log and
     * process-end events, and finally flushing the log cache.
     *
     * @param logCtx the processing context holding the log collection, vote
     *               state, caches and managers for this transaction
     * @return true if processing completed; false if any step asked to stop
     *         and retry later
     */
    public boolean process(LogProcessContext logCtx) {
        Boolean currentTrxStatus = logCtx.getFinalMasterTransStatus();
        logCtx.getMasterTransactionStatusVotter().setTransactionStatus(currentTrxStatus);

        LogCollection logCollection = logCtx.getLogCollection();
        List<Content> orderedContents = logCollection.getOrderedContents();

        // Invoke each LOG's processor preProcess in order.
        // Currently one job of preProcess is to determine the master transaction status;
        // another is to execute the SAGA forward call / TRY methods, etc.
        // If the master transaction status is unknown, the preProcess of some LOGs will
        // vote on it; if any vote says commit is not possible, the status becomes rollback.
        // The master status is then updated from the preProcess results (it must be
        // determined before execution can continue).
        for (int i = 0; i < orderedContents.size(); i++) {
            Content content = orderedContents.get(i);

            // check log order: contents must be 1-based and contiguous.
            Assert.isTrue(content.getcId() != null && content.getcId().equals(i + 1),
                    "content list did not sort or contentId is null");

            Class<? extends LogProcessor> proccessorClass =
                    ContentType.getById(content.getLogType()).getProccessorClass();
            if (proccessorClass == null) {
                if (LOG.isDebugEnabled()) {
                    LOG.debug("processor not set,continue" + content);
                }
                continue;
            }

            LogProcessor processor = proccessorMap.get(proccessorClass);
            if (!processor.preLogProcess(logCtx, content)) {
                // A false return means: stop now, the whole process will be retried later.
                LOG.warn("log pre-Processor return false,end proccesing and retry later." + content);
                return false;
            }
        }

        // If the status is unknown and this transaction is the master transaction
        // (no parent transaction id in the call metadata), update the master status
        // from the vote result.
        if (currentTrxStatus == null
                && MetaDataFilter.getMetaData(EasytransConstant.CallHeadKeys.PARENT_TRX_ID_KEY) == null) {
            boolean allowCommit = logCtx.getMasterTransactionStatusVotter().getCurrentVoteStatusCommited();
            int updateCount = transChecker.updateMasterTransactionStatus(
                    logCtx.getTransactionId(),
                    allowCommit ? TransactionStatus.COMMITTED : TransactionStatus.ROLLBACKED);

            // concurrent modify check: zero rows updated means someone else settled the status.
            if (updateCount == 0) {
                throw new RuntimeException("can not find the trx,or the status of Transaction is not UNKOWN!");
            }
            logCtx.setFinalMasterTransStatus(allowCommit);
        }

        // Invoke each LOG's processor logProcess in order,
        // collect and publish demi-log (SemiLog) events,
        // and publish the ProcessEnd event.
        for (int i = 0; i < orderedContents.size(); i++) {
            Content content = orderedContents.get(i);

            // check log order
            Assert.isTrue(content.getcId() != null && content.getcId().equals(i + 1),
                    "content list did not sort or contentId is null");

            // Register left/right demi-log halves so unmatched ones can be detected below.
            if (content instanceof DemiLeftContent) {
                logCtx.getDemiLogManager().registerLeftDemiLog((DemiLeftContent) content);
            } else if (content instanceof DemiRightContent) {
                logCtx.getDemiLogManager().registerRightDemiLog((DemiRightContent) content);
            }

            Class<? extends LogProcessor> proccessorClass =
                    ContentType.getById(content.getLogType()).getProccessorClass();
            if (proccessorClass == null) {
                if (LOG.isDebugEnabled()) {
                    LOG.debug("processor not set,continue" + content);
                }
                continue;
            }

            LogProcessor processor = proccessorMap.get(proccessorClass);
            if (!processor.logProcess(logCtx, content)) {
                LOG.warn("log processor return false,end proccesing and retry later." + content);
                return false;
            }
        }

        // NOTE(review): failures below are logged but do not return false — the
        // method still reports success and flushes; confirm this is intentional.
        if (!logCtx.getDemiLogManager().pubulishDemiLogEvent()) {
            LOG.warn("DemiLogEvent Process return false,end proccesing and retry later." + logCollection);
        }

        if (!logCtx.getProcessEndManager().publish()) {
            LOG.warn("End process return false,end proccesing and retry later." + logCollection);
        }

        // end and flush log
        if (leastLogModel) {
            logCtx.getLogCache().clearCacheLogs();
        }
        logCtx.getLogCache().flush(true);

        return true;
    }
}
public class ProblemEvent { /** * TODO actually implement localization . * @ return The localized message */ public String getLocalizedMessage ( ) { } }
Formatter f = new Formatter ( ) ; f . format ( m_defaultMessage , m_arguments ) ; return f . toString ( ) ;
public class ArchiveManager {

    /**
     * Initializes the archive manager: runs the archive once for the SIMPLE
     * method, or registers a cron-triggered job for the SCHEDULED method.
     * Any other archive method results in no action.
     */
    @Override
    public void init() {
        Log.info("Starting Archive Manager");
        if (archiveEnv.getArchiveMethod().equals(ArchiveMethod.SIMPLE)) {
            // One-shot: archive immediately at startup.
            executeArchive();
        } else if (archiveEnv.getArchiveMethod().equals(ArchiveMethod.SCHEDULED)) {
            jobs = new JobFactory().getJobs(archiveEnv);
            // Schedule executeArchive() on the configured cron pattern.
            // ("registor" is the scheduler API's spelling.)
            Schedulers.taskRegistry().registor(new CronTrigger(archiveEnv.getCronPattern()), new Runnable() {
                @Override
                public void run() {
                    executeArchive();
                }
            });
        }
    }
}
public class CheckpointSpout {

    /**
     * Loads the last saved checkpoint state from persistent storage, seeding an
     * initial COMMITTED state with txid -1 when none exists yet.
     *
     * @param conf the topology configuration passed to the state factory
     * @param ctx the topology context (used to derive a per-task namespace)
     * @return the key/value state holding this task's CheckPointState
     */
    private KeyValueState<String, CheckPointState> loadCheckpointState(Map conf, TopologyContext ctx) {
        // State is namespaced per component instance: componentId + "-" + taskId.
        String namespace = ctx.getThisComponentId() + "-" + ctx.getThisTaskId();
        KeyValueState<String, CheckPointState> state =
                (KeyValueState<String, CheckPointState>) StateFactory.getState(namespace, conf, ctx);
        if (state.get(TX_STATE_KEY) == null) {
            // No prior state: initialize with txid -1 in COMMITTED and persist it.
            CheckPointState txState = new CheckPointState(-1, CheckPointState.State.COMMITTED);
            state.put(TX_STATE_KEY, txState);
            state.commit();
            LOG.debug("Initialized checkpoint spout state with txState {}", txState);
        } else {
            LOG.debug("Got checkpoint spout state {}", state.get(TX_STATE_KEY));
        }
        return state;
    }
}
public class MenusSession {

    /**
     * SetupSubMenus Method.
     * Positions the menu record on the menu named by {@code strMenu} — first by
     * numeric ID (commas stripped), then by code, falling back to the default
     * main menu — and then re-filters the record to list that menu's children
     * (rows whose parent-folder ID equals the found menu's ID).
     *
     * @param strMenu the menu ID (possibly comma-formatted) or menu code to select
     */
    public void setupSubMenus(String strMenu) {
        Record recMenu = this.getMainRecord();
        try {
            String strCommandNoCommas = Utility.replace(strMenu, ",", null); // Get any commas out
            boolean bIsNumeric = Utility.isNumeric(strCommandNoCommas);
            if (bIsNumeric) {
                // Numeric input: try an exact ID lookup first.
                recMenu.setKeyArea(Menus.ID_KEY);
                recMenu.getField(Menus.ID).setString(strCommandNoCommas);
                bIsNumeric = recMenu.seek("=");
            }
            if (!bIsNumeric) {
                // Non-numeric (or ID not found): look up by menu code.
                recMenu.setKeyArea(Menus.CODE_KEY);
                recMenu.getField(Menus.CODE).setString(strMenu);
                if (!recMenu.seek("=")) {
                    // Not found, try the default main menu
                    recMenu.getField(Menus.CODE).setString(HtmlConstants.MAIN_MENU_KEY);
                    recMenu.seek("=");
                }
            }
        } catch (DBException ex) {
            ex.printStackTrace(); // Never
        }
        String strParentID = recMenu.getField(Menus.ID).toString();
        BaseListener listener = recMenu.getListener(StringSubFileFilter.class.getName());
        if (listener != null) { // Should just change the string
            recMenu.removeListener(listener, true);
        }
        // Re-key on parent-folder ID and filter to children of the menu we just found.
        recMenu.setKeyArea(Menus.PARENT_FOLDER_ID_KEY);
        recMenu.addListener(new StringSubFileFilter(strParentID, recMenu.getField(Menus.PARENT_FOLDER_ID), null, null, null, null));
    }
}
public class UpdateFlowEntitlementRequest { /** * The AWS account IDs that you want to share your content with . The receiving accounts ( subscribers ) will be * allowed to create their own flow using your content as the source . * @ param subscribers * The AWS account IDs that you want to share your content with . The receiving accounts ( subscribers ) will be * allowed to create their own flow using your content as the source . */ public void setSubscribers ( java . util . Collection < String > subscribers ) { } }
if ( subscribers == null ) { this . subscribers = null ; return ; } this . subscribers = new java . util . ArrayList < String > ( subscribers ) ;
public class Paragraph { /** * Gets the total leading . * This method is based on the assumption that the * font of the Paragraph is the font of all the elements * that make part of the paragraph . This isn ' t necessarily * true . * @ return the total leading ( fixed and multiplied ) */ public float getTotalLeading ( ) { } }
float m = font == null ? Font . DEFAULTSIZE * multipliedLeading : font . getCalculatedLeading ( multipliedLeading ) ; if ( m > 0 && ! hasLeading ( ) ) { return m ; } return getLeading ( ) + m ;
public class Bindings {

    /**
     * Binds the visible state of a to-be-created widget to the supplied boolean value. The
     * supplied thunk will be called to create the widget (and add it to the appropriate parent)
     * the first time the value transitions to true, at which point the visibility of the created
     * widget will be bound to subsequent changes of the value.
     *
     * @param value the boolean value driving visibility; also triggers widget creation
     * @param thunk factory invoked exactly once, on the first true value, to build the widget
     */
    public static void bindVisible(final Value<Boolean> value, final Thunk thunk) {
        Preconditions.checkNotNull(thunk, "thunk");
        value.addListenerAndTrigger(new Value.Listener<Boolean>() {
            public void valueChanged(Boolean visible) {
                if (visible) {
                    // First true value: stop listening here, create the widget, and hand
                    // off to the widget-based bindVisible overload for ongoing binding.
                    value.removeListener(this);
                    bindVisible(value, thunk.createWidget());
                }
            }
        });
    }
}
public class RxSharedPreferences {

    /**
     * Create a float preference for {@code key} with a default of {@code defaultValue}.
     *
     * @param key the preference key; must not be null
     * @param defaultValue the value returned when the preference is unset; must not be null
     * @return a reactive preference backed by the underlying SharedPreferences
     */
    @CheckResult
    @NonNull
    public Preference<Float> getFloat(@NonNull String key, @NonNull Float defaultValue) {
        checkNotNull(key, "key == null");
        checkNotNull(defaultValue, "defaultValue == null");
        return new RealPreference<>(preferences, key, defaultValue, FloatAdapter.INSTANCE, keyChanges);
    }
}
public class DefaultFileRenamePolicy {

    /**
     * Renames the given file to a non-conflicting name in the same directory.
     * Relies on {@code createNewFile}, which is atomic and used here to mark
     * when a file name is chosen. If the original name is free it is used
     * as-is; otherwise a counter (1, 2, ...) is inserted before the extension
     * until a free name is found or the 9999 cap is reached.
     *
     * @param f the desired destination file
     * @return a File whose name was successfully claimed (or the last candidate
     *         if the 9999 cap was hit)
     */
    public File rename(File f) {
        // Fast path: the requested name is available and is now claimed.
        if (createNewFile(f)) {
            return f;
        }

        String name = f.getName();
        String body = null;
        String ext = null;

        // Split the name into base and extension so the counter goes before the dot.
        int dot = name.lastIndexOf(".");
        if (dot != -1) {
            body = name.substring(0, dot);
            ext = name.substring(dot); // includes "."
        } else {
            body = name;
            ext = "";
        }

        // Increase the count until an empty spot is found.
        // Max out at 9999 to avoid an infinite loop caused by a persistent
        // IOException, like when the destination dir becomes non-writable.
        // We don't pass the exception up because our job is just to rename,
        // and the caller will hit any IOException in normal processing.
        int count = 0;
        while (!createNewFile(f) && count < 9999) {
            count++;
            String newName = body + count + ext;
            f = new File(f.getParent(), newName);
        }
        return f;
    }
}
public class WebACRolesProvider {

    /**
     * Get the roles assigned to this Node.
     *
     * @param node the subject Node
     * @return a set of roles for each principal (agent), keyed by principal name
     */
    public Map<String, Collection<String>> getRoles(final Node node) {
        // Convert the JCR node to a repository resource and delegate the
        // actual role resolution to getAgentRoles.
        return getAgentRoles(nodeConverter.convert(node));
    }
}
public class TextMatchBinding {

    /**
     * Splits the specified string around runs of whitespace (space, tab,
     * newline, carriage return, form feed). Consecutive separators count as
     * one: empty tokens are discarded. A null or blank input yields an empty
     * list.
     *
     * @param str the string to split, may be null
     * @return a list of the result strings, never null
     */
    public static List<String> split(final String str) {
        List<String> result = new ArrayList<String>();
        if (str == null) {
            return result;
        }
        // Split on single whitespace characters; runs of separators produce
        // empty tokens which are filtered out below.
        for (String token : str.trim().split("[ \t\n\r\f]")) {
            if (!token.isEmpty()) {
                result.add(token);
            }
        }
        return result;
    }
}
public class KeepAliveThread {

    /**
     * Blocks on this object's monitor for up to {@code ms} milliseconds before
     * the next keep-alive loop iteration. A notify() on this object wakes it
     * early. If interrupted, the interrupt status is restored so callers can
     * observe the interruption.
     *
     * @param ms maximum time to wait, in milliseconds (0 waits indefinitely,
     *           per Object.wait semantics)
     */
    private synchronized void waitNextLoop(long ms) {
        try {
            wait(ms);
        } catch (InterruptedException e) {
            // Re-assert the interrupt flag: swallowing it would make the
            // thread un-interruptible from the outside.
            Thread.currentThread().interrupt();
            System.err.println(e);
        }
    }
}
public class ResourceConverter { /** * Resolves actual type to be used for resource deserialization . * If user provides class with type annotation that is equal to the type value in response data , same class * will be used . If provided class is super type of actual class that is resolved using response type value , * subclass will be returned . This allows for deserializing responses in use cases where one of many subtypes * can be returned by the server and user is not sure which one will it be . * @ param object JSON object containing type value * @ param userType provided user type * @ return { @ link Class } */ private Class < ? > getActualType ( JsonNode object , Class < ? > userType ) { } }
String type = object . get ( TYPE ) . asText ( ) ; String definedTypeName = configuration . getTypeName ( userType ) ; if ( definedTypeName != null && definedTypeName . equals ( type ) ) { return userType ; } else { Class < ? > actualType = configuration . getTypeClass ( type ) ; if ( actualType != null && userType . isAssignableFrom ( actualType ) ) { return actualType ; } } throw new UnregisteredTypeException ( type ) ;
public class KeyTranslatorImpl { /** * documentation inherited from interface KeyTranslator */ public int getRepeatRate ( char ch ) { } }
KeyRecord krec = _charCommands . get ( ch ) ; return ( krec == null ) ? DEFAULT_REPEAT_RATE : krec . repeatRate ;
public class KeyPhrasesDetectionJobPropertiesMarshaller {

    /**
     * Marshall the given parameter object: writes each property of the job
     * properties to the protocol marshaller under its corresponding binding.
     *
     * @param keyPhrasesDetectionJobProperties the object to marshal; must not be null
     * @param protocolMarshaller the target marshaller
     * @throws SdkClientException if the argument is null or any marshalling step fails
     */
    public void marshall(KeyPhrasesDetectionJobProperties keyPhrasesDetectionJobProperties, ProtocolMarshaller protocolMarshaller) {
        if (keyPhrasesDetectionJobProperties == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }

        try {
            // One marshall call per property/binding pair.
            protocolMarshaller.marshall(keyPhrasesDetectionJobProperties.getJobId(), JOBID_BINDING);
            protocolMarshaller.marshall(keyPhrasesDetectionJobProperties.getJobName(), JOBNAME_BINDING);
            protocolMarshaller.marshall(keyPhrasesDetectionJobProperties.getJobStatus(), JOBSTATUS_BINDING);
            protocolMarshaller.marshall(keyPhrasesDetectionJobProperties.getMessage(), MESSAGE_BINDING);
            protocolMarshaller.marshall(keyPhrasesDetectionJobProperties.getSubmitTime(), SUBMITTIME_BINDING);
            protocolMarshaller.marshall(keyPhrasesDetectionJobProperties.getEndTime(), ENDTIME_BINDING);
            protocolMarshaller.marshall(keyPhrasesDetectionJobProperties.getInputDataConfig(), INPUTDATACONFIG_BINDING);
            protocolMarshaller.marshall(keyPhrasesDetectionJobProperties.getOutputDataConfig(), OUTPUTDATACONFIG_BINDING);
            protocolMarshaller.marshall(keyPhrasesDetectionJobProperties.getLanguageCode(), LANGUAGECODE_BINDING);
            protocolMarshaller.marshall(keyPhrasesDetectionJobProperties.getDataAccessRoleArn(), DATAACCESSROLEARN_BINDING);
            protocolMarshaller.marshall(keyPhrasesDetectionJobProperties.getVolumeKmsKeyId(), VOLUMEKMSKEYID_BINDING);
        } catch (Exception e) {
            // Wrap any failure, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class FileIO { /** * Retrieves file from a remote location identified by a URL . * @ param url * @ return * @ throws IOException */ public static File getRemoteFile ( URL url , boolean keepAlive ) throws IOException { } }
File downloadedFile = File . createTempFile ( "downloaded-" , ".bytes" ) ; URLConnection conn = url . openConnection ( ) ; if ( keepAlive ) { conn . setRequestProperty ( "connection" , "Keep-Alive" ) ; } conn . setUseCaches ( false ) ; try ( ReadableByteChannel inputChannel = Channels . newChannel ( conn . getInputStream ( ) ) ) { try ( WritableByteChannel outputChannel = Channels . newChannel ( new FileOutputStream ( downloadedFile ) ) ) { fastChannelCopy ( inputChannel , outputChannel ) ; } } return downloadedFile ;
public class Http2StateUtil {

    /**
     * Creates a local stream with the given stream id.
     * Synchronized at class level — NOTE(review): this serializes stream
     * creation across all connections; confirm that scope is intended.
     *
     * @param conn the HTTP/2 connection
     * @param streamId the id of the stream
     * @throws Http2Exception if a protocol-related error occurred
     */
    private static synchronized void createStream(Http2Connection conn, int streamId) throws Http2Exception {
        // false: create the stream in the open (non-half-closed) state.
        conn.local().createStream(streamId, false);
        if (LOG.isDebugEnabled()) {
            LOG.debug("Stream created streamId: {}", streamId);
        }
    }
}
public class MetamodelImpl { /** * ( non - Javadoc ) * @ see javax . persistence . metamodel . Metamodel # entity ( java . lang . Class ) */ @ Override public < X > EntityType < X > entity ( Class < X > paramClass ) { } }
EntityType entityType = entityTypes . get ( paramClass ) ; if ( entityType == null ) { throw new IllegalArgumentException ( "Not an entity, {class:" + paramClass + "}" ) ; } return entityType ;
public class InputMapTemplate {

    /**
     * Instantiates the input map template for the given node and installs it
     * via {@link Nodes#addFallbackInputMap(Node, InputMap)}, so it only handles
     * events no earlier-installed input map consumed.
     *
     * @param imt the template to instantiate
     * @param node the node to instantiate for and install on
     */
    public static <S extends Node, E extends Event> void installFallback(InputMapTemplate<S, E> imt, S node) {
        Nodes.addFallbackInputMap(node, imt.instantiate(node));
    }
}
public class FeatureTiles { /** * Draw a tile image from the x , y , and zoom level * @ param x * x coordinate * @ param y * y coordinate * @ param zoom * zoom level * @ return tile image , or null */ public BufferedImage drawTile ( int x , int y , int zoom ) { } }
BufferedImage image ; if ( isIndexQuery ( ) ) { image = drawTileQueryIndex ( x , y , zoom ) ; } else { image = drawTileQueryAll ( x , y , zoom ) ; } return image ;
public class Resource { /** * Checks the resource collision . * @ param count * If we are testing W / W conflict , total # of write counts . * For R / W conflict test , this value should be set to { @ link Integer # MAX _ VALUE } . */ public boolean isCollidingWith ( Resource that , int count ) { } }
assert that != null ; for ( Resource r = that ; r != null ; r = r . parent ) if ( this . equals ( r ) && r . numConcurrentWrite < count ) return true ; for ( Resource r = this ; r != null ; r = r . parent ) if ( that . equals ( r ) && r . numConcurrentWrite < count ) return true ; return false ;
public class RecurrenceIteratorFactory {

    /**
     * Creates a recurrence iterable from an RRULE. Each call to
     * {@code iterator()} produces a fresh iterator over the series.
     *
     * @param rrule the recurrence rule
     * @param dtStart the start date of the series
     * @param tzid the timezone that the start date is in, as well as the
     *        timezone to iterate in
     * @return the iterable
     */
    public static RecurrenceIterable createRecurrenceIterable(
            final Recurrence rrule, final DateValue dtStart, final TimeZone tzid) {
        return new RecurrenceIterable() {
            public RecurrenceIterator iterator() {
                // New iterator per call so the iterable is reusable.
                return createRecurrenceIterator(rrule, dtStart, tzid);
            }
        };
    }
}
public class ParsingModelIo { /** * Writes a model into a string . * @ param relationsFile the relations file * @ param writeComments true to write comments * @ param lineSeparator the line separator ( if null , the OS ' one is used ) * @ return a string ( never null ) */ public static String writeConfigurationFile ( FileDefinition relationsFile , boolean writeComments , String lineSeparator ) { } }
return new FileDefinitionSerializer ( lineSeparator ) . write ( relationsFile , writeComments ) ;
public class StreamExecutionEnvironment {

    /**
     * Generic method to create an input data stream with
     * {@link org.apache.flink.api.common.io.InputFormat}.
     *
     * <p>Since all data streams need specific information about their types, this method needs
     * to determine the type of the data produced by the input format. It will attempt to
     * determine the data type by reflection, unless the input format implements the
     * {@link org.apache.flink.api.java.typeutils.ResultTypeQueryable} interface. In the latter
     * case, this method will invoke the
     * {@link org.apache.flink.api.java.typeutils.ResultTypeQueryable#getProducedType()} method
     * to determine the data type produced by the input format.
     *
     * <p><b>NOTES ON CHECKPOINTING:</b> In the case of a {@link FileInputFormat}, the source
     * (which executes the {@link ContinuousFileMonitoringFunction}) monitors the path, creates
     * the {@link org.apache.flink.core.fs.FileInputSplit FileInputSplits} to be processed,
     * forwards them to the downstream {@link ContinuousFileReaderOperator} to read the actual
     * data, and exits, without waiting for the readers to finish reading. This implies that no
     * more checkpoint barriers are going to be forwarded after the source exits, thus having no
     * checkpoints.
     *
     * @param inputFormat The input format used to create the data stream
     * @param <OUT> The type of the returned data stream
     * @return The data stream that represents the data created by the input format
     */
    @PublicEvolving
    public <OUT> DataStreamSource<OUT> createInput(InputFormat<OUT, ?> inputFormat) {
        // Delegate to the two-argument overload with the reflectively extracted type.
        return createInput(inputFormat, TypeExtractor.getInputFormatTypes(inputFormat));
    }
}
public class TxTMHelper {

    /**
     * Called by DS to inject the reference to the RecoveryLog Service. Marks the
     * recovery log service as ready and, if all other prerequisites are in place,
     * kicks off recovery immediately.
     *
     * @param ref the declarative-services reference to the recovery log service
     */
    public void setRecoveryLogService(ServiceReference<RecLogServiceImpl> ref) {
        if (tc.isEntryEnabled())
            Tr.entry(tc, "setRecoveryLogService", ref);

        _recoveryLogServiceReady = true;

        if (ableToStartRecoveryNow()) {
            // Can start recovery now
            try {
                startRecovery();
            } catch (Exception e) {
                // Record the failure for first-failure data capture; no rethrow
                // since DS injection must not fail.
                FFDCFilter.processException(e, "com.ibm.tx.jta.util.impl.TxTMHelper.setRecoveryLogService", "148", this);
            }
        }

        if (tc.isEntryEnabled())
            Tr.exit(tc, "setRecoveryLogService");
    }
}
public class LatLonGridlineOverlay {
    /**
     * Gets the start and end latitudes for the latitude gridlines, snapped to the
     * gridline increment for the given zoom level.
     *
     * @param north northern edge of the viewport (degrees latitude)
     * @param south southern edge of the viewport (degrees latitude)
     * @param zoom current zoom level (selects the gridline increment)
     * @return a two-element array {start, stop}
     */
    private static double[] getStartEndPointsNS(double north, double south, int zoom) {
        // brute force when zoom is less than 10
        if (zoom < 10) {
            double sn_start_point = Math.floor(south);
            double incrementor = getIncrementor(zoom);
            // Walk up from -90 in increment steps until we reach the floored southern edge.
            double x = -90;
            while (x < sn_start_point)
                x = x + incrementor;
            sn_start_point = x;
            double sn_stop_point = Math.ceil(north);
            // Walk down from 90 until we reach the ceiled northern edge.
            x = 90;
            while (x > sn_stop_point)
                x = x - incrementor;
            sn_stop_point = x;
            // Clamp to the valid latitude range.
            if (sn_stop_point > 90) {
                sn_stop_point = 90;
            }
            if (sn_start_point < -90) {
                sn_start_point = -90;
            }
            return new double[] { sn_start_point, sn_stop_point };
        } else {
            // hmm start at origin, add inc until we go too far, then back off, go to the next zoom level
            double sn_start_point = -90;
            if (south > 0) {
                sn_start_point = 0;
            }
            double sn_stop_point = 90;
            if (north < 0) {
                sn_stop_point = 0;
            }
            // Tighten the bounds one zoom level at a time toward the viewport edges.
            for (int xx = 2; xx <= zoom; xx++) {
                double inc = getIncrementor(xx);
                while (sn_start_point < south - inc) {
                    sn_start_point += inc;
                    if (DEBUG) {
                        System.out.println("south " + sn_start_point);
                    }
                }
                while (sn_stop_point > north + inc) {
                    sn_stop_point -= inc;
                    if (DEBUG) {
                        System.out.println("north " + sn_stop_point);
                    }
                }
            }
            return new double[] { sn_start_point, sn_stop_point };
        }
    }
}
public class Channel {
    /**
     * Connects this channel to a group and gets a state from a specified state provider.
     * Essentially invokes {@code connect} and {@code getState} successively; if the FLUSH
     * protocol is in the channel's stack, only one flush is executed for both connecting and
     * fetching state (instead of two). If the channel is closed an exception will be thrown.
     *
     * @param cluster_name the cluster name to connect to. Cannot be null.
     * @param target the state provider. If null, state is fetched from the coordinator,
     *        unless this channel is the coordinator.
     * @param timeout the timeout for state transfer
     * @exception Exception connecting to the cluster or state transfer was not successful
     * @exception IllegalStateException the channel is closed and therefore cannot be used
     */
    public void connect(String cluster_name, Address target, long timeout) throws Exception {
        // Delegate to the wrapped channel; connect and state transfer happen in one step.
        ch.connect(cluster_name, target, timeout);
    }
}
public class FA { /** * Negates normal states and end states . If the automaton is deterministic , * the accepted language is negated . */ public void not ( ) { } }
IntBitSet tmp ; tmp = new IntBitSet ( ) ; tmp . addRange ( 0 , used - 1 ) ; tmp . removeAll ( ends ) ; ends = tmp ;
public class MessageLogGridScreen { /** * IsContactDisplay Method . */ public boolean isContactDisplay ( ) { } }
String strUserContactType = this . getProperty ( DBParams . CONTACT_TYPE ) ; String strUserContactID = this . getProperty ( DBParams . CONTACT_ID ) ; String strContactType = ( ( ReferenceField ) this . getScreenRecord ( ) . getField ( MessageLogScreenRecord . CONTACT_TYPE_ID ) ) . getReference ( ) . getField ( ContactType . CODE ) . toString ( ) ; String strContactID = this . getScreenRecord ( ) . getField ( MessageLogScreenRecord . CONTACT_ID ) . toString ( ) ; if ( ( strUserContactID != null ) && ( strUserContactID . equals ( strContactID ) ) ) if ( ( strUserContactType != null ) && ( strUserContactType . equals ( strContactType ) ) ) return true ; return false ;
public class BaseWindowedBolt { /** * define a tumbling processing time window * @ param size window size */ public BaseWindowedBolt < T > timeWindow ( Time size ) { } }
long s = size . toMilliseconds ( ) ; ensurePositiveTime ( s ) ; setSizeAndSlide ( s , DEFAULT_SLIDE ) ; this . windowAssigner = TumblingProcessingTimeWindows . of ( s ) ; return this ;
public class Roaring64NavigableMap { /** * For better performance , consider the Use the { @ link # forEach forEach } method . * @ return a custom iterator over set bits , the bits are traversed in ascending sorted order */ public Iterator < Long > iterator ( ) { } }
final LongIterator it = getLongIterator ( ) ; return new Iterator < Long > ( ) { @ Override public boolean hasNext ( ) { return it . hasNext ( ) ; } @ Override public Long next ( ) { return it . next ( ) ; } @ Override public void remove ( ) { // TODO ? throw new UnsupportedOperationException ( ) ; } } ;
public class FragmentJoiner {
    /**
     * Gets the RMSD of a JointFragments alignment.
     *
     * <p>The fragment's aligned index lists are used to extract matching atom subsets
     * from both structures; the optimal superposition is computed over those indices,
     * applied to the second subset, and the RMSD between the superposed subsets is
     * returned. Note that the atoms extracted from {@code ca2} are rotated and shifted
     * in place.
     *
     * @param ca1 the array of all atoms of structure 1
     * @param ca2 the array of all atoms of structure 2
     * @param frag the JointFragments object that contains the list of identical positions
     * @return the RMSD of the superposed fragment pair
     * @throws StructureException if the superposition or RMSD calculation fails
     */
    public static double getRMS(Atom[] ca1, Atom[] ca2, JointFragments frag) throws StructureException {
        // now svd ftmp and check if the rms is < X ...
        AlternativeAlignment ali = new AlternativeAlignment();
        ali.apairs_from_idxlst(frag);

        int[] idx1 = ali.getIdx1();
        int[] idx2 = ali.getIdx2();

        Atom[] ca1subset = AlignUtils.getFragmentFromIdxList(ca1, idx1);
        Atom[] ca2subset = AlignUtils.getFragmentFromIdxList(ca2, idx2);

        // Superimpose using the fragment's index lists ...
        ali.calculateSuperpositionByIdx(ca1, ca2);
        Matrix rot = ali.getRotationMatrix();
        Atom shift = ali.getShift();

        // ... then apply the resulting transform to the second subset before measuring.
        for (Atom a : ca2subset) {
            Calc.rotate(a, rot);
            Calc.shift(a, shift);
        }

        // Fixed: removed the dead "rms = 999" placeholder store; return the computed RMSD directly.
        return Calc.rmsd(ca1subset, ca2subset);
    }
}
public class CollectionBindings { /** * Returns an object binding whose value is the reduction of all elements in the list . * @ param items the observable list of elements . * @ param reducer an associative , non - interfering , stateless function for combining two values . * @ param supplier a { @ code Supplier } whose result is returned if no value is present . * @ return an object binding */ public static < T > ObjectBinding < T > reducing ( final ObservableList < T > items , final ObservableValue < BinaryOperator < T > > reducer , final Supplier < T > supplier ) { } }
return Bindings . createObjectBinding ( ( ) -> items . stream ( ) . reduce ( reducer . getValue ( ) ) . orElseGet ( supplier ) , items , reducer ) ;
public class AbstractFileCache { /** * 获得缓存过的文件bytes * @ param file 文件 * @ return 缓存过的文件bytes * @ throws IORuntimeException IO异常 */ public byte [ ] getFileBytes ( File file ) throws IORuntimeException { } }
byte [ ] bytes = cache . get ( file ) ; if ( bytes != null ) { return bytes ; } // add file bytes = FileUtil . readBytes ( file ) ; if ( ( maxFileSize != 0 ) && ( file . length ( ) > maxFileSize ) ) { // 大于缓存空间 , 不缓存 , 直接返回 return bytes ; } usedSize += bytes . length ; // 文件放入缓存 , 如果usedSize > capacity , purge ( ) 方法将被调用 cache . put ( file , bytes ) ; return bytes ;
public class LongStream { /** * Returns the first element wrapped by { @ code OptionalLong } class . * If stream is empty , returns { @ code OptionalLong . empty ( ) } . * < p > This is a short - circuiting terminal operation . * @ return an { @ code OptionalLong } with first element * or { @ code OptionalLong . empty ( ) } if stream is empty */ @ NotNull public OptionalLong findFirst ( ) { } }
if ( iterator . hasNext ( ) ) { return OptionalLong . of ( iterator . nextLong ( ) ) ; } return OptionalLong . empty ( ) ;
public class ActionType { /** * This is where a code snippet template is added . */ public void addTemplate ( int row , int column , String content ) { } }
if ( this . sourceBuilder == null ) { throw new DecisionTableParseException ( "Unexpected content \"" + content + "\" in cell " + RuleSheetParserUtil . rc2name ( row , column ) + ", leave this cell blank" ) ; } this . sourceBuilder . addTemplate ( row , column , content ) ;
public class AddressImpl { /** * ( non - Javadoc ) * @ see javax . servlet . sip . Address # getURI ( ) */ public URI getURI ( ) { } }
final javax . sip . address . URI localUri = getAddress ( ) . getURI ( ) ; if ( localUri instanceof javax . sip . address . SipURI ) return new SipURIImpl ( ( javax . sip . address . SipURI ) localUri , isModifiable ) ; else if ( localUri instanceof javax . sip . address . TelURL ) return new TelURLImpl ( ( javax . sip . address . TelURL ) localUri ) ; else if ( localUri instanceof javax . sip . address . URI ) { URI uri = new GenericURIImpl ( ( javax . sip . address . URI ) localUri ) ; // setting the value to make sure jain sip runs scheme validation on it ( ( Parameterable ) uri ) . setValue ( localUri . toString ( ) ) ; return uri ; } else throw new IllegalArgumentException ( "unsupported operation - unknown scheme" ) ;
public class UpdateEventSourceMappingRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( UpdateEventSourceMappingRequest updateEventSourceMappingRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( updateEventSourceMappingRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( updateEventSourceMappingRequest . getUUID ( ) , UUID_BINDING ) ; protocolMarshaller . marshall ( updateEventSourceMappingRequest . getFunctionName ( ) , FUNCTIONNAME_BINDING ) ; protocolMarshaller . marshall ( updateEventSourceMappingRequest . getEnabled ( ) , ENABLED_BINDING ) ; protocolMarshaller . marshall ( updateEventSourceMappingRequest . getBatchSize ( ) , BATCHSIZE_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class ThreadLocalRandom {
    /**
     * Returns the pseudo-randomly initialized or updated secondary seed.
     * The seed lives in a per-thread field accessed via Unsafe; a stored value
     * of zero means "not yet initialized".
     */
    static final int nextSecondarySeed() {
        int r;
        Thread t = Thread.currentThread();
        if ((r = U.getInt(t, SECONDARY)) != 0) {
            // Advance the existing seed with a Marsaglia xorshift step.
            r ^= r << 13;   // xorshift
            r ^= r >>> 17;
            r ^= r << 5;
        }
        // First use on this thread: derive a seed from the global seeder sequence.
        else if ((r = mix32(seeder.getAndAdd(SEEDER_INCREMENT))) == 0)
            r = 1; // avoid zero
        U.putInt(t, SECONDARY, r);
        return r;
    }
}
public class ModelMetrics { /** * For one or more water . ModelMetrics from the KV store return Response containing a map of them . */ private Response serveOneOrAll ( List < water . ModelMetrics > list ) { } }
JsonArray metricsArray = new JsonArray ( ) ; for ( water . ModelMetrics metrics : list ) { JsonObject metricsJson = metrics . toJSON ( ) ; metricsArray . add ( metricsJson ) ; } JsonObject result = new JsonObject ( ) ; result . add ( "metrics" , metricsArray ) ; return Response . done ( result ) ;
public class ConfigObject { /** * Converts this ConfigObject into a the java . util . Properties format , flattening the tree structure beforehand * @ return A java . util . Properties instance */ public Properties toProperties ( ) { } }
Properties props = new Properties ( ) ; flatten ( props ) ; props = convertValuesToString ( props ) ; return props ;
public class FileSystemContext { /** * Acquires a block master client resource from the block master client pool . The resource is * { @ code Closeable } . * @ return the acquired block master client resource */ public CloseableResource < BlockMasterClient > acquireBlockMasterClientResource ( ) { } }
return new CloseableResource < BlockMasterClient > ( mBlockMasterClientPool . acquire ( ) ) { @ Override public void close ( ) { mBlockMasterClientPool . release ( get ( ) ) ; } } ;
public class CnvIbnVersionToCv { /** * < p > Put version current and old to ColumnsValues * according version algorithm . < / p > * @ param pAddParam expected version algorithm with name " versionAlgorithm " . * @ param pFrom from a Long object * @ param pTo to ColumnsValues * @ param pName by a name * @ throws Exception - an exception */ @ Override public final void convert ( final Map < String , Object > pAddParam , final Long pFrom , final ColumnsValues pTo , final String pName ) throws Exception { } }
Integer versionAlgorithm = ( Integer ) pAddParam . get ( "versionAlgorithm" ) ; if ( versionAlgorithm == null ) { throw new ExceptionWithCode ( ExceptionWithCode . WRONG_PARAMETER , "Missed parameter versionAlgorithm!" ) ; } Long valueLngNew = null ; if ( versionAlgorithm == 1 ) { valueLngNew = new Date ( ) . getTime ( ) ; } else { if ( pFrom == null ) { valueLngNew = 1L ; } else { valueLngNew = pFrom + 1 ; } } pTo . put ( pName , valueLngNew ) ; pTo . put ( ISrvOrm . VERSIONOLD_NAME , pFrom ) ;
public class SparseHashDoubleVector { /** * { @ inheritDoc } */ public double add ( int index , double delta ) { } }
double val = vector . get ( index ) + delta ; if ( val == 0 ) vector . remove ( index ) ; else set ( index , val ) ; nonZeroIndices = null ; magnitude = - 1 ; return val ;
public class ViewHelper { /** * Equivalent to calling ImageView . setImageBitmap * @ param cacheView The cache of views to get the view from * @ param viewId The id of the view whose image should change * @ param bm The bitmap to set */ public static void setImageBitmap ( EfficientCacheView cacheView , int viewId , Bitmap bm ) { } }
View view = cacheView . findViewByIdEfficient ( viewId ) ; if ( view instanceof ImageView ) { ( ( ImageView ) view ) . setImageBitmap ( bm ) ; }
public class ProtoLexer {
    /**
     * $ANTLR start "SYNTAX"
     * Generated lexer rule: matches the literal keyword "syntax".
     */
    public final void mSYNTAX() throws RecognitionException {
        try {
            int _type = SYNTAX;
            int _channel = DEFAULT_TOKEN_CHANNEL;
            // com/dyuproject/protostuff/parser/ProtoLexer.g:99:5: ( 'syntax' )
            // com/dyuproject/protostuff/parser/ProtoLexer.g:99:9: 'syntax'
            {
                match("syntax");
            }
            state.type = _type;
            state.channel = _channel;
        } finally {
            // Generated code: no cleanup required for this rule.
        }
    }
}
public class ProfileService { /** * Returns a collection of all profiles * @ return Collection of all Profiles * @ throws Exception exception */ public List < Profile > findAllProfiles ( ) throws Exception { } }
ArrayList < Profile > allProfiles = new ArrayList < > ( ) ; PreparedStatement statement = null ; ResultSet results = null ; try ( Connection sqlConnection = sqlService . getConnection ( ) ) { statement = sqlConnection . prepareStatement ( "SELECT * FROM " + Constants . DB_TABLE_PROFILE ) ; results = statement . executeQuery ( ) ; while ( results . next ( ) ) { allProfiles . add ( this . getProfileFromResultSet ( results ) ) ; } } catch ( Exception e ) { throw e ; } finally { try { if ( results != null ) { results . close ( ) ; } } catch ( Exception e ) { } try { if ( statement != null ) { statement . close ( ) ; } } catch ( Exception e ) { } } return allProfiles ;
public class UriUtils { /** * Creates a new URL string from the specified base URL and parameters * encoded in non - form encoding , www - urlencoded . * @ param baseUrl The base URL excluding parameters . * @ param paramMap The parameters . * @ return The full URL string . */ public static String newWwwUrlEncodedUrl ( final String baseUrl , final Map < String , String > paramMap ) { } }
final StringBuilder sb = new StringBuilder ( ) ; sb . append ( baseUrl ) ; sb . append ( getUrlParameters ( paramMap , false ) ) ; return sb . toString ( ) ;
public class Dialog { /** * Set the background drawable of neutral action button . * @ param id The resourceId of drawable . * @ return The Dialog for chaining methods . */ public Dialog neutralActionBackground ( int id ) { } }
return neutralActionBackground ( id == 0 ? null : getContext ( ) . getResources ( ) . getDrawable ( id ) ) ;
public class GeometrySimplificationProcessImpl { /** * Accepts a geometry and a resolution . Returns a version of the geometry * which is simplified for the given resolution . If the initial geometry * is already simplified enough , then return null . * @ param geometry Geometry to simplify * @ param resolution Resolution at which the geometry should be simplified * @ return The simplified geometry . Null , if no simplification is possible . * @ throws Exception */ public Geometry simplifyGeometryAtResolution ( Geometry geometry , double resolution ) throws Exception { } }
double inverseRes = 1 / resolution ; double p = Math . log10 ( inverseRes ) ; double exp = Math . ceil ( p ) ; if ( exp < 0 ) exp = 0 ; double factor = Math . pow ( 10 , exp ) ; Geometry simplifiedGeometry = simplify ( geometry , resolution , factor ) ; return simplifiedGeometry ;
public class SlidingTabLayout { /** * Sets the associated view pager . Note that the assumption here is that the pager content * ( number of tabs and tab titles ) does not change after this call has been made . */ public void setViewPager ( ViewPager viewPager ) { } }
mTabStrip . removeAllViews ( ) ; mViewPager = viewPager ; if ( viewPager != null ) { viewPager . setOnPageChangeListener ( new InternalViewPagerListener ( ) ) ; populateTabStrip ( ) ; }
public class VersionParser { /** * Finds the nearest character type . * @ param types * the character types to choose from * @ return the nearest character type or { @ code EOL } */ private CharType nearestCharType ( CharType ... types ) { } }
for ( Character chr : chars ) { for ( CharType type : types ) { if ( type . isMatchedBy ( chr ) ) { return type ; } } } return CharType . EOL ;
public class ODMGBaseBeanImpl { /** * Delete a Collection of objects . */ public void deleteObjects ( Collection objects ) { } }
for ( Iterator iterator = objects . iterator ( ) ; iterator . hasNext ( ) ; ) { getDatabase ( ) . deletePersistent ( iterator . next ( ) ) ; }
public class TemplateParser { /** * Resolve static method calls using static imports * @ param expression The expression to resolve */ private void resolveStaticMethodsUsingImports ( Expression expression ) { } }
if ( expression instanceof MethodCallExpr ) { MethodCallExpr methodCall = ( ( MethodCallExpr ) expression ) ; String methodName = methodCall . getName ( ) . getIdentifier ( ) ; if ( ! methodCall . getScope ( ) . isPresent ( ) && context . hasStaticMethod ( methodName ) ) { methodCall . setName ( context . getFullyQualifiedNameForMethodName ( methodName ) ) ; } } // Recurse downward in the expression expression . getChildNodes ( ) . stream ( ) . filter ( Expression . class :: isInstance ) . map ( Expression . class :: cast ) . forEach ( this :: resolveStaticMethodsUsingImports ) ;
public class V1KnowledgeComponentImplementationModel { /** * { @ inheritDoc } */ @ Override public WorkItemHandlersModel getWorkItemHandlers ( ) { } }
if ( _workItemHandlers == null ) { _workItemHandlers = ( WorkItemHandlersModel ) getFirstChildModel ( WORK_ITEM_HANDLERS ) ; } return _workItemHandlers ;
public class LIBORMarketModelWithTenorRefinement {
    /**
     * Returns the complete vector of the drift for the time index timeIndex, given that the
     * current state is realizationAtTimeIndex. The drift will be zero for rates being already
     * fixed.
     *
     * <p>The method provides the drift for either {@code Measure.SPOT} or
     * {@code Measure.TERMINAL} &ndash; depending on how the model object was constructed. For
     * {@code Measure.TERMINAL} the j-th entry of the return value is the random variable
     * \( \mu_{j}^{\mathbb{Q}^{P(T_{n})}}(t) = - \mathop{\sum_{l \geq j+1}}_{l \leq n-1}
     * \frac{\delta_{l}}{1+\delta_{l} L_{l}(t)} (\lambda_{j}(t) \cdot \lambda_{l}(t)) \)
     * and for {@code Measure.SPOT} it is
     * \( \mu_{j}^{\mathbb{Q}^{N}}(t) = \sum_{m(t) &lt; l \leq j}
     * \frac{\delta_{l}}{1+\delta_{l} L_{l}(t)} (\lambda_{j}(t) \cdot \lambda_{l}(t)) \),
     * where \( \lambda_{j} \) is the vector of factor loadings for the j-th component of the
     * stochastic process. The scalar product of the factor loadings determines the
     * instantaneous covariance.
     *
     * <p>The calculation of the drift is consistent with the calculation of the numeraire in
     * {@code getNumeraire}.
     *
     * @param timeIndex Time index <i>i</i> for which the drift should be returned <i>&mu;(t<sub>i</sub>)</i>.
     * @param realizationAtTimeIndex Current forward rate vector at time index <i>i</i> used in the calculation.
     * @param realizationPredictor Predictor realization (not used by the drift terms computed here).
     * @return The drift vector &mu;(t<sub>i</sub>) as <code>RandomVariableInterface[]</code>
     */
    @Override
    public RandomVariableInterface[] getDrift(int timeIndex, RandomVariableInterface[] realizationAtTimeIndex, RandomVariableInterface[] realizationPredictor) {
        double time = getTime(timeIndex);
        double timeStep = getTimeDiscretization().getTimeStep(timeIndex);
        double timeNext = getTime(timeIndex + 1);

        RandomVariableInterface zero = getProcess().getStochasticDriver().getRandomVariableForConstant(0.0);

        // Allocate drift vector and initialize to zero (will be used to sum up drift components)
        RandomVariableInterface[] drift = new RandomVariableInterface[getNumberOfComponents()];
        for (int componentIndex = 0; componentIndex < getNumberOfComponents(); componentIndex++) {
            drift[componentIndex] = null;
        }

        // Per-component instantaneous variance accumulators (feed the integrated-variance drift).
        RandomVariableInterface[] variances = new RandomVariableInterface[getNumberOfComponents()];
        for (int componentIndex = 0; componentIndex < getNumberOfComponents(); componentIndex++) {
            variances[componentIndex] = zero;
        }

        // Running factor-wise covariance sums over the components processed so far.
        RandomVariableInterface[] covarianceFactorSums = new RandomVariableInterface[getNumberOfFactors()];
        for (int factorIndex = 0; factorIndex < getNumberOfFactors(); factorIndex++) {
            covarianceFactorSums[factorIndex] = zero;
        }

        /*
         * Standard HJM drift part of log-forward-bond
         */
        TimeDiscretizationInterface liborPeriodDiscretization = getLiborPeriodDiscretization(timeNext);

        // Calculate drift for the component componentIndex (starting at firstLiborIndex, others are zero)
        for (int componentIndex = 0; componentIndex < liborPeriodDiscretization.getNumberOfTimeSteps(); componentIndex++) {
            drift[componentIndex] = zero;

            double periodStart = liborPeriodDiscretization.getTime(componentIndex);
            double periodLength = liborPeriodDiscretization.getTimeStep(componentIndex);
            double periodEnd = periodStart + periodLength;
            double tenorTime = covarianceModel.getScaledTenorTime(periodStart, periodEnd);

            // @todo Document that factorLoading componentIndexing is on time discretization of t+1 for interval (t,t+1)
            RandomVariableInterface[] factorLoading = getFactorLoading(timeIndex, componentIndex, realizationAtTimeIndex);
            double weight = getWeightForTenorRefinement(periodStart, periodStart, periodStart, periodEnd);
            for (int factorIndex = 0; factorIndex < getNumberOfFactors(); factorIndex++) {
                // Accumulate the drift term, the instantaneous variance, and advance the factor sums.
                drift[componentIndex] = drift[componentIndex].addProduct(covarianceFactorSums[factorIndex].addProduct(factorLoading[factorIndex], weight), factorLoading[factorIndex]);
                variances[componentIndex] = variances[componentIndex].addProduct(factorLoading[factorIndex], factorLoading[factorIndex]);
                covarianceFactorSums[factorIndex] = covarianceFactorSums[factorIndex].addProduct(factorLoading[factorIndex], tenorTime);
            }
        }

        /*
         * Change of tenor discretization - impact on log-forward-bond
         */
        TimeDiscretizationInterface liborPeriodDiscretizationPrevious = getLiborPeriodDiscretization(time);
        for (int componentIndex = 0; componentIndex < liborPeriodDiscretization.getNumberOfTimeSteps(); componentIndex++) {
            double periodStart = liborPeriodDiscretization.getTime(componentIndex);
            double periodLength = liborPeriodDiscretization.getTimeStep(componentIndex);
            double periodEnd = periodStart + periodLength;
            double periodStartPrevious = liborPeriodDiscretizationPrevious.getTime(componentIndex);
            double periodLengthPrevious = liborPeriodDiscretizationPrevious.getTimeStep(componentIndex);
            double periodEndPrevious = periodStartPrevious + periodLengthPrevious;
            // Unchanged period: no tenor-refinement correction for this component.
            if (periodStartPrevious == periodStart && periodEndPrevious == periodEnd) {
                continue;
            }

            RandomVariableInterface stateVariablePrevious = getStateVariable(timeIndex, periodStartPrevious, periodEndPrevious);
            RandomVariableInterface stateVariable = getStateVariable(timeIndex, periodStart, periodEnd);
            if (Double.isNaN(stateVariable.getAverage()) || Double.isNaN(stateVariablePrevious.getAverage())) {
                throw new IllegalArgumentException();
            }

            // Shift in indexing and/or tenor refinement
            drift[componentIndex] = drift[componentIndex].add(stateVariable.sub(stateVariablePrevious).div(timeStep));
        }

        /*
         * Integrated variance - drift part
         */
        for (int componentIndex = 0; componentIndex < liborPeriodDiscretization.getNumberOfTimeSteps(); componentIndex++) {
            drift[getNumberOfLibors() + componentIndex] = variances[componentIndex];
        }

        /*
         * Change of tenor discretization - impact on integrated variance
         */
        for (int componentIndex = 0; componentIndex < liborPeriodDiscretization.getNumberOfTimeSteps(); componentIndex++) {
            double periodStart = liborPeriodDiscretization.getTime(componentIndex);
            double periodLength = liborPeriodDiscretization.getTimeStep(componentIndex);
            double periodEnd = periodStart + periodLength;
            double periodStartPrevious = liborPeriodDiscretizationPrevious.getTime(componentIndex);
            double periodLengthPrevious = liborPeriodDiscretizationPrevious.getTimeStep(componentIndex);
            double periodEndPrevious = periodStartPrevious + periodLengthPrevious;
            if (periodStartPrevious == periodStart && periodEndPrevious == periodEnd) {
                continue;
            }

            RandomVariableInterface stateVariablePrevious = getIntegratedVariance(timeIndex, periodStartPrevious, periodEndPrevious);
            RandomVariableInterface stateVariable = getIntegratedVariance(timeIndex, periodStart, periodEnd);
            if (Double.isNaN(stateVariable.getAverage()) || Double.isNaN(stateVariablePrevious.getAverage())) {
                throw new IllegalArgumentException();
            }

            // Shift in indexing
            drift[getNumberOfLibors() + componentIndex] = drift[getNumberOfLibors() + componentIndex].add(stateVariable.sub(stateVariablePrevious).div(timeStep));
        }

        return drift;
    }
}
public class Property { /** * Call the getter Method or get value from Field . * @ param pvObject Object , on which the value is get */ public Object executeGetValue ( Object pvObject ) throws IllegalArgumentException , IllegalAccessException , InvocationTargetException { } }
Object lvReturnValue = null ; AccessController . doPrivileged ( new AccessiblePrivilegedAction ( accessibleObject ) ) ; if ( propertyType == PROPERTY_TYPE_METHOD ) { lvReturnValue = ( ( Method ) accessibleObject ) . invoke ( pvObject ) ; } else { lvReturnValue = ( ( Field ) accessibleObject ) . get ( pvObject ) ; } return lvReturnValue ;
public class TFDictionary { /** * 获取频次 * @ param key * @ return */ public int getFrequency ( String key ) { } }
TermFrequency termFrequency = get ( key ) ; if ( termFrequency == null ) return 0 ; return termFrequency . getFrequency ( ) ;
public class ProfileIndexFrameWriter { /** * Adds " All Classes " link for the top of the left - hand frame page to the * documentation tree . * @ param div the Content object to which the all classes link should be added */ protected void addAllClassesLink ( Content div ) { } }
Content linkContent = getHyperLink ( DocPaths . ALLCLASSES_FRAME , allclassesLabel , "" , "packageFrame" ) ; Content span = HtmlTree . SPAN ( linkContent ) ; div . addContent ( span ) ;
public class EventMention {
    /**
     * Setter for event_ref (JCas-generated feature setter).
     *
     * @generated
     * @param v value to set into the feature
     */
    public void setEvent_ref(Event v) {
        // Generated type-system check: verify the feature exists before writing.
        if (EventMention_Type.featOkTst && ((EventMention_Type) jcasType).casFeat_event_ref == null)
            jcasType.jcas.throwFeatMissing("event_ref", "de.julielab.jules.types.ace.EventMention");
        jcasType.ll_cas.ll_setRefValue(addr, ((EventMention_Type) jcasType).casFeatCode_event_ref, jcasType.ll_cas.ll_getFSRef(v));
    }
}
public class RejectDependenciesFilter { /** * { @ inheritDoc } * @ see org . jboss . shrinkwrap . resolver . api . maven . filter . MavenResolutionFilter # accepts ( org . jboss . shrinkwrap . resolver . api . maven . coordinate . MavenDependency , * java . util . List , java . util . List ) */ @ Override public boolean accepts ( final MavenDependency dependency , final List < MavenDependency > dependenciesForResolution , final List < MavenDependency > dependencyAncestors ) { } }
if ( bannedDependencies . contains ( dependency ) ) { return false ; } if ( rejectTransitives ) { if ( dependencyAncestors != null && dependencyAncestors . size ( ) != 0 ) { return dependencyAncestors . get ( 0 ) . equals ( dependency ) ; } } return true ;
public class RebondTool { /** * Rebonds one atom by looking up nearby atom using the binary space partition tree . */ private void bondAtom ( IAtomContainer container , IAtom atom ) { } }
double myCovalentRadius = atom . getCovalentRadius ( ) ; double searchRadius = myCovalentRadius + maxCovalentRadius + bondTolerance ; Point tupleAtom = new Point ( atom . getPoint3d ( ) . x , atom . getPoint3d ( ) . y , atom . getPoint3d ( ) . z ) ; for ( Bspt . EnumerateSphere e = bspt . enumHemiSphere ( tupleAtom , searchRadius ) ; e . hasMoreElements ( ) ; ) { IAtom atomNear = ( ( TupleAtom ) e . nextElement ( ) ) . getAtom ( ) ; if ( ! atomNear . equals ( atom ) && container . getBond ( atom , atomNear ) == null ) { boolean bonded = isBonded ( myCovalentRadius , atomNear . getCovalentRadius ( ) , e . foundDistance2 ( ) ) ; if ( bonded ) { IBond bond = atom . getBuilder ( ) . newInstance ( IBond . class , atom , atomNear , IBond . Order . SINGLE ) ; container . addBond ( bond ) ; } } }
public class GinjectorBindingsOutputter { /** * < http : / / code . google . com / p / google - gin / issues / detail ? id = 156 > . */ private void writeInitializers ( GinjectorBindings bindings , StringBuilder initializeEagerSingletonsBody , StringBuilder initializeStaticInjectionsBody , SourceWriteUtil sourceWriteUtil , SourceWriter writer ) { } }
if ( bindings . hasEagerSingletonBindingInSubtree ( ) ) { sourceWriteUtil . writeMethod ( writer , "public void initializeEagerSingletons()" , initializeEagerSingletonsBody . toString ( ) ) ; } if ( bindings . hasStaticInjectionRequestInSubtree ( ) ) { sourceWriteUtil . writeMethod ( writer , "public void initializeStaticInjections()" , initializeStaticInjectionsBody . toString ( ) ) ; }
public class SmtpRequests { /** * Creates a { @ code RCPT } request . */ public static SmtpRequest rcpt ( CharSequence recipient , CharSequence ... rcptParameters ) { } }
ObjectUtil . checkNotNull ( recipient , "recipient" ) ; if ( rcptParameters == null || rcptParameters . length == 0 ) { return new DefaultSmtpRequest ( SmtpCommand . RCPT , "TO:<" + recipient + '>' ) ; } else { List < CharSequence > params = new ArrayList < CharSequence > ( rcptParameters . length + 1 ) ; params . add ( "TO:<" + recipient + '>' ) ; for ( CharSequence param : rcptParameters ) { params . add ( param ) ; } return new DefaultSmtpRequest ( SmtpCommand . RCPT , params ) ; }
public class Tanimoto { /** * Evaluates Tanimoto coefficient for two bit sets . * @ param bitset1 A bitset ( such as a fingerprint ) for the first molecule * @ param bitset2 A bitset ( such as a fingerprint ) for the second molecule * @ return The Tanimoto coefficient * @ throws org . openscience . cdk . exception . CDKException if bitsets are not of the same length */ public static float calculate ( BitSet bitset1 , BitSet bitset2 ) throws CDKException { } }
float _bitset1_cardinality = bitset1 . cardinality ( ) ; float _bitset2_cardinality = bitset2 . cardinality ( ) ; if ( bitset1 . size ( ) != bitset2 . size ( ) ) { throw new CDKException ( "Bitsets must have the same bit length" ) ; } BitSet one_and_two = ( BitSet ) bitset1 . clone ( ) ; one_and_two . and ( bitset2 ) ; float _common_bit_count = one_and_two . cardinality ( ) ; return _common_bit_count / ( _bitset1_cardinality + _bitset2_cardinality - _common_bit_count ) ;
public class SeaGlassTabbedPaneUI { /** * Layout label text for a tab . * @ param ss the SynthContext . * @ param tabPlacement the side the tabs are on . * @ param metrics the font metrics . * @ param tabIndex the index of the tab to lay out . * @ param title the text for the label , if any . * @ param icon the icon for the label , if any . * @ param tabRect Rectangle to layout text and icon in . * @ param iconRect Rectangle to place icon bounds in * @ param textRect Rectangle to place text in * @ param isSelected is the tab selected ? */ protected void layoutLabel ( SeaGlassContext ss , int tabPlacement , FontMetrics metrics , int tabIndex , String title , Icon icon , Rectangle tabRect , Rectangle iconRect , Rectangle textRect , boolean isSelected ) { } }
View v = getTextViewForTab ( tabIndex ) ; if ( v != null ) { tabPane . putClientProperty ( "html" , v ) ; } textRect . x = textRect . y = iconRect . x = iconRect . y = 0 ; ss . getStyle ( ) . getGraphicsUtils ( ss ) . layoutText ( ss , metrics , title , icon , SwingUtilities . CENTER , SwingUtilities . CENTER , SwingUtilities . LEADING , SwingUtilities . TRAILING , tabRect , iconRect , textRect , textIconGap ) ; tabPane . putClientProperty ( "html" , null ) ; int xNudge = getTabLabelShiftX ( tabPlacement , tabIndex , isSelected ) ; int yNudge = getTabLabelShiftY ( tabPlacement , tabIndex , isSelected ) ; iconRect . x += xNudge ; iconRect . y += yNudge ; textRect . x += xNudge ; textRect . y += yNudge ;
public class Utils { /** * Just like { @ link String # intern ( ) String . intern } , except it generates * flyweights for any kind of object , and it does not prevent them from * being garbage collected . Calling intern on a String does not use the * same String pool used by String . intern because those Strings are not * always garbage collected . Some virtual machines free up Strings from the * interned String pool , others do not . * For objects that do not customize the hashCode and equals methods , * calling intern is not very useful because the object returned will * always be the same as the one passed in . * The object type returned from intern is guaranteed to be exactly the * same type as the one passed in . Calling intern on null returns null . * @ param obj Object to intern * @ return Interned object . * @ see FlyweightSet */ public static synchronized Object intern ( Object obj ) { } }
FlyweightSet set ; if ( ( set = cFlyweightSet ) == null ) { cFlyweightSet = set = new FlyweightSet ( ) ; } return set . put ( obj ) ;
public class JsonRpcControl { /** * 接收远端的调用请求 , 并将回复执行结果 。 * @ param bytes 接收到的数据 * @ param transport { @ link Transport } 实例 * @ throws IOException * @ throws WsonrpcException */ public void receiveRequest ( byte [ ] bytes , Transport transport ) throws IOException , WsonrpcException { } }
JsonRpcResponse resp ; try { JsonRpcMessage msg = receive ( bytes ) ; if ( msg instanceof JsonRpcRequest ) { JsonRpcRequest req = ( JsonRpcRequest ) msg ; resp = execute ( req ) ; } else { resp = new JsonRpcResponse ( null , JsonRpcError . invalidRequestError ( null ) ) ; } } catch ( JsonException e ) { resp = new JsonRpcResponse ( null , JsonRpcError . parseError ( e ) ) ; } catch ( IOException e ) { resp = new JsonRpcResponse ( null , JsonRpcError . internalError ( e ) ) ; } transmit ( transport , resp ) ;
public class ResourceAdapterParser {
    /**
     * {@inheritDoc}
     *
     * Parses a resource-adapters descriptor from the stream. Returns the
     * parsed {@link Activations}, or null when the first tag is an end
     * element (document effectively empty).
     */
    public Activations parse ( XMLStreamReader reader ) throws Exception { } }
Activations adapters = null ;
// Advance to the first tag event.
int iterate ;
try {
    iterate = reader . nextTag ( ) ;
} catch ( XMLStreamException e ) {
    // Found a non-tag event — go on. Non-tag content at the beginning is
    // normally a comment or DTD declaration, so retry once.
    // NOTE(review): a second consecutive non-tag event would propagate here.
    iterate = reader . nextTag ( ) ;
}
switch ( iterate ) {
    case END_ELEMENT : {
        // Should mean we're done, so ignore it (adapters stays null).
        break ;
    }
    case START_ELEMENT : {
        switch ( reader . getLocalName ( ) ) {
            case XML . ELEMENT_RESOURCE_ADAPTERS : {
                // Expected root element: delegate to the dedicated parser.
                adapters = parseResourceAdapters ( reader ) ;
                break ;
            }
            default :
                // Any other root element is a descriptor error.
                throw new ParserException ( bundle . unexpectedElement ( reader . getLocalName ( ) ) ) ;
        }
        break ;
    }
    default :
        // nextTag() only yields START_ELEMENT or END_ELEMENT; defensive guard.
        throw new IllegalStateException ( ) ;
}
return adapters ;
public class FormBeanModel { /** * Sets the collection of properties for a form bean to a new collection . */ public void updateProperties ( Collection newProps ) { } }
_properties = new ArrayList ( ) ; if ( newProps != null ) { _properties . addAll ( newProps ) ; }
public class UIData {
    /**
     * Row-state-preserved implementation taken from Mojarra.
     *
     * Moves this component to the given row: saves the per-row delta and
     * transient state of all descendants for the current row, updates the
     * data model and the request-scope "var" attribute, then restores the
     * saved (or pristine) descendant state for the new row.
     */
    private void setRowIndexRowStatePreserved ( int rowIndex ) { } }
if ( rowIndex < - 1 ) {
    throw new IllegalArgumentException ( "rowIndex is less than -1" ) ;
}
// No-op when we are already on the requested row.
if ( getRowIndex ( ) == rowIndex ) {
    return ;
}
FacesContext facesContext = getFacesContext ( ) ;
if ( _initialDescendantFullComponentState != null ) {
    // Just save the row: capture the full descendant state delta for the
    // row we are leaving, keyed by this component's clientId for that row.
    Map < String , Object > sm = saveFullDescendantComponentStates ( facesContext , null , getChildren ( ) . iterator ( ) , false ) ;
    if ( sm != null && ! sm . isEmpty ( ) ) {
        _rowDeltaStates . put ( getContainerClientId ( facesContext ) , sm ) ;
    }
    // Transient state is only meaningful while positioned on a real row.
    if ( getRowIndex ( ) != - 1 ) {
        _rowTransientStates . put ( getContainerClientId ( facesContext ) , saveTransientDescendantComponentStates ( facesContext , null , getChildren ( ) . iterator ( ) , false ) ) ;
    }
}
// Update to the new row index
// this . rowIndex = rowIndex ;
getStateHelper ( ) . put ( PropertyKeys . rowIndex , rowIndex ) ;
DataModel localModel = getDataModel ( ) ;
localModel . setRowIndex ( rowIndex ) ;
// if rowIndex is -1, clear the cached data model
if ( rowIndex == - 1 ) {
    setDataModel ( null ) ;
}
// Clear or expose the current row data as a request scope attribute
String var = getVar ( ) ;
if ( var != null ) {
    Map < String , Object > requestMap = getFacesContext ( ) . getExternalContext ( ) . getRequestMap ( ) ;
    if ( rowIndex == - 1 ) {
        // Leaving the table: stash whatever previously occupied "var".
        oldVar = requestMap . remove ( var ) ;
    } else if ( isRowAvailable ( ) ) {
        requestMap . put ( var , getRowData ( ) ) ;
    } else {
        // Row out of range: remove our value and restore the saved one.
        requestMap . remove ( var ) ;
        if ( null != oldVar ) {
            requestMap . put ( var , oldVar ) ;
            oldVar = null ;
        }
    }
}
if ( _initialDescendantFullComponentState != null ) {
    // Restore descendant component state for the row we just moved to.
    Object rowState = _rowDeltaStates . get ( getContainerClientId ( facesContext ) ) ;
    if ( rowState == null ) {
        // Restore as original (pristine) state — no delta recorded for this row.
        restoreFullDescendantComponentStates ( facesContext , getChildren ( ) . iterator ( ) , _initialDescendantFullComponentState , false ) ;
    } else {
        // Restore first original and then apply the recorded delta on top.
        restoreFullDescendantComponentDeltaStates ( facesContext , getChildren ( ) . iterator ( ) , rowState , _initialDescendantFullComponentState , false ) ;
    }
    if ( getRowIndex ( ) == - 1 ) {
        // No current row: reset transient state to empty.
        restoreTransientDescendantComponentStates ( facesContext , getChildren ( ) . iterator ( ) , null , false ) ;
    } else {
        rowState = _rowTransientStates . get ( getContainerClientId ( facesContext ) ) ;
        if ( rowState == null ) {
            restoreTransientDescendantComponentStates ( facesContext , getChildren ( ) . iterator ( ) , null , false ) ;
        } else {
            restoreTransientDescendantComponentStates ( facesContext , getChildren ( ) . iterator ( ) , ( Map < String , Object > ) rowState , false ) ;
        }
    }
}
public class TransactionRomanticSnapshotBuilder { protected void setupTableCommandExp ( StringBuilder sb , RomanticTransaction tx ) { } }
final Map < String , Set < String > > tableCommandMap = tx . getReadOnlyTableCommandMap ( ) ; if ( ! tableCommandMap . isEmpty ( ) ) { final StringBuilder mapSb = new StringBuilder ( ) ; mapSb . append ( "map:{" ) ; int index = 0 ; for ( Entry < String , Set < String > > entry : tableCommandMap . entrySet ( ) ) { final String tableName = entry . getKey ( ) ; final Set < String > commandSet = entry . getValue ( ) ; if ( index > 0 ) { mapSb . append ( " ; " ) ; } mapSb . append ( tableName ) ; mapSb . append ( " = list:{" ) . append ( Srl . connectByDelimiter ( commandSet , " ; " ) ) . append ( "}" ) ; ++ index ; } mapSb . append ( "}" ) ; sb . append ( ", " ) . append ( mapSb . toString ( ) ) ; }
public class InviteReader {
    /**
     * Make the request to the Twilio API to perform the read.
     *
     * @param client TwilioRestClient with which to make the request
     * @return Invite ResourceSet
     */
    @Override
    public ResourceSet < Invite > read ( final TwilioRestClient client ) { } }
// The ResourceSet pages lazily: only the first page is fetched here,
// subsequent pages are requested through the client as iteration proceeds.
return new ResourceSet < > ( this , client , firstPage ( client ) ) ;
public class SmartBinder { /** * Append the given argument to the argument list , assigning it the * given name . * @ param name the name of the new argument * @ param value the value of the new argument * @ return a new SmartBinder with the append applied */ public SmartBinder append ( String name , Object value ) { } }
return new SmartBinder ( this , signature ( ) . appendArg ( name , value . getClass ( ) ) , binder . append ( value ) ) ;
public class ObjectFactory {
    /**
     * Create an instance of {@link JAXBElement}{@code <}{@link AlternateOf}{@code >}
     * wrapping the given value under the {@code prov:alternateOf} element name.
     */
    @ XmlElementDecl ( namespace = PROV_NS , name = "alternateOf" ) public JAXBElement < AlternateOf > createAlternateOf ( AlternateOf value ) { } }
// Element name and declared type are fixed by the @XmlElementDecl above;
// the null argument means there is no JAXB scope (global element).
return new JAXBElement < AlternateOf > ( _AlternateOf_QNAME , AlternateOf . class , null , value ) ;
public class DescribeTagsResult { /** * Information about the tags . * @ param resourceTags * Information about the tags . */ public void setResourceTags ( java . util . Collection < ResourceTag > resourceTags ) { } }
if ( resourceTags == null ) { this . resourceTags = null ; return ; } this . resourceTags = new com . amazonaws . internal . SdkInternalList < ResourceTag > ( resourceTags ) ;
public class UniverseApi { /** * Get system jumps Get the number of jumps in solar systems within the last * hour ending at the timestamp of the Last - Modified header , excluding * wormhole space . Only systems with jumps will be listed - - - This route is * cached for up to 3600 seconds * @ param datasource * The server name you would like data from ( optional , default to * tranquility ) * @ param ifNoneMatch * ETag from a previous request . A 304 will be returned if this * matches the current ETag ( optional ) * @ return ApiResponse & lt ; List & lt ; SystemJumpsResponse & gt ; & gt ; * @ throws ApiException * If fail to call the API , e . g . server error or cannot * deserialize the response body */ public ApiResponse < List < SystemJumpsResponse > > getUniverseSystemJumpsWithHttpInfo ( String datasource , String ifNoneMatch ) throws ApiException { } }
com . squareup . okhttp . Call call = getUniverseSystemJumpsValidateBeforeCall ( datasource , ifNoneMatch , null ) ; Type localVarReturnType = new TypeToken < List < SystemJumpsResponse > > ( ) { } . getType ( ) ; return apiClient . execute ( call , localVarReturnType ) ;
public class MainForm { /** * Creates a new Mixer for playback * @ since 01.07.2006 * @ return */ private Mixer createNewMixer ( ) { } }
Mixer mixer = getCurrentContainer ( ) . createNewMixer ( ) ; if ( mixer != null ) { mixer . setAudioProcessor ( audioProcessor ) ; mixer . setVolume ( currentVolume ) ; mixer . setBalance ( currentBalance ) ; mixer . setSoundOutputStream ( getSoundOutputStream ( ) ) ; getSeekBarPanel ( ) . setCurrentMixer ( mixer ) ; } return mixer ;
public class AWSOpsWorksClient { /** * Creates an instance in a specified stack . For more information , see < a * href = " http : / / docs . aws . amazon . com / opsworks / latest / userguide / workinginstances - add . html " > Adding an Instance to a * Layer < / a > . * < b > Required Permissions < / b > : To use this action , an IAM user must have a Manage permissions level for the stack , * or an attached policy that explicitly grants permissions . For more information on user permissions , see < a * href = " http : / / docs . aws . amazon . com / opsworks / latest / userguide / opsworks - security - users . html " > Managing User * Permissions < / a > . * @ param createInstanceRequest * @ return Result of the CreateInstance operation returned by the service . * @ throws ValidationException * Indicates that a request was not valid . * @ throws ResourceNotFoundException * Indicates that a resource was not found . * @ sample AWSOpsWorks . CreateInstance * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / opsworks - 2013-02-18 / CreateInstance " target = " _ top " > AWS API * Documentation < / a > */ @ Override public CreateInstanceResult createInstance ( CreateInstanceRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeCreateInstance ( request ) ;
public class Channel {
    /**
     * Query a peer in this channel for a Block by the blockNumber.
     * <STRONG>This method may not be thread safe if client context is changed!</STRONG>
     *
     * @param peer        the peer to send the request to
     * @param blockNumber index of the Block in the chain
     * @return the {@link BlockInfo} with the given blockNumber
     * @throws InvalidArgumentException
     * @throws ProposalException
     */
    public BlockInfo queryBlockByNumber ( Peer peer , long blockNumber ) throws InvalidArgumentException , ProposalException { } }
// Delegate to the multi-peer overload with a one-element peer set.
return queryBlockByNumber ( Collections . singleton ( peer ) , blockNumber ) ;
public class SlackBot { /** * Invoked when bot receives an event of type message with text satisfying * the pattern { @ code ( [ a - z ] { 2 } ) ( \ d + ) ( [ a - z ] { 2 } ) } . For example , * messages like " ab12xy " or " ab2bc " etc will invoke this method . * @ param session * @ param event */ @ Controller ( events = EventType . MESSAGE , pattern = "^([a-z ]{2})(\\d+)([a-z ]{2})$" ) public void onReceiveMessage ( WebSocketSession session , Event event , Matcher matcher ) { } }
reply ( session , event , "First group: " + matcher . group ( 0 ) + "\n" + "Second group: " + matcher . group ( 1 ) + "\n" + "Third group: " + matcher . group ( 2 ) + "\n" + "Fourth group: " + matcher . group ( 3 ) ) ;
public class Vector4f { /** * / * ( non - Javadoc ) * @ see org . joml . Vector4fc # distance ( float , float , float , float ) */ public float distance ( float x , float y , float z , float w ) { } }
return ( float ) Math . sqrt ( distanceSquared ( x , y , z , w ) ) ;
public class ConfigurationConversionService { /** * Determines the charge mode based on the first connector , because VAS does not support multiple protocols for a * single charging station . If no charge mode can be determined UNSPECIFIED will be returned . * @ param evses list of EVSEs . * @ return charge mode or UNSPECIFIED if no specific charge mode can be determined . */ public ChargeMode getChargeModeFromEvses ( Set < Evse > evses ) { } }
ChargeMode chargeMode = ChargeMode . UNSPECIFIED ; for ( Evse evse : evses ) { if ( ! evse . getConnectors ( ) . isEmpty ( ) ) { chargeMode = ChargeMode . fromChargingProtocol ( evse . getConnectors ( ) . get ( 0 ) . getChargingProtocol ( ) ) ; break ; } } return chargeMode ;
public class OtfMessageDecoder {
    /**
     * Decode a message from the provided buffer based on the message schema
     * described with IR {@link Token}s: root fields first, then repeating
     * groups, then variable-length data.
     *
     * @param buffer        containing the encoded message
     * @param offset        at which the message encoding starts in the buffer
     * @param actingVersion of the encoded message for dealing with extension fields
     * @param blockLength   of the root message fields
     * @param msgTokens     in IR format describing the message structure
     * @param listener      to callback for decoding the primitive values as
     *                      discovered in the structure
     * @return the index in the underlying buffer after decoding
     */
    public static int decode ( final DirectBuffer buffer , final int offset , final int actingVersion , final int blockLength , final List < Token > msgTokens , final TokenListener listener ) { } }
// Token 0 is the BEGIN_MESSAGE token.
listener . onBeginMessage ( msgTokens . get ( 0 ) ) ;
int i = offset ;
final int numTokens = msgTokens . size ( ) ;
// Walk the fixed-size root fields, starting after the BEGIN_MESSAGE token.
final int tokenIdx = decodeFields ( buffer , i , actingVersion , msgTokens , 1 , numTokens , listener ) ;
// Regardless of how many fields were decoded, the root block always
// occupies exactly blockLength bytes.
i += blockLength ;
// decodeGroups packs the advanced buffer offset and the next token index
// into a single long; bufferOffset()/tokenIndex() unpack it.
final long packedValues = decodeGroups ( buffer , i , actingVersion , msgTokens , tokenIdx , numTokens , listener ) ;
i = decodeData ( buffer , bufferOffset ( packedValues ) , msgTokens , tokenIndex ( packedValues ) , numTokens , actingVersion , listener ) ;
// Last token is the END_MESSAGE token.
listener . onEndMessage ( msgTokens . get ( numTokens - 1 ) ) ;
return i ;
public class AggregationIterator { /** * Creates a new iterator for a { @ link SpanGroup } . * @ param spans Spans in a group . * @ param start _ time Any data point strictly before this timestamp will be * ignored . * @ param end _ time Any data point strictly after this timestamp will be * ignored . * @ param aggregator The aggregation function to use . * @ param method Interpolation method to use when aggregating time series * @ param downsampler The downsampling specifier to use ( cannot be null ) * @ param query _ start Start of the actual query * @ param query _ end End of the actual query * @ param rate If { @ code true } , the rate of the series will be used instead * of the actual values . * @ param rate _ options Specifies the optional additional rate calculation * options . * @ return an AggregationIterator * @ since 2.3 */ public static AggregationIterator create ( final List < Span > spans , final long start_time , final long end_time , final Aggregator aggregator , final Interpolation method , final DownsamplingSpecification downsampler , final long query_start , final long query_end , final boolean rate , final RateOptions rate_options ) { } }
final int size = spans . size ( ) ; final SeekableView [ ] iterators = new SeekableView [ size ] ; for ( int i = 0 ; i < size ; i ++ ) { SeekableView it ; if ( downsampler == null || downsampler == DownsamplingSpecification . NO_DOWNSAMPLER ) { it = spans . get ( i ) . spanIterator ( ) ; } else { it = spans . get ( i ) . downsampler ( start_time , end_time , downsampler , query_start , query_end ) ; } if ( rate ) { it = new RateSpan ( it , rate_options ) ; } iterators [ i ] = it ; } return new AggregationIterator ( iterators , start_time , end_time , aggregator , method , rate ) ;
public class OpenWatcomLibrarian {
    /**
     * Builds a library: removes any stale output first, then delegates the
     * actual archiving to the base linker implementation.
     *
     * @param task        task
     * @param outputFile  generated library
     * @param sourceFiles object files
     * @param config      linker configuration
     */
    @ Override public void link ( final CCTask task , final File outputFile , final String [ ] sourceFiles , final CommandLineLinkerConfiguration config ) { } }
// Delete any existing library. The boolean result is deliberately ignored:
// "already absent" and "successfully deleted" are equivalent here.
outputFile . delete ( ) ;
// Build a new library via the base class's link implementation.
super . link ( task , outputFile , sourceFiles , config ) ;
public class EscapeXML { /** * Copy the content of a Reader into the specified JSPWriter escaping characters if needed . * @ param src the Reader to read from * @ param escapeXml if true , escape characters * @ param out the JspWriter to emit to * @ throws IOException if there was a problem emitting the content */ public static void emit ( Reader src , boolean escapeXml , JspWriter out ) throws IOException { } }
int bufferSize = out . getBufferSize ( ) ; if ( bufferSize == 0 ) { bufferSize = 4096 ; } char [ ] buffer = new char [ bufferSize ] ; int count ; while ( ( count = src . read ( buffer ) ) > 0 ) { if ( escapeXml ) { emit ( buffer , 0 , count , out ) ; } else { out . write ( buffer , 0 , count ) ; } }
public class LoadBalancerTlsCertificateRenewalSummaryMarshaller { /** * Marshall the given parameter object . */ public void marshall ( LoadBalancerTlsCertificateRenewalSummary loadBalancerTlsCertificateRenewalSummary , ProtocolMarshaller protocolMarshaller ) { } }
if ( loadBalancerTlsCertificateRenewalSummary == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( loadBalancerTlsCertificateRenewalSummary . getRenewalStatus ( ) , RENEWALSTATUS_BINDING ) ; protocolMarshaller . marshall ( loadBalancerTlsCertificateRenewalSummary . getDomainValidationOptions ( ) , DOMAINVALIDATIONOPTIONS_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class NetworkConfig { /** * Returns the specified JsonValue as a JsonObject , or null if it ' s not an object */ private static JsonObject getJsonValueAsObject ( JsonValue value ) { } }
return ( value != null && value . getValueType ( ) == ValueType . OBJECT ) ? value . asJsonObject ( ) : null ;