signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class CellFinder {
    /**
     * Specifies the starting position for the search.
     *
     * @param address the cell address to start from
     * @return this instance, for method chaining
     * @throws NullPointerException if {@literal address == null}
     */
    public CellFinder startPosition(final CellPosition address) {
        ArgUtils.notNull(address, "address");
        // Delegate to the (column, row) overload.
        return startPosition(address.getColumn(), address.getRow());
    }
}
public class ERiCNeighborPredicate {
    /**
     * Full instantiation interface: precomputes a filtered PCA result for every
     * object in the relation (based on its k nearest neighbors) and wraps the
     * results in an {@link Instance}.
     *
     * @param database Database
     * @param relation Relation
     * @return Instance holding the per-object PCA storage
     */
    public Instance instantiate(Database database, Relation<V> relation) {
        // Euclidean distance is hard-coded for the kNN preprocessing step.
        DistanceQuery<V> dq = database.getDistanceQuery(relation, EuclideanDistanceFunction.STATIC);
        KNNQuery<V> knnq = database.getKNNQuery(dq, settings.k);
        // Temporary, hot-access storage for the per-object PCA results.
        WritableDataStore<PCAFilteredResult> storage = DataStoreUtil.makeStorage(relation.getDBIDs(),
                DataStoreFactory.HINT_HOT | DataStoreFactory.HINT_TEMP, PCAFilteredResult.class);
        PCARunner pca = settings.pca;
        EigenPairFilter filter = settings.filter;
        Duration time = LOG.newDuration(this.getClass().getName() + ".preprocessing-time").begin();
        // Progress reporting only when verbose logging is enabled.
        FiniteProgress progress = LOG.isVerbose() ? new FiniteProgress(this.getClass().getName(), relation.size(), LOG) : null;
        for (DBIDIter iditer = relation.iterDBIDs(); iditer.valid(); iditer.advance()) {
            // Run PCA on each object's k-neighborhood and keep the filtered eigen decomposition.
            DoubleDBIDList ref = knnq.getKNNForDBID(iditer, settings.k);
            PCAResult pcares = pca.processQueryResult(ref, relation);
            storage.put(iditer, new PCAFilteredResult(pcares.getEigenPairs(), filter.filter(pcares.getEigenvalues()), 1., 0.));
            LOG.incrementProcessed(progress);
        }
        LOG.ensureCompleted(progress);
        LOG.statistics(time.end());
        return new Instance(relation.getDBIDs(), storage, relation);
    }
}
public class ApiOvhCloud {
    /**
     * Usage information on your project.
     * REST: GET /cloud/project/{serviceName}/usage/history
     *
     * @param serviceName [required] Service name
     * @param from [required] Filter results having date superior to from
     * @param to [required] Filter results having date inferior to
     * @throws IOException if the HTTP call fails
     */
    public ArrayList<OvhUsageHistory> project_serviceName_usage_history_GET(String serviceName, Date from, Date to) throws IOException {
        String qPath = "/cloud/project/{serviceName}/usage/history";
        // Substitute the path variable, then append the query parameters.
        StringBuilder sb = path(qPath, serviceName);
        query(sb, "from", from);
        query(sb, "to", to);
        String resp = exec(qPath, "GET", sb.toString(), null);
        // t20 is the pre-built type token for ArrayList<OvhUsageHistory>.
        return convertTo(resp, t20);
    }
}
public class SqlContextImpl { /** * { @ inheritDoc } * @ see jp . co . future . uroborosql . parser . TransformContext # getParam ( java . lang . String ) */ @ Override public Parameter getParam ( final String paramName ) { } }
Parameter param = getBindParameter ( paramName ) ; if ( param == null ) { Map < String , Parameter > constParams = getConstParameterMap ( ) ; if ( constParams != null ) { param = constParams . get ( paramName . toUpperCase ( ) ) ; } } return param ;
public class EventhubDataWriter { /** * A signature which contains the duration . * After the duration is expired , the signature becomes invalid */ public void refreshSignature ( ) { } }
if ( postStartTimestamp == 0 || ( System . nanoTime ( ) - postStartTimestamp ) > Duration . ofMinutes ( sigExpireInMinute ) . toNanos ( ) ) { // generate signature try { signature = SharedAccessSignatureTokenProvider . generateSharedAccessSignature ( sasKeyName , sasKey , namespaceName , Duration . ofMinutes ( sigExpireInMinute ) ) ; postStartTimestamp = System . nanoTime ( ) ; LOG . info ( "Signature is refreshing: " + signature ) ; } catch ( Exception e ) { throw new RuntimeException ( e ) ; } }
public class IOUtil { /** * Writing the specified contents to the specified OutputStream using an internal buffer . Flushing the stream when * completed . Caller is responsible for opening and closing the specified stream . * @ param output * The OutputStream * @ param content * The content to write to the specified stream * @ throws IOException * If a problem occured during any I / O operations */ public static void bufferedWriteWithFlush ( final OutputStream output , final byte [ ] content ) throws IOException { } }
final int size = 4096 ; int offset = 0 ; while ( content . length - offset > size ) { output . write ( content , offset , size ) ; offset += size ; } output . write ( content , offset , content . length - offset ) ; output . flush ( ) ;
public class ProxyUtil {
    /**
     * Creates a {@link Proxy} of the given {@code proxyInterface} that uses the
     * given {@link JsonRpcClient} over the socket's streams.
     *
     * @param <T> the proxy type
     * @param classLoader the {@link ClassLoader}
     * @param proxyInterface the interface to proxy
     * @param client the {@link JsonRpcClient}
     * @param socket the {@link Socket}
     * @return the proxied interface
     * @throws IOException if an I/O error occurs when creating the input stream,
     *         the output stream, the socket is closed, the socket is not connected,
     *         or the socket input has been shutdown using shutdownInput()
     */
    @SuppressWarnings("WeakerAccess")
    public static <T> T createClientProxy(ClassLoader classLoader, Class<T> proxyInterface, final JsonRpcClient client, Socket socket) throws IOException {
        // Delegate to the stream-based overload with the socket's I/O streams.
        return createClientProxy(classLoader, proxyInterface, client, socket.getInputStream(), socket.getOutputStream());
    }
}
public class Base64Serializer { /** * Deserialze base 64 encoded string data to Object . */ public Object deserialize ( String data ) { } }
if ( ( data == null ) || ( data . length ( ) == 0 ) ) { return null ; } ObjectInputStream ois = null ; ByteArrayInputStream bis = null ; try { bis = new ByteArrayInputStream ( Base64 . decodeBase64 ( data . getBytes ( ) ) ) ; ois = new ObjectInputStream ( bis ) ; return ois . readObject ( ) ; } catch ( ClassNotFoundException e ) { LOGGER . error ( "Can't deserialize data from Base64" , e ) ; throw new IllegalArgumentException ( e ) ; } catch ( IOException e ) { LOGGER . error ( "Can't deserialize data from Base64" , e ) ; throw new IllegalArgumentException ( e ) ; } catch ( Exception e ) { LOGGER . error ( "Can't deserialize data from Base64" , e ) ; throw new IllegalArgumentException ( e ) ; } finally { try { if ( ois != null ) { ois . close ( ) ; } } catch ( Exception e ) { LOGGER . error ( "Can't close ObjetInputStream used for deserialize data from Base64" , e ) ; } }
public class Op {
    /**
     * Creates an <i>operation expression</i> on the specified target list.
     *
     * @param <T> the element type of the list
     * @param target the target list on which the expression will execute
     * @return an operator, ready for chaining
     */
    public static <T> Level0ListOperator<List<T>, T> onList(final List<T> target) {
        // Wrap the list in an execution target normalised as a LIST.
        return new Level0ListOperator<List<T>, T>(ExecutionTarget.forOp(target, Normalisation.LIST));
    }
}
public class CheckTypeQualifiers {
    /**
     * Visits a class, but only runs the (expensive) type-qualifier analysis when
     * at least one non-@Nonnull type qualifier is known. The decision is computed
     * once on the first visit and cached in {@code shouldRunAnalysis}.
     *
     * @see edu.umd.cs.findbugs.bcel.CFGDetector#visitClass(edu.umd.cs.findbugs.classfile.ClassDescriptor)
     */
    @Override
    public void visitClass(ClassDescriptor classDescriptor) throws CheckedAnalysisException {
        if (!checked) {
            checked = true;
            Collection<TypeQualifierValue<?>> allKnownTypeQualifiers = TypeQualifierValue.getAllKnownTypeQualifiers();
            int size = allKnownTypeQualifiers.size();
            if (size == 1) {
                // A single qualifier that is exactly @Nonnull is handled elsewhere,
                // so only run if it is something else.
                TypeQualifierValue<?> value = Util.first(allKnownTypeQualifiers);
                if (!value.typeQualifier.getClassName().equals(NONNULL_ANNOTATION)) {
                    shouldRunAnalysis = true;
                }
            } else if (size > 1) {
                shouldRunAnalysis = true;
            }
        }
        if (shouldRunAnalysis) {
            super.visitClass(classDescriptor);
        }
    }
}
public class ForeignKeyConstraint {
    /**
     * Creates a single-attribute foreign key.
     *
     * @param name the constraint name
     * @param attribute the referencing attribute
     * @param reference the referenced attribute
     * @return the constructed foreign-key constraint
     */
    public static ForeignKeyConstraint of(String name, Attribute attribute, Attribute reference) {
        // Both relations must be database relations; the cast is assumed safe by
        // the caller's contract — TODO confirm callers never pass view attributes.
        return new Builder((DatabaseRelationDefinition) attribute.getRelation(),
                (DatabaseRelationDefinition) reference.getRelation()).add(attribute, reference).build(name);
    }
}
public class Util { /** * Replaces any occurnace of the specified " oldChar " with the nes string . * This is used to replace occurances if ' $ ' in corba names since ' $ ' is a special character */ private static String replace ( String source , char oldChar , String newString ) { } }
StringBuilder StringBuilder = new StringBuilder ( source . length ( ) ) ; for ( int i = 0 ; i < source . length ( ) ; i ++ ) { char c = source . charAt ( i ) ; if ( c == oldChar ) { StringBuilder . append ( newString ) ; } else { StringBuilder . append ( c ) ; } } return StringBuilder . toString ( ) ;
public class cachepolicy_binding { /** * Use this API to fetch cachepolicy _ binding resource of given name . */ public static cachepolicy_binding get ( nitro_service service , String policyname ) throws Exception { } }
cachepolicy_binding obj = new cachepolicy_binding ( ) ; obj . set_policyname ( policyname ) ; cachepolicy_binding response = ( cachepolicy_binding ) obj . get_resource ( service ) ; return response ;
public class CacheableWorkspaceDataManager {
    /**
     * Registers the cluster-wide remote commands (suspend, resume, and the
     * responsibility-for-resuming query) with the RPC service. Each command id is
     * made unique per workspace by appending the data container's unique name.
     * No-op when no RPC service is configured.
     */
    private void initRemoteCommands() {
        if (rpcService != null) {
            // register commands
            // Suspend this workspace's data manager on the local node.
            suspend = rpcService.registerCommand(new RemoteCommand() {
                public String getId() {
                    return "org.exoplatform.services.jcr.impl.dataflow.persistent.CacheableWorkspaceDataManager-suspend-" + dataContainer.getUniqueName();
                }

                public Serializable execute(Serializable[] args) throws Throwable {
                    suspendLocally();
                    return null;
                }
            });
            // Resume this workspace's data manager on the local node.
            resume = rpcService.registerCommand(new RemoteCommand() {
                public String getId() {
                    return "org.exoplatform.services.jcr.impl.dataflow.persistent.CacheableWorkspaceDataManager-resume-" + dataContainer.getUniqueName();
                }

                public Serializable execute(Serializable[] args) throws Throwable {
                    resumeLocally();
                    return null;
                }
            });
            // Query whether this node is responsible for resuming.
            requestForResponsibleForResuming = rpcService.registerCommand(new RemoteCommand() {
                public String getId() {
                    return "org.exoplatform.services.jcr.impl.dataflow.persistent.CacheableWorkspaceDataManager" + "-requestForResponsibilityForResuming-" + dataContainer.getUniqueName();
                }

                public Serializable execute(Serializable[] args) throws Throwable {
                    return isResponsibleForResuming.get();
                }
            });
        }
    }
}
public class JBBPDslBuilder { /** * Add named double field . * @ param name name of the field , can be null for anonymous * @ return the builder instance , must not be null */ public JBBPDslBuilder Double ( final String name ) { } }
final Item item = new Item ( BinType . DOUBLE , name , this . byteOrder ) ; this . addItem ( item ) ; return this ;
public class Task { /** * call should not be synchronized since can run only once in queue cycle */ public void run ( ) { } }
if ( this . isActive ) { try { perform ( ) ; // notify listener if ( this . listener != null ) { this . listener . onTerminate ( ) ; } } catch ( Exception e ) { logger . error ( "Could not execute task " + this . taskId + ": " + e . getMessage ( ) , e ) ; if ( this . listener != null ) { listener . handlerError ( e ) ; } } }
public class Schema {
    /**
     * Adds a field with the field name and the type information. Required.
     * This method can be called multiple times. The call order of this method
     * defines also the order of the fields in a row.
     *
     * @param fieldName the field name
     * @param fieldType the type information of the field
     * @return this schema, for chaining
     */
    public Schema field(String fieldName, TypeInformation<?> fieldType) {
        // Delegate to the string-based overload using the serialized type string.
        field(fieldName, TypeStringUtils.writeTypeInfo(fieldType));
        return this;
    }
}
public class ShExGenerator { /** * Generate a definition for a referenced element * @ param sd Containing structure definition * @ param ed Inner element * @ return ShEx representation of element reference */ private String genInnerTypeDef ( StructureDefinition sd , ElementDefinition ed ) { } }
String path = ed . hasBase ( ) ? ed . getBase ( ) . getPath ( ) : ed . getPath ( ) ; ; ST element_reference = tmplt ( SHAPE_DEFINITION_TEMPLATE ) ; element_reference . add ( "resourceDecl" , "" ) ; // Not a resource element_reference . add ( "id" , path ) ; String comment = ed . getShort ( ) ; element_reference . add ( "comment" , comment == null ? " " : "# " + comment ) ; List < String > elements = new ArrayList < String > ( ) ; for ( ElementDefinition child : ProfileUtilities . getChildList ( sd , path , null ) ) elements . add ( genElementDefinition ( sd , child ) ) ; element_reference . add ( "elements" , StringUtils . join ( elements , "\n" ) ) ; return element_reference . render ( ) ;
public class Stream {
    /**
     * Zips together the "a" and "b" arrays until all of them run out of values.
     * Each pair of values is combined into a single value using the supplied
     * zipFunction.
     *
     * @param a first input array
     * @param b second input array
     * @param valueForNoneA value to fill if "a" runs out of values first
     * @param valueForNoneB value to fill if "b" runs out of values first
     * @param zipFunction combiner applied to each pair
     * @return a stream of combined values
     */
    public static <A, B, R> Stream<R> zip(final A[] a, final B[] b, final A valueForNoneA, final B valueForNoneB, final BiFunction<? super A, ? super B, R> zipFunction) {
        // Delegate to the iterator-based overload.
        return zip(ObjIteratorEx.of(a), ObjIteratorEx.of(b), valueForNoneA, valueForNoneB, zipFunction);
    }
}
public class RemoteTaskRunner {
    /**
     * Adds a task to the pending queue and kicks off processing of pending
     * tasks.
     *
     * @param task the task to enqueue
     * @return the work item tracking this task
     */
    @VisibleForTesting
    RemoteTaskRunnerWorkItem addPendingTask(final Task task) {
        log.info("Added pending task %s", task.getId());
        final RemoteTaskRunnerWorkItem taskRunnerWorkItem = new RemoteTaskRunnerWorkItem(task.getId(), task.getType(), null, null, task.getDataSource());
        // Payload must be registered before the work item so that
        // runPendingTasks() can resolve it — TODO confirm ordering requirement.
        pendingTaskPayloads.put(task.getId(), task);
        pendingTasks.put(task.getId(), taskRunnerWorkItem);
        runPendingTasks();
        return taskRunnerWorkItem;
    }
}
public class SegmentMeanShiftSearch { /** * Returns the Euclidean distance squared between the two vectors */ public static float distanceSq ( float [ ] a , float [ ] b ) { } }
float ret = 0 ; for ( int i = 0 ; i < a . length ; i ++ ) { float d = a [ i ] - b [ i ] ; ret += d * d ; } return ret ;
public class ConnectionWriteCompletedCallback {
    /**
     * Pokes the callback: if it is currently idle and work is available, claims
     * the work for this thread and performs it. Lock order (priorityQueue, then
     * this) must be preserved — presumably to match other paths that take both
     * locks; verify before changing.
     *
     * being F176003, F181603.2, D192359
     *
     * @throws SIConnectionDroppedException if the connection has been dropped
     */
    protected void proddle() throws SIConnectionDroppedException {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(this, tc, "proddle");
        boolean useThisThread = false;
        synchronized (priorityQueue) {
            synchronized (this) {
                if (idle) {
                    // Claim the work while holding both locks; stay idle only
                    // if no work was available.
                    useThisThread = isWorkAvailable();
                    idle = !useThisThread;
                }
            }
        }
        // Perform the work outside the locks.
        if (useThisThread) {
            doWork(false);
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(this, tc, "proddle");
    }
}
public class AbstractSoundUploadQuery { /** * Specify an individual tag associated with the sound . * @ param tag The tag associated with the sound * @ return The current { @ link UploadSound } instance */ @ SuppressWarnings ( "unchecked" ) public T tag ( final String tag ) { } }
if ( this . tags == null ) { this . tags = new HashSet < > ( ) ; } this . tags . add ( tag ) ; return ( T ) this ;
public class WebApplicationContext {
    /**
     * Destroys the context and releases all configuration state.
     * NOTE(review): super.destroy() runs before the isStarted() check, so the
     * parent is destroyed even when this then throws — confirm this ordering is
     * intentional before changing it.
     */
    public void destroy() {
        super.destroy();
        // A started context must be stopped before it may be destroyed.
        if (isStarted())
            throw new IllegalStateException();
        _defaultsDescriptor = null;
        _war = null;
        _configurationClassNames = null;
        if (_resourceAliases != null)
            _resourceAliases.clear();
        _resourceAliases = null;
        _contextListeners = null;
        if (_errorPages != null)
            _errorPages.clear();
        _errorPages = null;
    }
}
public class HadoopInputSplit {
    /**
     * Custom serialization hook: writes the default-serializable fields, then
     * the job configuration and the wrapped Hadoop input split using their own
     * Writable-style serialization.
     *
     * @param out the stream to serialize into
     * @throws IOException if writing fails
     */
    private void writeObject(ObjectOutputStream out) throws IOException {
        // serialize the parent fields and the final fields
        out.defaultWriteObject();
        // the job conf knows how to serialize itself
        jobConf.write(out);
        // write the input split
        hadoopInputSplit.write(out);
    }
}
public class ShowProducerAction {
    /**
     * Sets all stats on the decorator except the cumulated stat line.
     * Stats are sorted using the given sort type before being applied.
     *
     * @param statDecoratorBean {@link StatDecoratorBean} receiving the stats
     * @param allStatLines list of {@link StatLineAO}, all stats present in producer
     * @param sortType {@link StatBeanSortType} used for ordering
     */
    private void populateStats(final StatDecoratorBean statDecoratorBean, final List<StatLineAO> allStatLines, final StatBeanSortType sortType) {
        if (allStatLines == null || allStatLines.isEmpty()) {
            LOGGER.warn("Producer's stats are empty");
            return;
        }
        // Index of the cumulated line, which must be excluded from the output.
        final int cumulatedIndex = getCumulatedIndex(allStatLines);
        // stats
        int allStatLinesSize = allStatLines.size();
        final List<StatBean> statBeans = new ArrayList<>(allStatLinesSize);
        for (int i = 0; i < allStatLinesSize; i++) {
            if (i == cumulatedIndex)
                continue;
            final StatLineAO line = allStatLines.get(i);
            final List<StatValueAO> statValues = line.getValues();
            final StatBean statBean = new StatBean();
            statBean.setName(line.getStatName());
            statBean.setValues(statValues);
            statBeans.add(statBean);
        }
        // sort stat beans
        StaticQuickSorter.sort(statBeans, sortType);
        // set stats
        statDecoratorBean.setStats(statBeans);
    }
}
public class DescribeTrailsResult {
    /**
     * The list of trail objects.
     * <b>NOTE:</b> This method appends the values to the existing list (if any).
     * Use {@link #setTrailList(java.util.Collection)} or
     * {@link #withTrailList(java.util.Collection)} if you want to override the
     * existing values.
     *
     * @param trailList the list of trail objects
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public DescribeTrailsResult withTrailList(Trail... trailList) {
        // Lazily create the backing list sized for the incoming values.
        if (this.trailList == null) {
            setTrailList(new com.amazonaws.internal.SdkInternalList<Trail>(trailList.length));
        }
        for (Trail ele : trailList) {
            this.trailList.add(ele);
        }
        return this;
    }
}
public class Datatype_Builder { /** * Sets the value to be returned by { @ link Datatype # getRebuildableType ( ) } . * @ return this { @ code Builder } object * @ throws NullPointerException if { @ code rebuildableType } is null */ public Datatype . Builder setRebuildableType ( TypeClass rebuildableType ) { } }
this . rebuildableType = Objects . requireNonNull ( rebuildableType ) ; return ( Datatype . Builder ) this ;
public class Sherdog {
    /**
     * Gets a fighter via its Sherdog URL.
     *
     * @param sherdogUrl the Sherdog URL of the fighter
     * @return a Fighter and all his fights
     * @throws IOException if connecting to Sherdog fails
     * @throws ParseException if the page structure has changed
     * @throws SherdogParserException if anything related to the parser goes wrong
     */
    public Fighter getFighter(String sherdogUrl) throws IOException, ParseException, SherdogParserException {
        // Delegate to a fresh parser configured with this instance's picture
        // processor and time zone.
        return new FighterParser(pictureProcessor, zoneId).parse(sherdogUrl);
    }
}
public class ApplicationUtils { /** * ( non - Javadoc ) * @ see com . ibm . ws . webcontainer . security . WebAppSecurityConfigChangeListener # notifyWebAppSecurityConfigChanged ( com . ibm . ws . webcontainer . security . WebAppSecurityConfigChangeEvent ) */ @ Override public void notifyWebAppSecurityConfigChanged ( WebAppSecurityConfigChangeEvent event ) { } }
List < String > attributes = event . getModifiedAttributeList ( ) ; if ( isAppRestartRequired ( attributes ) ) { recycleApplications ( ) ; }
public class TopologyContext {
    /**
     * Register an IMetric instance. Storm will then call getValueAndReset on the
     * metric every timeBucketSizeInSecs and the returned value is sent to all
     * metrics consumers. You must call this during IBolt::prepare or
     * ISpout::open.
     *
     * @return The IMetric argument unchanged.
     */
    @SuppressWarnings("unchecked")
    public <T extends IMetric> T registerMetric(String name, T metric, int timeBucketSizeInSecs) {
        // Register a delegate wrapper; the caller keeps using the raw metric.
        MetricDelegate d = new MetricDelegate(metric);
        delegate.registerMetric(name, d, timeBucketSizeInSecs);
        return metric;
    }
}
public class CustomerSyncSelector {
    /**
     * Gets the dateTimeRange value for this CustomerSyncSelector.
     * Only return entities that have changed during the specified time range.
     * String format: {@code yyyyMMdd HHmmss <Timezone ID>} (for example,
     * {@code 20100609 150223 America/New_York}). See the
     * <a href="https://developers.google.com/adwords/api/docs/appendix/timezones">Timezones</a>
     * page for the complete list of Timezone IDs.
     * <span class="constraint Required">This field is required and should not
     * be {@code null}.</span>
     *
     * @return dateTimeRange
     */
    public com.google.api.ads.adwords.axis.v201809.cm.DateTimeRange getDateTimeRange() {
        return dateTimeRange;
    }
}
public class LayerableConfig {
    /**
     * Imports the layerable config from node.
     *
     * @param root The root reference (must not be <code>null</code>).
     * @return The layerable data.
     * @throws LionEngineException If unable to read node or invalid integer.
     */
    public static LayerableConfig imports(Xml root) {
        Check.notNull(root);
        // Read refresh and display layer indices from the layerable child node.
        final Xml node = root.getChild(NODE_LAYERABLE);
        final int layerRefresh = node.readInteger(ATT_REFRESH);
        final int layerDisplay = node.readInteger(ATT_DISPLAY);
        return new LayerableConfig(layerRefresh, layerDisplay);
    }
}
public class BucketInfoMarshaller {
    /**
     * Marshalls the given BucketInfo into the protocol marshaller.
     *
     * @param bucketInfo the object to marshall; must not be null
     * @param protocolMarshaller the marshaller receiving the fields
     * @throws SdkClientException if the argument is null or marshalling fails
     */
    public void marshall(BucketInfo bucketInfo, ProtocolMarshaller protocolMarshaller) {
        if (bucketInfo == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(bucketInfo.getBuckets(), BUCKETS_BINDING);
        } catch (Exception e) {
            // Wrap any failure in the SDK's client exception type.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class CommonOps_DDF3 {
    /**
     * <p>Performs the following operation:<br>
     * <br>
     * a = a - b<br>
     * a<sub>i</sub> = a<sub>i</sub> - b<sub>i</sub><br>
     *
     * @param a A Vector. Modified.
     * @param b A Vector. Not modified.
     */
    public static void subtractEquals(DMatrix3 a, DMatrix3 b) {
        // Element-wise in-place subtraction of the three components.
        a.a1 -= b.a1;
        a.a2 -= b.a2;
        a.a3 -= b.a3;
    }
}
public class AddThenHideOldStrategy { /** * Take in two resources . If one ( only ) is beta return the non beta one */ private RepositoryResource returnNonBetaResourceOrNull ( RepositoryResource res1 , RepositoryResource res2 ) { } }
if ( isBeta ( res1 ) && ! isBeta ( res2 ) ) { return res2 ; } else if ( ! isBeta ( res1 ) && isBeta ( res2 ) ) { return res1 ; } else { return null ; }
public class SuffixDictionary { /** * 查找是否有该后缀 * @ param suffix * @ return */ public int get ( String suffix ) { } }
suffix = reverse ( suffix ) ; Integer length = trie . get ( suffix ) ; if ( length == null ) return 0 ; return length ;
public class DefaultPageMounter {
    /**
     * {@inheritDoc}
     * A convenience method that uses a default coding strategy.
     *
     * @param path the URL path to mount
     * @param pageClass the page class served at that path
     */
    @Override
    public void addMountPoint(String path, Class<? extends Page> pageClass) {
        LOGGER.debug("Adding mount point for path {} = {}", path, pageClass.getName());
        mountPoints.add(new DefaultMountPointInfo(path, pageClass));
    }
}
public class DownloadDispatcher {
    /**
     * Reads data from the input stream into the buffer.
     *
     * @param buffer destination buffer
     * @param is the stream to read from
     * @return number of bytes read (or -1 at normal end of stream, as returned
     *         by {@link InputStream#read(byte[])}); -2 when an IOException whose
     *         message equals END_OF_STREAM occurs; {@code Integer.MIN_VALUE} for
     *         any other IOException
     */
    int readFromInputStream(byte[] buffer, InputStream is) {
        try {
            return is.read(buffer);
        } catch (IOException e) {
            // NOTE(review): distinguishing stream-end by exception *message* is
            // fragile — confirm END_OF_STREAM matches the producing library.
            if (END_OF_STREAM.equals(e.getMessage())) {
                return - 2;
            }
            return Integer.MIN_VALUE;
        }
    }
}
public class DynamicConfigManager {
    /**
     * <p>Reloads the configuration from admin for a given destination and
     * applies any changes to the in-memory DestinationHandler.</p>
     * Destinations looked up by UUID (not name) so that altered destinations are
     * distinguished from deleted-and-recreated ones. If admin reports the
     * destination as no longer existing, it is deleted here (except local
     * destinations, which admin must delete explicitly).
     *
     * @param destinationHandler the handler to refresh
     * @throws SIResourceException
     * @throws SINotPossibleInCurrentConfigurationException
     */
    private void reloadDestinationFromAdmin(DestinationHandler destinationHandler) throws SIResourceException, SINotPossibleInCurrentConfigurationException {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(tc, "reloadDestinationFromAdmin");
        try {
            // 577657 Lookup dest by uuid. We want to find destinations that have been altered - not ones
            // that have been deleted and recreated.
            BaseDestinationDefinition bdd = _messageProcessor.getMessagingEngine().getSIBDestinationByUuid(destinationHandler.getBus(), destinationHandler.getUuid().toString());
            // TODO when admin provide an equals method, we can use it to see if any
            // update is necessary
            // if (add.equals())
            if ((!bdd.isAlias()) && (!bdd.isForeign()) && (destinationHandler.getDestinationType() != DestinationType.SERVICE)) {
                // Update the definition
                destinationHandler.updateDefinition(bdd);
                Set queuePointLocalitySet = _messageProcessor.getMessagingEngine().getSIBDestinationLocalitySet(destinationHandler.getBus(), destinationHandler.getUuid().toString());
                // There must be a queue point
                if ((queuePointLocalitySet == null) || (queuePointLocalitySet.size() == 0)) {
                    if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
                        SibTr.exit(tc, "reloadDestinationFromAdmin", "SIErrorException");
                    throw new SIErrorException(nls.getFormattedMessage("INTERNAL_CONFIGURATION_ERROR_CWSIP0006", new Object[] { "DynamicConfigManager", "1:245:1.32", destinationHandler.getName() }, null));
                }
                if (bdd.isLocal()) {
                    // Local queue: push the updated locality set into the handler.
                    DestinationDefinition dd = (DestinationDefinition) bdd;
                    BaseDestinationHandler bdh = (BaseDestinationHandler) destinationHandler;
                    bdh.updateLocalizationSet(queuePointLocalitySet);
                } else {
                    // This is either an alias destination or a foreign destination
                    destinationHandler.updateDefinition(bdd);
                }
            }
        }
        // Catch Admin's SIBExceptionDestinationNotFound exception
        catch (SIBExceptionDestinationNotFound e) {
            // No FFDC code needed
            // The destination no longer exists. Delete it.
            // Dont delete local destinations unless explicitly told to through
            // the admin interface
            if (!(!destinationHandler.isAlias() && !destinationHandler.isForeign() && destinationHandler.hasLocal())) {
                deleteDestination(destinationHandler);
            } else {
                // FFDC - Admin should have deleted any local destinations directly already
                FFDCFilter.processException(e, "com.ibm.ws.sib.processor.impl.DynamicConfigManager.reloadDestinationFromAdmin", "1:325:1.32", this);
                SibTr.exception(tc, e);
            }
        } catch (SIBExceptionBase e) {
            // No FFDC code needed
            // TODO - handle this
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "reloadDestinationFromAdmin");
    }
}
public class DefaultAESCBCCipher {
    /**
     * Decrypts the provided cipher text. The decryption cipher is shared, so a
     * lock serialises access to it.
     *
     * @param cipherText AES-encrypted, base64-encoded cipher text
     * @return decrypted text
     * @throws JsonDBException if decoding or decryption fails
     */
    @Override
    public String decrypt(String cipherText) {
        // The underlying Cipher instance is stateful and not thread-safe.
        this.decryptionLock.lock();
        try {
            String decryptedValue = null;
            try {
                byte[] bytes = Base64.getDecoder().decode(cipherText);
                decryptedValue = new String(decryptCipher.doFinal(bytes), charset);
            } catch (UnsupportedEncodingException | IllegalBlockSizeException | BadPaddingException e) {
                logger.error("DefaultAESCBCCipher failed to decrypt text", e);
                throw new JsonDBException("DefaultAESCBCCipher failed to decrypt text", e);
            }
            return decryptedValue;
        } finally {
            this.decryptionLock.unlock();
        }
    }
}
public class LatencyMetrics {
    /**
     * Records a latency sample, in nanoseconds. The timer keeps nanosecond
     * precision; totals and histograms store microseconds (nanos / 1000).
     * The sample is also propagated to all parent metrics.
     *
     * @param nanos the observed latency in nanoseconds
     */
    public void addNano(long nanos) {
        // convert to microseconds. 1 millionth
        latency.update(nanos, TimeUnit.NANOSECONDS);
        totalLatency.inc(nanos / 1000);
        totalLatencyHistogram.add(nanos / 1000);
        recentLatencyHistogram.add(nanos / 1000);
        for (LatencyMetrics parent : parents) {
            parent.addNano(nanos);
        }
    }
}
public class Session { /** * Check whether the remote maestrano session is still valid * package private for testing */ boolean performRemoteCheck ( MnoHttpClient httpClient ) { } }
// Prepare request String url = sso . getSessionCheckUrl ( this . uid , this . sessionToken ) ; String respStr ; try { respStr = httpClient . get ( url ) ; } catch ( AuthenticationException e1 ) { // TODO log error e1 . printStackTrace ( ) ; return false ; } catch ( ApiException e1 ) { // TODO log error e1 . printStackTrace ( ) ; return false ; } // Parse response Gson gson = new Gson ( ) ; Type type = new TypeToken < Map < String , String > > ( ) { } . getType ( ) ; Map < String , String > respObj = gson . fromJson ( respStr , type ) ; Boolean isValid = ( respObj . get ( "valid" ) != null && respObj . get ( "valid" ) . equals ( "true" ) ) ; if ( isValid ) { try { this . recheck = MnoDateHelper . fromIso8601 ( respObj . get ( "recheck" ) ) ; } catch ( Exception e ) { return false ; } return true ; } return false ;
public class Checker {
    /**
     * Reads a list of API signatures from the given URL.
     *
     * @param url the location of the signatures file
     * @throws IOException if the stream cannot be opened or read
     * @throws ParseException if the signatures are malformed
     */
    public void parseSignaturesFile(URL url) throws IOException, ParseException {
        // Delegate to the stream-based overload; the URL string is used as the
        // source name for error reporting.
        parseSignaturesFile(url.openStream(), url.toString());
    }
}
public class IFixCompareCommandTask {
    /**
     * {@inheritDoc}
     *
     * Runs the iFix compare command: requires at least one of --to (compare to
     * an install location) or --apar (compare to an APAR list), gathers the
     * installed iFix information once, runs the requested comparison(s), and
     * finally reports any iFixes that could not be applied.
     */
    @Override
    protected void doExecute(ExecutionContext context) {
        CommandConsole console = context.getCommandConsole();
        /*
         * There are two uses of the compare command, they may have supplied the --apar option or the --to option, if neither is supplied then this is illegal use so put an error
         * out
         */
        boolean toSet = context.optionExists(INSTALL_LOCATION_TO_COMPARE_TO_OPTION);
        boolean aparSet = context.optionExists(APAR_TO_COMPARE_OPTION);
        if (!toSet && !aparSet) {
            console.printlnErrorMessage(getMessage("compare.no.option.set"));
            return;
        }
        // Both commands need the iFix information for the current install so grab it now so it's only loaded once
        File wlpInstallationDirectory = context.getAttribute(CommandConstants.WLP_INSTALLATION_LOCATION, File.class);
        InstalledIFixInformation installedIFixes = findInstalledIFixes(wlpInstallationDirectory, console, context.optionExists(VERBOSE_OPTION));
        if (toSet) {
            compareToInstallLocation(context, console, installedIFixes.validIFixes, wlpInstallationDirectory);
        }
        if (aparSet) {
            compareToAparList(context, console, installedIFixes.validIFixes, wlpInstallationDirectory);
        }
        // Finally list all of the iFixes that were not applicable
        if (!installedIFixes.iFixesWithMissingFiles.isEmpty()) {
            console.printlnInfoMessage("");
            console.printlnInfoMessage(getMessage("compare.invalid.ifixes.missing", installedIFixes.iFixesWithMissingFiles));
        }
        if (!installedIFixes.inapplicableIFixes.isEmpty()) {
            console.printlnInfoMessage("");
            console.printlnInfoMessage(getMessage("compare.invalid.ifixes.badversion", installedIFixes.inapplicableIFixes));
        }
        if (!installedIFixes.iFixesWithInvalidXml.isEmpty()) {
            console.printlnInfoMessage("");
            console.printlnErrorMessage(getMessage("compare.invalid.ifixes.bad.xml", installedIFixes.iFixesWithInvalidXml));
        }
    }
}
public class EnvironmentPlatform { /** * The list of programming languages that are available for the specified platform . * @ param languages * The list of programming languages that are available for the specified platform . */ public void setLanguages ( java . util . Collection < EnvironmentLanguage > languages ) { } }
if ( languages == null ) { this . languages = null ; return ; } this . languages = new java . util . ArrayList < EnvironmentLanguage > ( languages ) ;
public class BooleansConverter {
    /**
     * Creates a boolean value from the given string. Returns Boolean true only
     * if the string is one of: <em>true</em>, <em>yes</em>, <em>1</em>,
     * <em>on</em>; otherwise returns false. Comparison is not case sensitive.
     */
    @Override
    public <T> T asObject(String string, Class<T> valueType) {
        // at this point value type is a boolean or a boxing boolean
        final String normalized = string.toLowerCase();
        final boolean truthy = normalized.equals("true")
                || normalized.equals("yes")
                || normalized.equals("1")
                || normalized.equals("on");
        return (T) (Boolean) truthy;
    }
}
public class GConvertImage { /** * Converts an image from one type to another type . Creates a new image instance if * an output is not provided . * @ param src Input image . Not modified . * @ param dst Converted output image . If null a new one will be declared . Modified . * @ param typeDst The type of output image . * @ return Converted image . */ public static < T extends ImageGray < T > > T convert ( ImageGray < ? > src , T dst , Class < T > typeDst ) { } }
if ( dst == null ) { dst = ( T ) GeneralizedImageOps . createSingleBand ( typeDst , src . width , src . height ) ; } else { InputSanityCheck . checkSameShape ( src , dst ) ; } convert ( src , dst ) ; return dst ;
public class ListView { /** * Adds the given item to this view . * @ since 1.389 */ @ Override public void add ( TopLevelItem item ) throws IOException { } }
// Record the item under its name relative to this view's owner group, guarding
// the jobNames mutation with this view's monitor; save() then persists the
// configuration. NOTE(review): save() sits outside the synchronized block,
// presumably to avoid holding the lock during disk I/O — confirm.
synchronized ( this ) { jobNames . add ( item . getRelativeNameFrom ( getOwner ( ) . getItemGroup ( ) ) ) ; } save ( ) ;
public class BsonUuidSerializer { /** * Utility routine for converting UUIDs to bytes in little endian format . * @ param uuid The UUID to convert * @ return a byte array representing the UUID in little endian format */ protected static byte [ ] uuidToLittleEndianBytes ( UUID uuid ) { } }
long msb = uuid . getMostSignificantBits ( ) ; long lsb = uuid . getLeastSignificantBits ( ) ; byte [ ] buffer = new byte [ 16 ] ; for ( int i = 0 ; i < 8 ; i ++ ) { buffer [ i ] = ( byte ) ( msb >>> 8 * i ) ; } for ( int i = 8 ; i < 16 ; i ++ ) { buffer [ i ] = ( byte ) ( lsb >>> 8 * ( i - 16 ) ) ; } return buffer ;
public class AbstractMetricsDispatcher { /** * Register Jackson module that maps enums as lowercase . Per http : / / stackoverflow . com / a / 24173645. */ @ SuppressWarnings ( "rawtypes" ) private static void registerEnumModule ( ObjectMapper mapper ) { } }
// Install a Jackson module that round-trips enums in lowercase:
//  - deserializer: uppercases the incoming JSON string before Enum.valueOf,
//    installed via a BeanDeserializerModifier so it applies to every enum type;
//  - serializer: writes value.name().toLowerCase() for all Enum values.
SimpleModule module = new SimpleModule ( ) ; module . setDeserializerModifier ( new BeanDeserializerModifier ( ) { @ Override public JsonDeserializer < Enum > modifyEnumDeserializer ( DeserializationConfig config , final JavaType type , BeanDescription beanDesc , final JsonDeserializer < ? > deserializer ) { return new JsonDeserializer < Enum > ( ) { @ Override public Enum deserialize ( JsonParser jp , DeserializationContext ctxt ) throws IOException { @ SuppressWarnings ( "unchecked" ) Class < ? extends Enum > rawClass = ( Class < Enum < ? > > ) type . getRawClass ( ) ; return Enum . valueOf ( rawClass , jp . getValueAsString ( ) . toUpperCase ( ) ) ; } } ; } } ) ; module . addSerializer ( Enum . class , new StdSerializer < Enum > ( Enum . class ) { @ Override public void serialize ( Enum value , JsonGenerator jgen , SerializerProvider provider ) throws IOException { jgen . writeString ( value . name ( ) . toLowerCase ( ) ) ; } } ) ; mapper . registerModule ( module ) ;
public class StringParser { /** * Parse the given { @ link String } as { @ link Byte } with radix * { @ value # DEFAULT _ RADIX } . * @ param sStr * The String to parse . May be < code > null < / code > . * @ return < code > null < / code > if the string does not represent a valid value . */ @ Nullable public static Byte parseByteObj ( @ Nullable final String sStr ) { } }
// Delegate to the three-argument overload with the default radix and null as
// the "invalid input" fallback value.
return parseByteObj ( sStr , DEFAULT_RADIX , null ) ;
public class IntervalRBTree { /** * Checks if is red . * @ param n the n * @ return true , if is red */ private boolean isRed ( IntervalRBTreeNode < T > n ) { } }
if ( n == null ) { return false ; } return n . color == IntervalRBTreeNode . RED ;
public class BottomSheet { /** * Inflates the layout , which is used to show the bottom sheet ' s content . The layout may either * be the default one or a custom view , if one has been set before . */ private void inflateContentView ( ) { } }
// (Re)populate the content container, preferring in this order:
//  1. an explicit custom View, 2. a custom layout resource id (-1 = unset),
//  3. the default bottom_sheet_grid_view layout.
// The container is cleared first so repeated calls do not stack children;
// showGridView() runs in every case to finish the setup.
contentContainer = rootView . findViewById ( R . id . content_container ) ; contentContainer . removeAllViews ( ) ; if ( customView != null ) { contentContainer . setVisibility ( View . VISIBLE ) ; contentContainer . addView ( customView ) ; } else if ( customViewId != - 1 ) { contentContainer . setVisibility ( View . VISIBLE ) ; LayoutInflater layoutInflater = LayoutInflater . from ( getContext ( ) ) ; View view = layoutInflater . inflate ( customViewId , contentContainer , false ) ; contentContainer . addView ( view ) ; } else { LayoutInflater layoutInflater = LayoutInflater . from ( getContext ( ) ) ; View view = layoutInflater . inflate ( R . layout . bottom_sheet_grid_view , contentContainer , false ) ; contentContainer . addView ( view ) ; } showGridView ( ) ;
public class AWSIotClient { /** * Configures or reconfigures the Device Defender audit settings for this account . Settings include how audit * notifications are sent and which audit checks are enabled or disabled . * @ param updateAccountAuditConfigurationRequest * @ return Result of the UpdateAccountAuditConfiguration operation returned by the service . * @ throws InvalidRequestException * The request is not valid . * @ throws ThrottlingException * The rate exceeds the limit . * @ throws InternalFailureException * An unexpected error has occurred . * @ sample AWSIot . UpdateAccountAuditConfiguration */ @ Override public UpdateAccountAuditConfigurationResult updateAccountAuditConfiguration ( UpdateAccountAuditConfigurationRequest request ) { } }
// Standard AWS SDK dispatch: run pre-execution hooks (request handlers,
// metrics) and then delegate to the generated executor for this operation.
request = beforeClientExecution ( request ) ; return executeUpdateAccountAuditConfiguration ( request ) ;
public class StudioModel { /** * Sets JDBC | url | . * @ param url JDBC URL * @ see # getUrl */ public void setUrl ( final String url ) { } }
// Remember the previous URL for the property-change event, update the field,
// and invalidate any prior connection validation.
// NOTE(review): connectionConfig is assigned System.currentTimeMillis() — it
// appears to be a "configuration changed" timestamp rather than a config
// object; confirm against the field's declaration.
final String old = this . url ; this . url = url ; this . connectionConfig = System . currentTimeMillis ( ) ; this . connectionValidated = false ; this . pcs . firePropertyChange ( "url" , old , this . url ) ;
public class Bounds { /** * Create a new { @ code Bounds } object with the given extent . * @ param minLatitude the minimum latitude * @ param minLongitude the minimum longitude * @ param maxLatitude the maximum latitude * @ param maxLongitude the maximum longitude * @ return a new { @ code Bounds } object with the given extent * @ throws NullPointerException if one of the arguments is { @ code null } */ public static Bounds of ( final Latitude minLatitude , final Longitude minLongitude , final Latitude maxLatitude , final Longitude maxLongitude ) { } }
// Thin factory delegation; the documented NullPointerException presumably
// originates in the Bounds constructor — confirm there.
return new Bounds ( minLatitude , minLongitude , maxLatitude , maxLongitude ) ;
public class MessageSetImpl { /** * Gets up to a given amount of messages in the given channel around a given message in any channel . * The given message will be part of the result in addition to the messages around if it was sent in the given * channel and does not count towards the limit . * Half of the messages will be older than the given message and half of the messages will be newer . * If there aren ' t enough older or newer messages , the actual amount of messages will be less than the given limit . * It ' s also not guaranteed to be perfectly balanced . * @ param channel The channel of the messages . * @ param limit The limit of messages to get . * @ param around Get messages around the message with this id . * @ return The messages . * @ see # getMessagesAroundAsStream ( TextChannel , long ) */ public static CompletableFuture < MessageSet > getMessagesAround ( TextChannel channel , int limit , long around ) { } }
// On the API thread pool, fetch the two halves around the anchor:
//  - newer half: getMessagesAfter(limit/2, around)
//  - older half plus the anchor itself: getMessagesBefore(limit/2 + 1, around + 1)
// If the newest "older" message is not the anchor (e.g. the anchor was sent in
// a different channel), the oldest entry is trimmed via tailSet to keep the
// halves balanced. Both halves are then merged and any failure is propagated
// through the returned future.
CompletableFuture < MessageSet > future = new CompletableFuture < > ( ) ; channel . getApi ( ) . getThreadPool ( ) . getExecutorService ( ) . submit ( ( ) -> { try { // calculate the half limit . int halfLimit = limit / 2 ; // get the newer half MessageSet newerMessages = getMessagesAfter ( channel , halfLimit , around ) . join ( ) ; // get the older half + around message MessageSet olderMessages = getMessagesBefore ( channel , halfLimit + 1 , around + 1 ) . join ( ) ; // remove the oldest message if the around message is not part of the result while there is a result , // for example because the around message was from a different channel if ( olderMessages . getNewestMessage ( ) . map ( DiscordEntity :: getId ) . map ( id -> id != around ) . orElse ( false ) ) { olderMessages = olderMessages . tailSet ( olderMessages . getOldestMessage ( ) . orElseThrow ( AssertionError :: new ) , false ) ; } // combine the messages into one collection Collection < Message > messages = Stream . of ( olderMessages , newerMessages ) . flatMap ( Collection :: stream ) . collect ( Collectors . toList ( ) ) ; // we are done future . complete ( new MessageSetImpl ( messages ) ) ; } catch ( Throwable t ) { future . completeExceptionally ( t ) ; } } ) ; return future ;
public class ArrayKit { /** * Arr1 union Arr2 * @ param arr1 * @ param arr2 */ public static String [ ] union ( String [ ] arr1 , String [ ] arr2 ) { } }
Set < String > set = new HashSet < String > ( ) ; for ( String str : arr1 ) { set . add ( str ) ; } for ( String str : arr2 ) { set . add ( str ) ; } String [ ] result = { } ; return set . toArray ( result ) ;
public class Container { /** * Get the free level space , i . e . container height with height of * levels subtracted . * @ return free height and box dimension */ public Dimension getFreeLevelSpace ( ) { } }
// Height not yet occupied by stacked levels, returned as a box dimension.
final int freeHeight = height - getStackHeight();
if (freeHeight < 0) {
    throw new IllegalArgumentException("Remaining free space is negative at " + freeHeight + " for " + this);
}
return new Dimension(width, depth, freeHeight);
public class FRAG { /** * Send all fragments as separate messages ( with same ID ! ) . * Example : * < pre > * Given the generated ID is 2344 , number of fragments = 3 , message { dst , src , buf } * would be fragmented into : * [ 2344,3,0 ] { dst , src , buf1 } , * [ 2344,3,1 ] { dst , src , buf2 } and * [ 2344,3,2 ] { dst , src , buf3} * < / pre > */ private void fragment ( Message msg , long size ) { } }
// Serialize the whole message into a byte buffer, split that buffer into
// frag_size chunks, and send each chunk as its own message carrying a
// FragHeader(frag_id, index, total) so the receiver can reassemble.
// frag_id comes from an incrementing counter and doubles as the sequence id.
// Serialization or send failures are logged, not rethrown.
Address dest = msg . getDest ( ) , src = msg . getSrc ( ) ; long frag_id = curr_id . getAndIncrement ( ) ; // used as seqnos int num_frags ; try { // write message into a byte buffer and fragment it ByteArrayDataOutputStream dos = new ByteArrayDataOutputStream ( ( int ) ( size + 50 ) ) ; msg . writeTo ( dos ) ; byte [ ] buffer = dos . buffer ( ) ; byte [ ] [ ] fragments = Util . fragmentBuffer ( buffer , frag_size , dos . position ( ) ) ; num_frags = fragments . length ; num_sent_frags += num_frags ; if ( log . isTraceEnabled ( ) ) { StringBuilder sb = new StringBuilder ( ) ; sb . append ( "fragmenting packet to " ) . append ( dest != null ? dest . toString ( ) : "<all members>" ) . append ( " (size=" ) . append ( buffer . length ) . append ( ") into " ) . append ( num_frags ) . append ( " fragment(s) [frag_size=" ) . append ( frag_size ) . append ( ']' ) ; log . trace ( sb . toString ( ) ) ; } for ( int i = 0 ; i < num_frags ; i ++ ) { Message frag_msg = new Message ( dest , fragments [ i ] ) . src ( src ) ; FragHeader hdr = new FragHeader ( frag_id , i , num_frags ) ; frag_msg . putHeader ( this . id , hdr ) ; down_prot . down ( frag_msg ) ; } } catch ( Exception e ) { log . error ( Util . getMessage ( "ExceptionOccurredTryingToFragmentMessage" ) , e ) ; }
public class Fat { /** * Write the contents of this FAT to the given device at the given offset . * @ param offset the device offset where to write the FAT copy * @ throws IOException on write error */ public void writeCopy ( long offset ) throws IOException { } }
// Serialize every FAT entry into one sector-aligned buffer, then write the
// whole copy to the device with a single call.
final byte[] sectors = new byte[sectorCount * sectorSize];
for (int i = 0; i < entries.length; i++) {
    fatType.writeEntry(sectors, i, entries[i]);
}
device.write(offset, ByteBuffer.wrap(sectors));
public class IpcLogEntry { /** * Add a request header value . For special headers in { @ link NetflixHeader } it will * automatically fill in the more specific fields based on the header values . */ public IpcLogEntry addRequestHeader ( String name , String value ) { } }
// Special-case the Netflix infrastructure headers: the first occurrence of
// ASG / Zone / Node / Vip populates the corresponding typed field instead of
// the generic list (null guards ensure only the first value wins). Any header
// that does not match — including a repeat of an already-captured special
// header — is stored as a plain request header.
if ( clientAsg == null && name . equalsIgnoreCase ( NetflixHeader . ASG . headerName ( ) ) ) { withClientAsg ( value ) ; } else if ( clientZone == null && name . equalsIgnoreCase ( NetflixHeader . Zone . headerName ( ) ) ) { withClientZone ( value ) ; } else if ( clientNode == null && name . equalsIgnoreCase ( NetflixHeader . Node . headerName ( ) ) ) { withClientNode ( value ) ; } else if ( vip == null && name . equalsIgnoreCase ( NetflixHeader . Vip . headerName ( ) ) ) { withVip ( value ) ; } else { this . requestHeaders . add ( new Header ( name , value ) ) ; } return this ;
public class ExampleStereoTwoViewsOneCamera { /** * Show results as a point cloud */ public static void showPointCloud ( ImageGray disparity , BufferedImage left , Se3_F64 motion , DMatrixRMaj rectifiedK , DMatrixRMaj rectifiedR , int minDisparity , int maxDisparity ) { } }
// Convert the disparity image into a colored 3D point cloud and show it in an
// interactive viewer window.
DisparityToColorPointCloud d2c = new DisparityToColorPointCloud();
// Stereo baseline = length of the translation between the two views.
double baseline = motion.getT().norm();
d2c.configure(baseline, rectifiedK, rectifiedR, new DoNothing2Transform2_F64(), minDisparity, maxDisparity);
d2c.process(disparity, left);
CameraPinhole rectifiedPinhole = PerspectiveOps.matrixToPinhole(rectifiedK, disparity.width, disparity.height, null);
// skew the view to make the structure easier to see
Se3_F64 cameraToWorld = SpecialEuclideanOps_F64.eulerXyz(-baseline * 5, 0, 0, 0, 0.2, 0, null);
PointCloudViewer pcv = VisualizeData.createPointCloudViewer();
pcv.setCameraHFov(PerspectiveOps.computeHFov(rectifiedPinhole));
pcv.setCameraToWorld(cameraToWorld);
pcv.addCloud(d2c.getCloud(), d2c.getCloudColor());
pcv.setDotSize(1);
// Fix: setTranslationStep was previously called twice (baseline/3, then
// baseline/10); only the last call took effect, so the dead first call was
// removed and the effective value kept.
pcv.setTranslationStep(baseline / 10);
pcv.getComponent().setPreferredSize(new Dimension(left.getWidth(), left.getHeight()));
ShowImages.showWindow(pcv.getComponent(), "Point Cloud", true);
public class IoUtil { /** * Read and return the entire contents of the supplied { @ link File file } . * @ param file the file containing the contents ; may be null * @ return the contents , or an empty byte array if the supplied file is null * @ throws IOException if there is an error reading the content */ public static byte [ ] readBytes ( File file ) throws IOException { } }
// Null file contractually yields an empty array. The error flag distinguishes
// "stream close failed after success" (rethrown) from "close failed after the
// read already failed" (suppressed so the original exception wins) — this
// ordering is deliberate; do not simplify to try-with-resources blindly.
if ( file == null ) return new byte [ ] { } ; InputStream stream = new BufferedInputStream ( new FileInputStream ( file ) ) ; boolean error = false ; try { return readBytes ( stream ) ; } catch ( IOException e ) { error = true ; // this error should be thrown , even if there is an error closing stream throw e ; } catch ( RuntimeException e ) { error = true ; // this error should be thrown , even if there is an error closing stream throw e ; } finally { try { stream . close ( ) ; } catch ( IOException e ) { if ( ! error ) throw e ; } }
public class Unique { /** * { @ inheritDoc } * @ throws SuperCsvCellProcessorException * if value is null * @ throws SuperCsvConstraintViolationException * if a non - unique value is encountered */ public Object execute ( final Object value , final CsvContext context ) { } }
// Constraint processor: reject null, then rely on Set.add returning false to
// detect a value this processor instance has already seen; unique values are
// passed down the processor chain.
validateInputNotNull ( value , context ) ; if ( ! encounteredElements . add ( value ) ) { throw new SuperCsvConstraintViolationException ( String . format ( "duplicate value '%s' encountered" , value ) , context , this ) ; } return next . execute ( value , context ) ;
public class DeleteVpnConnectionRouteRequest { /** * This method is intended for internal use only . Returns the marshaled request configured with additional * parameters to enable operation dry - run . */ @ Override public Request < DeleteVpnConnectionRouteRequest > getDryRunRequest ( ) { } }
// Marshal this request, then force the DryRun flag so the service validates
// permissions/parameters without actually executing the operation.
Request < DeleteVpnConnectionRouteRequest > request = new DeleteVpnConnectionRouteRequestMarshaller ( ) . marshall ( this ) ; request . addParameter ( "DryRun" , Boolean . toString ( true ) ) ; return request ;
public class DateUtil { /** * Get the minute of the date * @ param date date * @ return minute of the date */ public static int getMinute ( Date date ) { } }
Calendar c = Calendar . getInstance ( ) ; c . setTime ( date ) ; return c . get ( Calendar . MINUTE ) ;
public class AssociativeArray { /** * Adds a particular key - value into the internal map . It returns the previous * value which was associated with that key . * @ param key * @ param value * @ return */ public final Object put ( Object key , Object value ) { } }
// Thin delegation to the backing map; returns the previous value associated
// with the key, or null if there was none.
return internalData . put ( key , value ) ;
public class DepictionGenerator { /** * Generate a bound element that is the title of the provided molecule . If title * is not specified an empty bounds is returned . * @ param chemObj molecule or reaction * @ return bound element */ private Bounds generateTitle ( IChemObject chemObj , double scale ) { } }
// Missing/empty title yields an empty Bounds. The font scale is inverted
// relative to the depiction scale (1/scale) so the title keeps a constant
// rendered size, then multiplied by the user-configurable TitleFontScale;
// the text element is tagged with the "title" markup class.
String title = chemObj . getProperty ( CDKConstants . TITLE ) ; if ( title == null || title . isEmpty ( ) ) return new Bounds ( ) ; scale = 1 / scale * getParameterValue ( RendererModel . TitleFontScale . class ) ; return new Bounds ( MarkedElement . markup ( StandardGenerator . embedText ( font , title , getParameterValue ( RendererModel . TitleColor . class ) , scale ) , "title" ) ) ;
public class CmsMenuItemVisibilityMode { /** * Adds the name of the message key for the visibility mode . < p > * @ param messageKey the name of the message key for the visibility mode * @ return an extended visibility mode containing the message key */ public CmsMenuItemVisibilityMode addMessageKey ( String messageKey ) { } }
// Copy-on-write: clone this mode and attach the message key to the copy so
// the original instance stays unmodified.
CmsMenuItemVisibilityMode mode = clone ( ) ; mode . m_messageKey = messageKey ; return mode ;
public class Sql { /** * Performs the given SQL query calling the given Closure with each row of the result set . * The row will be a < code > GroovyResultSet < / code > which is a < code > ResultSet < / code > * that supports accessing the fields using property style notation and ordinal index values . * The query may contain GString expressions . * Example usage : * < pre > * def location = 25 * sql . eachRow ( " select * from PERSON where location _ id < $ location " ) { row - > * println row . firstname * < / pre > * Resource handling is performed automatically where appropriate . * @ param gstring a GString containing the SQL query with embedded params * @ param closure called for each row with a GroovyResultSet * @ throws SQLException if a database access error occurs * @ see # expand ( Object ) */ public void eachRow ( GString gstring , Closure closure ) throws SQLException { } }
// Delegate to the overload that also accepts a meta-data Closure, passing
// null since no meta-data handling is requested here.
eachRow ( gstring , null , closure ) ;
public class StandardRoadConnection { /** * Add a segment to this connection point . * < p > The segments are ordered according to there * geo - localization along a trigonometric cicle . * The first segment has the nearest angle to the * vector ( 1,0 ) , and the following segments are * ordered according to the positive value of there * angles to this unity vector ( counterclockwise order ) . * @ param segment is the segment to add . * @ param attachToStartPoint indicates if the segment must be attached by * its start point ( if value is < code > true < / code > ) or by its end point * ( if value is < code > false < / code > ) . */ void addConnectedSegment ( RoadPolyline segment , boolean attachToStartPoint ) { } }
// Maintain the connected segments in counterclockwise angular order around
// this node: the first segment is appended trivially; later segments are
// inserted at the index located by searchInsertionIndex over the angle to the
// unit vector (1,0). Null segments are ignored; listeners are notified via
// fireIteratorUpdate() after every successful insertion.
if ( segment == null ) { return ; } if ( this . connectedSegments . isEmpty ( ) ) { this . connectedSegments . add ( new Connection ( segment , attachToStartPoint ) ) ; } else { // Compute the angle to the unit vector for the new segment final double newSegmentAngle = computeAngle ( segment , attachToStartPoint ) ; // Search for the insertion index final int insertionIndex = searchInsertionIndex ( newSegmentAngle , 0 , this . connectedSegments . size ( ) - 1 ) ; // Insert this . connectedSegments . add ( insertionIndex , new Connection ( segment , attachToStartPoint ) ) ; } fireIteratorUpdate ( ) ;
public class InternalXtextParser { /** * InternalXtext . g : 64:1 : entryRuleGrammar returns [ EObject current = null ] : iv _ ruleGrammar = ruleGrammar EOF ; */ public final EObject entryRuleGrammar ( ) throws RecognitionException { } }
// ANTLR-generated entry rule: parse a Grammar followed by EOF, recovering and
// resynchronizing on RecognitionException. Generated from InternalXtext.g —
// do not hand-edit; regenerate instead.
EObject current = null ; EObject iv_ruleGrammar = null ; try { // InternalXtext . g : 64:48 : ( iv _ ruleGrammar = ruleGrammar EOF ) // InternalXtext . g : 65:2 : iv _ ruleGrammar = ruleGrammar EOF { newCompositeNode ( grammarAccess . getGrammarRule ( ) ) ; pushFollow ( FollowSets000 . FOLLOW_1 ) ; iv_ruleGrammar = ruleGrammar ( ) ; state . _fsp -- ; current = iv_ruleGrammar ; match ( input , EOF , FollowSets000 . FOLLOW_2 ) ; } } catch ( RecognitionException re ) { recover ( input , re ) ; appendSkippedTokens ( ) ; } finally { } return current ;
public class DomHelper { /** * Move an element from on group to another . The elements name will remain the same . * @ param name * The name of the element within the sourceParent group . * @ param sourceParent * The original parent group of the element . * @ param targetParent * The target parent group for the element . * @ return true when move was successful */ public boolean moveElement ( String name , Object sourceParent , Object targetParent ) { } }
// Move a named element between groups, keeping its short name but reassembling
// its DOM id from the target group's id (and updating the elementToName map
// accordingly). Returns false when either group cannot be resolved or the
// element is not actually a child of the source group.
Element sourceGroup = null ; Element targetGroup = null ; Element element = null ; if ( sourceParent != null ) { sourceGroup = getGroup ( sourceParent ) ; element = getElement ( sourceParent , name ) ; } if ( targetParent != null ) { targetGroup = getGroup ( targetParent ) ; } if ( sourceGroup == null || targetGroup == null ) { return false ; } if ( Dom . isOrHasChild ( sourceGroup , element ) ) { Dom . removeChild ( sourceGroup , element ) ; String newId = Dom . assembleId ( targetGroup . getId ( ) , name ) ; elementToName . remove ( element . getId ( ) ) ; elementToName . put ( newId , name ) ; Dom . setElementAttribute ( element , "id" , newId ) ; Dom . appendChild ( targetGroup , element ) ; return true ; } return false ;
public class Observable { /** * Returns an Observable that , when an observer subscribes to it , invokes a function you specify and then * emits the value returned from that function . * < img width = " 640 " height = " 310 " src = " https : / / raw . github . com / wiki / ReactiveX / RxJava / images / rx - operators / fromCallable . png " alt = " " > * This allows you to defer the execution of the function you specify until an observer subscribes to the * ObservableSource . That is to say , it makes the function " lazy . " * < dl > * < dt > < b > Scheduler : < / b > < / dt > * < dd > { @ code fromCallable } does not operate by default on a particular { @ link Scheduler } . < / dd > * < dt > < b > Error handling : < / b > < / dt > * < dd > If the { @ link Callable } throws an exception , the respective { @ link Throwable } is * delivered to the downstream via { @ link Observer # onError ( Throwable ) } , * except when the downstream has disposed this { @ code Observable } source . * In this latter case , the { @ code Throwable } is delivered to the global error handler via * { @ link RxJavaPlugins # onError ( Throwable ) } as an { @ link io . reactivex . exceptions . UndeliverableException UndeliverableException } . * < / dd > * < / dl > * @ param supplier * a function , the execution of which should be deferred ; { @ code fromCallable } will invoke this * function only when an observer subscribes to the ObservableSource that { @ code fromCallable } returns * @ param < T > * the type of the item emitted by the ObservableSource * @ return an Observable whose { @ link Observer } s ' subscriptions trigger an invocation of the given function * @ see # defer ( Callable ) * @ since 2.0 */ @ CheckReturnValue @ SchedulerSupport ( SchedulerSupport . NONE ) public static < T > Observable < T > fromCallable ( Callable < ? extends T > supplier ) { } }
// Validate the supplier eagerly (a null supplier fails at assembly time, not
// at subscribe time), then wrap it and route through RxJavaPlugins.onAssembly
// so assembly hooks/plugins can intercept the new source.
ObjectHelper . requireNonNull ( supplier , "supplier is null" ) ; return RxJavaPlugins . onAssembly ( new ObservableFromCallable < T > ( supplier ) ) ;
public class SqlContextImpl { /** * { @ inheritDoc } * @ see jp . co . future . uroborosql . fluent . SqlFluent # inOutParamIfAbsent ( java . lang . String , java . lang . Object , int ) */ @ Override public SqlContext inOutParamIfAbsent ( final String parameterName , final Object value , final int sqlType ) { } }
// Guard clause: a parameter already bound under this name wins, which keeps
// repeated binding attempts idempotent.
if (hasParam(parameterName)) {
    return this;
}
inOutParam(parameterName, value, sqlType);
return this;
public class AutoAuthCriteriaParser { /** * Parses the provided criteria expression , which must have a * boolean value . * @ param criteria the criteria expression string . If < code > null < / code > , * this method returns < code > true < / code > . * @ param attributes any user attribute - value pairs . The attribute names * may be used as variables . * @ return < code > true < / code > if the provided criteria is < code > null < / code > * or evaluates to < code > true < / code > , < code > false < / code > otherwise . * @ throws CriteriaParseException if an error occurred parsing the criteria * expression , most likely because the expression is invalid . */ public boolean parse ( String criteria , Map < String , ? extends Object > attributes ) throws CriteriaParseException { } }
// Null criteria contractually evaluates to true. Otherwise the FreeMarker
// expression is evaluated against the attribute map and its string result is
// interpreted via Boolean.parseBoolean (anything but "true", case-insensitive,
// yields false). Template errors are wrapped in CriteriaParseException.
if ( criteria == null ) { return true ; } else { try { return Boolean . parseBoolean ( FREEMARKER_BUILTINS . eval ( criteria , attributes ) ) ; } catch ( TemplateException ex ) { throw new CriteriaParseException ( ex ) ; } }
public class Forward { /** * Get the type of return as a String , if this is a < code > return - to < / code > type . * @ return one of the following values : < code > currentPage < / code > , < code > previousPage < / code > , < code > page < / code > , * ( deprecated ) , < code > previousAction < / code > , < code > action < / code > ( deprecated ) , or < code > null < / code > * if this is not a < code > return - to < / code > type . * @ see # isReturnToAction * @ see # isReturnToPage */ public String getReturnToTypeAsString ( ) { } }
// Map the internal return-to code onto its string constant; any other code
// (i.e. not a return-to forward) yields null, per the documented contract.
switch (_returnToType) {
    case RETURN_TO_CURRENT_PAGE:
        return RETURN_TO_CURRENT_PAGE_STR;
    case RETURN_TO_PREVIOUS_PAGE:
        return RETURN_TO_PREVIOUS_PAGE_STR;
    case RETURN_TO_PAGE:
        return RETURN_TO_PAGE_LEGACY_STR;
    case RETURN_TO_PREVIOUS_ACTION:
        return RETURN_TO_PREVIOUS_ACTION_STR;
    case RETURN_TO_ACTION:
        return RETURN_TO_ACTION_LEGACY_STR;
    default:
        return null;
}
public class GeometryMergeService { /** * Remove a geometry from the merging list again . * @ param geometry The geometry to remove . * @ throws GeometryMergeException In case the merging process has not been started . */ public void removeGeometry ( Geometry geometry ) throws GeometryMergeException { } }
// Removal is only legal while a merge operation is in progress (busy flag).
// NOTE(review): the removed event is fired even when the geometry was not in
// the list (the List.remove result is ignored) — confirm this is intended.
if ( ! busy ) { throw new GeometryMergeException ( "Can't remove a geometry if no merging process is active." ) ; } geometries . remove ( geometry ) ; eventBus . fireEvent ( new GeometryMergeRemovedEvent ( geometry ) ) ;
public class Stopwatch { /** * Stops the stopwatch . Future reads will return the fixed duration that had elapsed up to this * point . * @ return this { @ code Stopwatch } instance * @ throws IllegalStateException if the stopwatch is already stopped . */ public Stopwatch stop ( ) { } }
// The ticker is read BEFORE the state check — presumably so the recorded stop
// time excludes the validation overhead; keep this ordering. The elapsed time
// is accumulated so a later start() continues from the same total.
long tick = ticker . read ( ) ; checkState ( isRunning , "This stopwatch is already stopped." ) ; isRunning = false ; elapsedNanos += tick - startTick ; return this ;
public class Signatures { /** * Fixes an arguments array to fit a given length , * the last value is an array filled with varargs . * @ param length * @ param varArgType * @ param arguments * @ return fixed arguments array */ @ SuppressWarnings ( "SuspiciousSystemArraycopy" ) public static Object [ ] fixVarArgs ( int length , Class < ? > varArgType , Object [ ] arguments ) { } }
// Normalize an argument array for a varargs invocation:
//  - already packed (exact length and the last element is a vararg array of
//    the right type): return the input unchanged;
//  - otherwise copy the fixed prefix and pack the trailing arguments into a
//    fresh array of the vararg element type, unboxing element-by-element when
//    that element type is primitive (System.arraycopy cannot unbox).
final Object [ ] result ; if ( arguments . length == length && varArgType . isInstance ( arguments [ length - 1 ] ) ) { return arguments ; } else { result = Arrays . copyOf ( arguments , length , Object [ ] . class ) ; } final Object varArgs ; Class < ? > varArgElementType = varArgType . getComponentType ( ) ; if ( varArgElementType . isPrimitive ( ) ) { varArgs = Boxing . unboxAll ( varArgElementType , arguments , length - 1 , - 1 ) ; } else { int varLen = arguments . length - length + 1 ; varArgs = Array . newInstance ( varArgElementType , varLen ) ; System . arraycopy ( arguments , length - 1 , varArgs , 0 , varLen ) ; } result [ length - 1 ] = varArgs ; return result ;
public class HolidayProcessor { /** * Get the date of the Shrove - Tide week in a given year * @ author Elena Klyachko * @ param year * @ return date */ public String getShroveTideWeekOrthodox ( int year ) { } }
// Shrove-Tide week begins 49 days before Orthodox Easter Sunday; the result is
// formatted as "yyyy-Www" with manual zero padding. Unparseable Easter dates
// are logged and reported as "unknown".
// NOTE(review): Calendar.WEEK_OF_YEAR near a year boundary may disagree with
// the concatenated calendar year; Easter-49d normally lands in Feb/March so
// this is unlikely to trigger, but confirm for the earliest Easter dates.
String easterOrthodox = getEasterSundayOrthodox ( year ) ; SimpleDateFormat formatter = new SimpleDateFormat ( "yyyy-MM-dd" ) ; try { Calendar calendar = Calendar . getInstance ( ) ; Date date = formatter . parse ( easterOrthodox ) ; calendar . setTime ( date ) ; calendar . add ( Calendar . DAY_OF_MONTH , - 49 ) ; int shroveTideWeek = calendar . get ( Calendar . WEEK_OF_YEAR ) ; if ( shroveTideWeek < 10 ) { return year + "-W0" + shroveTideWeek ; } return year + "-W" + shroveTideWeek ; } catch ( ParseException pe ) { Logger . printError ( "ParseException:" + pe . getMessage ( ) ) ; return "unknown" ; }
public class Code { /** * Executes { @ code op } and sets { @ code target } to the result . */ public < T > void op ( UnaryOp op , Local < T > target , Local < T > source ) { } }
// Emit a single plain instruction: the unary op is typed by the source local
// and its result is written to the target register spec.
addInstruction ( new PlainInsn ( op . rop ( source . type ) , sourcePosition , target . spec ( ) , source . spec ( ) ) ) ;
public class MultipleFieldConverter { /** * For binary fields , set the current state . * @ param state The state to set this field . * @ param bDisplayOption Display changed fields if true . * @ param iMoveMode The move mode . * @ return The error code ( or NORMAL _ RETURN ) . */ public int setData ( Object state , boolean bDisplayOption , int iMoveMode ) { } }
// Raise the m_bSetData flag only for the duration of the superclass call so
// getNextConverter (if invoked during setData) can detect it is running
// inside a set operation; the flag is always cleared before returning.
// Must be overidden m_bSetData = true ; // Make sure getNextConverter is called correctly ( if it is called ) . int iErrorCode = super . setData ( state , bDisplayOption , iMoveMode ) ; m_bSetData = false ; return iErrorCode ;
public class CPMeasurementUnitPersistenceImpl { /** * Returns the cp measurement units before and after the current cp measurement unit in the ordered set where uuid = & # 63 ; and companyId = & # 63 ; . * @ param CPMeasurementUnitId the primary key of the current cp measurement unit * @ param uuid the uuid * @ param companyId the company ID * @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > ) * @ return the previous , current , and next cp measurement unit * @ throws NoSuchCPMeasurementUnitException if a cp measurement unit with the primary key could not be found */ @ Override public CPMeasurementUnit [ ] findByUuid_C_PrevAndNext ( long CPMeasurementUnitId , String uuid , long companyId , OrderByComparator < CPMeasurementUnit > orderByComparator ) throws NoSuchCPMeasurementUnitException { } }
// Generated service-builder finder: resolve the current entity by primary key,
// then within one session fetch its predecessor ([0]) and successor ([2]) in
// the uuid/companyId ordered set; the current entity sits at index [1].
CPMeasurementUnit cpMeasurementUnit = findByPrimaryKey ( CPMeasurementUnitId ) ; Session session = null ; try { session = openSession ( ) ; CPMeasurementUnit [ ] array = new CPMeasurementUnitImpl [ 3 ] ; array [ 0 ] = getByUuid_C_PrevAndNext ( session , cpMeasurementUnit , uuid , companyId , orderByComparator , true ) ; array [ 1 ] = cpMeasurementUnit ; array [ 2 ] = getByUuid_C_PrevAndNext ( session , cpMeasurementUnit , uuid , companyId , orderByComparator , false ) ; return array ; } catch ( Exception e ) { throw processException ( e ) ; } finally { closeSession ( session ) ; }
public class ResourceManager {
    /**
     * Sync existing IVdmSource files with the build path of the project. This is used when the build path changed and
     * the project should be updated. This method removes old IVdmSource files which are no longer within the build
     * path.
     * @param project the project whose cached source units should be reconciled
     * @throws CoreException
     */
    public synchronized void syncBuildPath(IVdmProject project) throws CoreException {
        // Source units currently on the project's build path.
        List<IVdmSourceUnit> syncedVdmSourceUnits = project.getSpecFiles();
        List<IFile> removedFiles = new Vector<IFile>();
        IProject p = (IProject) project.getAdapter(IProject.class);
        // Collect cached files that belong to this project but are no longer on the build path.
        // (Collected first, then removed, to avoid mutating vdmSourceUnits while iterating it.)
        for (IFile file : vdmSourceUnits.keySet()) {
            if (file.getProject().equals(p) && !syncedVdmSourceUnits.contains(vdmSourceUnits.get(file))) {
                removedFiles.add(file);
                // NOTE(review): debug print left in production code — consider replacing
                // with the project's logging facility.
                System.out.println("Found an existing file removed from build path: " + file);
            }
        }
        // remove the old files
        for (IFile iFile : removedFiles) {
            remove(iFile);
        }
    }
}
public class Gmap3WicketToDoItems { /** * region > programmatic helpers */ @ Programmatic // for use by fixtures public Gmap3ToDoItem newToDo ( final String description , final String userName ) { } }
final Gmap3ToDoItem toDoItem = repositoryService . instantiate ( Gmap3ToDoItem . class ) ; toDoItem . setDescription ( description ) ; toDoItem . setOwnedBy ( userName ) ; toDoItem . setLocation ( new Location ( 51.5172 + random ( - 0.05 , + 0.05 ) , 0.1182 + random ( - 0.05 , + 0.05 ) ) ) ; repositoryService . persistAndFlush ( toDoItem ) ; return toDoItem ;
public class LinearLayoutHelper {
    /**
     * Lays out exactly one child per invocation. In {@link LinearLayoutHelper} each
     * iteration only consumes one item, so the parent LayoutManager can decide whether
     * the next item is still in the range of this helper.
     */
    @Override
    public void layoutViews(RecyclerView.Recycler recycler, RecyclerView.State state, VirtualLayoutManager.LayoutStateWrapper layoutState, LayoutChunkResult result, LayoutManagerHelper helper) {
        // reach the end of this layout
        if (isOutOfRange(layoutState.getCurrentPosition())) {
            return;
        }
        int currentPosition = layoutState.getCurrentPosition();
        // find corresponding layout container
        View view = nextView(recycler, layoutState, helper, result);
        if (view == null) {
            return;
        }
        final boolean isOverLapMargin = helper.isEnableMarginOverLap();
        VirtualLayoutManager.LayoutParams params = (VirtualLayoutManager.LayoutParams) view.getLayoutParams();
        final boolean layoutInVertical = helper.getOrientation() == VERTICAL;
        int startSpace = 0, endSpace = 0, gap = 0;
        boolean isLayoutEnd = layoutState.getLayoutDirection() == VirtualLayoutManager.LayoutStateWrapper.LAYOUT_END;
        // Which end of this helper's range counts as the "start" line depends on the fill direction.
        boolean isStartLine = isLayoutEnd ? currentPosition == getRange().getLower().intValue() : currentPosition == getRange().getUpper().intValue();
        boolean isEndLine = isLayoutEnd ? currentPosition == getRange().getUpper().intValue() : currentPosition == getRange().getLower().intValue();
        if (isStartLine) {
            startSpace = computeStartSpace(helper, layoutInVertical, isLayoutEnd, isOverLapMargin);
        }
        if (isEndLine) {
            endSpace = computeEndSpace(helper, layoutInVertical, isLayoutEnd, isOverLapMargin);
        }
        // gap: spacing between this child and its already-laid-out neighbour.
        if (!isStartLine) {
            if (!isOverLapMargin) {
                gap = mLayoutWithAnchor ? 0 : mDividerHeight;
            } else {
                // Overlapping margins: take the max of the two adjacent margins when both
                // are non-negative, otherwise their sum.
                // TODO check layout with anchor
                if (isLayoutEnd) {
                    int marginTop = params.topMargin;
                    View sibling = helper.findViewByPosition(currentPosition - 1);
                    int lastMarginBottom = sibling != null ? ((LayoutParams) sibling.getLayoutParams()).bottomMargin : 0;
                    if (lastMarginBottom >= 0 && marginTop >= 0) {
                        gap = Math.max(lastMarginBottom, marginTop);
                    } else {
                        gap = lastMarginBottom + marginTop;
                    }
                } else {
                    int marginBottom = params.bottomMargin;
                    View sibling = helper.findViewByPosition(currentPosition + 1);
                    int lastMarginTop = sibling != null ? ((LayoutParams) sibling.getLayoutParams()).topMargin : 0;
                    if (marginBottom >= 0 && lastMarginTop >= 0) {
                        gap = Math.max(marginBottom, lastMarginTop);
                    } else {
                        gap = marginBottom + lastMarginTop;
                    }
                }
            }
        }
        // Available width after parent padding and this helper's own margins/paddings.
        final int widthSize = helper.getContentWidth() - helper.getPaddingLeft() - helper.getPaddingRight() - getHorizontalMargin() - getHorizontalPadding();
        int widthSpec = helper.getChildMeasureSpec(widthSize, params.width, !layoutInVertical);
        int heightSpec;
        float viewAspectRatio = params.mAspectRatio;
        // Per-view aspect ratio takes precedence over the helper-wide one.
        if (!Float.isNaN(viewAspectRatio) && viewAspectRatio > 0) {
            heightSpec = View.MeasureSpec.makeMeasureSpec((int) (widthSize / viewAspectRatio + 0.5f), View.MeasureSpec.EXACTLY);
        } else if (!Float.isNaN(mAspectRatio) && mAspectRatio > 0) {
            // NOTE(review): this branch rounds with the double literal 0.5 while the branch
            // above uses 0.5f — looks unintentional; confirm before unifying.
            heightSpec = View.MeasureSpec.makeMeasureSpec((int) (widthSize / mAspectRatio + 0.5), View.MeasureSpec.EXACTLY);
        } else {
            heightSpec = helper.getChildMeasureSpec(helper.getContentHeight() - helper.getPaddingTop() - helper.getPaddingBottom() - getVerticalMargin() - getVerticalPadding(), params.height, layoutInVertical);
        }
        // When margins overlap they are already folded into `gap`, so measure without them.
        if (!isOverLapMargin) {
            helper.measureChildWithMargins(view, widthSpec, heightSpec);
        } else {
            helper.measureChild(view, widthSpec, heightSpec);
        }
        OrientationHelperEx orientationHelper = helper.getMainOrientationHelper();
        // Total space consumed along the main axis for this child.
        result.mConsumed = orientationHelper.getDecoratedMeasurement(view) + startSpace + endSpace + gap;
        int left, top, right, bottom;
        if (helper.getOrientation() == VERTICAL) {
            // not support RTL now
            if (helper.isDoLayoutRTL()) {
                right = helper.getContentWidth() - helper.getPaddingRight() - mMarginRight - mPaddingRight;
                left = right - orientationHelper.getDecoratedMeasurementInOther(view);
            } else {
                left = helper.getPaddingLeft() + mMarginLeft + mPaddingLeft;
                right = left + orientationHelper.getDecoratedMeasurementInOther(view);
            }
            // whether this layout pass is layout to start or to end
            if (layoutState.getLayoutDirection() == VirtualLayoutManager.LayoutStateWrapper.LAYOUT_START) {
                // fill start, from bottom to top
                bottom = layoutState.getOffset() - startSpace - (isStartLine ? 0 : gap);
                top = bottom - orientationHelper.getDecoratedMeasurement(view);
            } else {
                // fill end, from top to bottom
                top = layoutState.getOffset() + startSpace + (isStartLine ? 0 : gap);
                bottom = top + orientationHelper.getDecoratedMeasurement(view);
            }
        } else {
            top = helper.getPaddingTop() + mMarginTop + mPaddingTop;
            bottom = top + orientationHelper.getDecoratedMeasurementInOther(view);
            if (layoutState.getLayoutDirection() == VirtualLayoutManager.LayoutStateWrapper.LAYOUT_START) {
                // fill left, from right to left
                right = layoutState.getOffset() - startSpace - (isStartLine ? 0 : gap);
                left = right - orientationHelper.getDecoratedMeasurement(view);
            } else {
                // fill right, from left to right
                left = layoutState.getOffset() + startSpace + (isStartLine ? 0 : gap);
                right = left + orientationHelper.getDecoratedMeasurement(view);
            }
        }
        // We calculate everything with View's bounding box (which includes decor and margins)
        // To calculate correct layout position, we subtract margins.
        layoutChildWithMargin(view, left, top, right, bottom, helper);
        if (DEBUG) {
            Log.d(TAG, "laid out child at position " + helper.getPosition(view) + ", with l:" + (left + params.leftMargin) + ", t:" + (top + params.topMargin) + ", r:" + (right - params.rightMargin) + ", b:" + (bottom - params.bottomMargin));
        }
        handleStateOnResult(result, view);
        // Anchor handling only suppresses the gap for the first pass after an anchor layout.
        mLayoutWithAnchor = false;
    }
}
public class Is { /** * Determines whether the element is enabled or not . * @ return Boolean : whether the element is enabled or not */ public boolean enabled ( ) { } }
boolean isEnabled = false ; try { // adding additional check for disabled attribute , due to issues with safari isEnabled = ( element . getWebElement ( ) . isEnabled ( ) && ! element . get ( ) . allAttributes ( ) . containsKey ( "disabled" ) ) ; } catch ( NullPointerException | NoSuchElementException e ) { // Null pointer means the element was deleted , and there has no attributes . No such element means the same thing log . info ( e ) ; } return isEnabled ;
public class UTCDateTimeUtils { /** * Returns the { @ link Date } for the values of the UTCDateBox and * UTCTimeBox which were edited in the specified { @ link TimeZone } . * @ param zone * The { @ link TimeZone } in which the Date was edited . * @ param dateBoxValue * The value of the { @ link UTCDateBox } control . * @ param timeBoxValue * The value of the { @ link UTCTimeBox } control . * @ return The { @ link Date } that has been selected from the * controls or null if the supplied dateBoxValue is null . * If the timeBoxValue is null , midnight is returned . */ public static final Date getDateValue ( TimeZone zone , Long dateBoxValue , Long timeBoxValue ) { } }
if ( dateBoxValue == null ) return null ; Calendar gmt = GregorianCalendar . getInstance ( TimeZone . getTimeZone ( "GMT" ) ) ; gmt . setTimeInMillis ( dateBoxValue . longValue ( ) ) ; Calendar cal = GregorianCalendar . getInstance ( zone ) ; cal . set ( gmt . get ( Calendar . YEAR ) , gmt . get ( Calendar . MONTH ) , gmt . get ( Calendar . DAY_OF_MONTH ) ) ; int hours , minutes , extraMillis ; if ( timeBoxValue != null ) { // figure out how many hours and minutes to add to // midnight in the specified time zone . long localTimeInDay = timeBoxValue . longValue ( ) ; // figure out if there are extra millis in the value // ( there shoudn ' t be since the time box control doesn ' t // typically render millis ) extraMillis = ( int ) ( localTimeInDay % ( 60 * 1000 ) ) ; // trim off the seconds localTimeInDay -= extraMillis ; minutes = ( int ) ( ( localTimeInDay / 60 / 1000 ) % 60 ) ; // trim off the minutes localTimeInDay -= minutes ; hours = ( int ) ( localTimeInDay / 60 / 60 / 1000 ) ; } else { // midnight hours = 0 ; minutes = 0 ; extraMillis = 0 ; } cal . set ( Calendar . HOUR_OF_DAY , hours ) ; cal . set ( Calendar . MINUTE , minutes ) ; cal . set ( Calendar . SECOND , 0 ) ; cal . set ( Calendar . MILLISECOND , 0 ) ; return new Date ( cal . getTimeInMillis ( ) + extraMillis ) ;
public class LocationBasedPerformanceTiersInner {
    /**
     * List all the performance tiers at specified location in a given subscription.
     * @param locationName The name of the location.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the List&lt;PerformanceTierPropertiesInner&gt; object
     */
    public Observable<List<PerformanceTierPropertiesInner>> listAsync(String locationName) {
        // Delegate to the ServiceResponse variant and unwrap just the payload for
        // callers that do not need the HTTP-level response details.
        return listWithServiceResponseAsync(locationName).map(new Func1<ServiceResponse<List<PerformanceTierPropertiesInner>>, List<PerformanceTierPropertiesInner>>() {
            @Override
            public List<PerformanceTierPropertiesInner> call(ServiceResponse<List<PerformanceTierPropertiesInner>> response) {
                return response.body();
            }
        });
    }
}
public class RangedBeacon { /** * Done at the end of each cycle before data are sent to the client */ public void commitMeasurements ( ) { } }
if ( ! getFilter ( ) . noMeasurementsAvailable ( ) ) { double runningAverage = getFilter ( ) . calculateRssi ( ) ; mBeacon . setRunningAverageRssi ( runningAverage ) ; mBeacon . setRssiMeasurementCount ( getFilter ( ) . getMeasurementCount ( ) ) ; LogManager . d ( TAG , "calculated new runningAverageRssi: %s" , runningAverage ) ; } else { LogManager . d ( TAG , "No measurements available to calculate running average" ) ; } mBeacon . setPacketCount ( packetCount ) ; packetCount = 0 ;
public class Dialogs { /** * 获得一个 { @ link Dialog } 对象 * @ param title 标题 * @ param ok 确认按钮 * @ return { @ link Dialog } */ public static Dialog < String [ ] > getDialog ( String title , ButtonType ok ) { } }
Dialog < String [ ] > dialog = new Dialog < > ( ) ; dialog . setTitle ( title ) ; dialog . setHeaderText ( null ) ; dialog . initModality ( Modality . APPLICATION_MODAL ) ; // 自定义确认和取消按钮 ButtonType cancel = new ButtonType ( CANCEL_BUTTON_TEXT , ButtonData . CANCEL_CLOSE ) ; dialog . getDialogPane ( ) . getButtonTypes ( ) . addAll ( ok , cancel ) ; return dialog ;
public class CommonIronJacamarParser {
    /**
     * Store a pool as a &lt;pool&gt; XML element.
     * <p>
     * Optional sizing elements are written only when present; min/max pool size,
     * prefill and flush-strategy are additionally skipped when they equal the
     * defaults and carry no expression, to keep the output minimal.
     * @param pool The pool
     * @param writer The writer
     * @exception Exception Thrown if an error occurs
     */
    protected void storePool(Pool pool, XMLStreamWriter writer) throws Exception {
        writer.writeStartElement(CommonXML.ELEMENT_POOL);
        // Attributes must be written before any child elements.
        if (pool.getType() != null)
            writer.writeAttribute(CommonXML.ATTRIBUTE_TYPE, pool.getValue(CommonXML.ATTRIBUTE_TYPE, pool.getType()));
        if (pool.getJanitor() != null)
            writer.writeAttribute(CommonXML.ATTRIBUTE_JANITOR, pool.getValue(CommonXML.ATTRIBUTE_JANITOR, pool.getJanitor()));
        // min-pool-size: only emitted when non-default or expression-backed.
        if (pool.getMinPoolSize() != null && (pool.hasExpression(CommonXML.ELEMENT_MIN_POOL_SIZE) || !Defaults.MIN_POOL_SIZE.equals(pool.getMinPoolSize()))) {
            writer.writeStartElement(CommonXML.ELEMENT_MIN_POOL_SIZE);
            writer.writeCharacters(pool.getValue(CommonXML.ELEMENT_MIN_POOL_SIZE, pool.getMinPoolSize().toString()));
            writer.writeEndElement();
        }
        // initial-pool-size: emitted whenever set (no default to compare against).
        if (pool.getInitialPoolSize() != null) {
            writer.writeStartElement(CommonXML.ELEMENT_INITIAL_POOL_SIZE);
            writer.writeCharacters(pool.getValue(CommonXML.ELEMENT_INITIAL_POOL_SIZE, pool.getInitialPoolSize().toString()));
            writer.writeEndElement();
        }
        // max-pool-size: only emitted when non-default or expression-backed.
        if (pool.getMaxPoolSize() != null && (pool.hasExpression(CommonXML.ELEMENT_MAX_POOL_SIZE) || !Defaults.MAX_POOL_SIZE.equals(pool.getMaxPoolSize()))) {
            writer.writeStartElement(CommonXML.ELEMENT_MAX_POOL_SIZE);
            writer.writeCharacters(pool.getValue(CommonXML.ELEMENT_MAX_POOL_SIZE, pool.getMaxPoolSize().toString()));
            writer.writeEndElement();
        }
        // prefill: only emitted when non-default or expression-backed.
        if (pool.isPrefill() != null && (pool.hasExpression(CommonXML.ELEMENT_PREFILL) || !Defaults.PREFILL.equals(pool.isPrefill()))) {
            writer.writeStartElement(CommonXML.ELEMENT_PREFILL);
            writer.writeCharacters(pool.getValue(CommonXML.ELEMENT_PREFILL, pool.isPrefill().toString()));
            writer.writeEndElement();
        }
        // flush-strategy: only emitted when non-default or expression-backed.
        if (pool.getFlushStrategy() != null && (pool.hasExpression(CommonXML.ELEMENT_FLUSH_STRATEGY) || !Defaults.FLUSH_STRATEGY.equals(pool.getFlushStrategy()))) {
            writer.writeStartElement(CommonXML.ELEMENT_FLUSH_STRATEGY);
            writer.writeCharacters(pool.getValue(CommonXML.ELEMENT_FLUSH_STRATEGY, pool.getFlushStrategy().toString()));
            writer.writeEndElement();
        }
        // Nested <capacity> element, if configured.
        if (pool.getCapacity() != null)
            storeCapacity(pool.getCapacity(), writer);
        writer.writeEndElement();
    }
}
public class GBSTree { /** * Delete from current or successor . * < ol > * < li > There is no right child * < li > There may or may not be a left child . * < li > DELETE _ KEY > MEDIAN of current node . * < li > DELETE _ KEY < MEDIAN of successor ( if there is one ) * < / ol > * < p > In the example below , if the current node is GHI , then the delete * key is greater than H and less than K . If the current node is * MNO , then the delete key is greater than N and less than Q . If * the current node is STU , the insert key is greater than T and * there is no successor . < / p > * < pre > * * - - - - - J . K . L - - - - - * * * - - - - - D . E . F - - - - - * * - - - - - P . Q . R - - - - - * * A . B . C G . H . I M . N . O S . T . U * < / pre > * < p > We tried to move right and fell off the end . If the key value is * less than the low key of the successor or if there is no successor , * the delete key ( if any ) is in the right half of the current node . * Otherwise , the delete key ( if any ) is in the left half of the * successor node . < / p > * @ param p Current node from which we tried to move left * @ param l Last node from which we actually moved left ( logical successor ) * @ param deleteKey Key being deleted * @ param point Returned delete point */ private void rightDelete ( GBSNode p , GBSNode l , Object deleteKey , DeleteNode point ) { } }
if ( l == null ) /* There is no upper successor */ rightDeleteNoSuccessor ( p , deleteKey , point ) ; else /* There is an upper successor */ rightDeleteWithSuccessor ( p , l , deleteKey , point ) ;
public class SVGForTextInBoxTree { /** * generating */ private void generateEdges ( StringBuilder result , TextInBox parent ) { } }
if ( ! getTree ( ) . isLeaf ( parent ) ) { Rectangle2D . Double b1 = getBoundsOfNode ( parent ) ; double x1 = b1 . getCenterX ( ) ; double y1 = b1 . getCenterY ( ) ; for ( TextInBox child : getChildren ( parent ) ) { Rectangle2D . Double b2 = getBoundsOfNode ( child ) ; result . append ( line ( x1 , y1 , b2 . getCenterX ( ) , b2 . getCenterY ( ) , "stroke:black; stroke-width:2px;" ) ) ; generateEdges ( result , child ) ; } }
public class LayoutStructure { /** * Add a parameter to this LayoutStructure . * @ param paramName the name of the parameter * @ param paramValue the value of the parameter */ public void addParameter ( String paramName , String paramValue ) { } }
this . parameters . add ( new StructureParameter ( paramName , paramValue ) ) ;
public class CmsSolrIndex { /** * Default search method . < p > * @ param cms the current CMS object * @ param query the query * @ return the results * @ throws CmsSearchException if something goes wrong * @ see # search ( CmsObject , String ) */ public CmsSolrResultList search ( CmsObject cms , SolrQuery query ) throws CmsSearchException { } }
return search ( cms , CmsEncoder . decode ( query . toString ( ) ) ) ;
public class ConvertDMatrixStruct { /** * Converts { @ link DMatrix6x6 } into { @ link DMatrixRMaj } . * @ param input Input matrix . * @ param output Output matrix . If null a new matrix will be declared . * @ return Converted matrix . */ public static DMatrixRMaj convert ( DMatrix6x6 input , DMatrixRMaj output ) { } }
if ( output == null ) output = new DMatrixRMaj ( 6 , 6 ) ; output . reshape ( input . getNumRows ( ) , input . getNumCols ( ) ) ; output . data [ 0 ] = input . a11 ; output . data [ 1 ] = input . a12 ; output . data [ 2 ] = input . a13 ; output . data [ 3 ] = input . a14 ; output . data [ 4 ] = input . a15 ; output . data [ 5 ] = input . a16 ; output . data [ 6 ] = input . a21 ; output . data [ 7 ] = input . a22 ; output . data [ 8 ] = input . a23 ; output . data [ 9 ] = input . a24 ; output . data [ 10 ] = input . a25 ; output . data [ 11 ] = input . a26 ; output . data [ 12 ] = input . a31 ; output . data [ 13 ] = input . a32 ; output . data [ 14 ] = input . a33 ; output . data [ 15 ] = input . a34 ; output . data [ 16 ] = input . a35 ; output . data [ 17 ] = input . a36 ; output . data [ 18 ] = input . a41 ; output . data [ 19 ] = input . a42 ; output . data [ 20 ] = input . a43 ; output . data [ 21 ] = input . a44 ; output . data [ 22 ] = input . a45 ; output . data [ 23 ] = input . a46 ; output . data [ 24 ] = input . a51 ; output . data [ 25 ] = input . a52 ; output . data [ 26 ] = input . a53 ; output . data [ 27 ] = input . a54 ; output . data [ 28 ] = input . a55 ; output . data [ 29 ] = input . a56 ; output . data [ 30 ] = input . a61 ; output . data [ 31 ] = input . a62 ; output . data [ 32 ] = input . a63 ; output . data [ 33 ] = input . a64 ; output . data [ 34 ] = input . a65 ; output . data [ 35 ] = input . a66 ; return output ;