signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class SparkDl4jMultiLayer { /** * Evaluate on a directory containing a set of DataSet objects to be loaded with a { @ link DataSetLoader } .
* Uses default batch size of { @ link # DEFAULT _ EVAL _ SCORE _ BATCH _ SIZE }
* @ param path Path / URI to the directory containing the datasets to load
* @ return Evaluation */
public < T extends Evaluation > T evaluate ( String path , int batchSize , DataSetLoader loader ) { } } | JavaRDD < String > paths ; try { paths = SparkUtils . listPaths ( sc , path ) ; } catch ( IOException e ) { throw new RuntimeException ( "Error listing paths in directory" , e ) ; } JavaRDD < DataSet > rdd = paths . map ( new LoadDataSetFunction ( loader , new RemoteFileSourceFactory ( BroadcastHadoopConfigHolder . get ( sc ) ) ) ) ; return ( T ) doEvaluation ( rdd , batchSize , new org . deeplearning4j . eval . Evaluation ( ) ) [ 0 ] ; |
public class CSSIdentifierSerializer { /** * Serialize a CSS identifier .
* NOTE : This code was adapted from Mathias Bynens ' CSS . escape polyfill , available under the MIT license .
* See https : / / drafts . csswg . org / cssom / # serialize - an - identifier and https : / / github . com / mathiasbynens / CSS . escape .
* @ param identifier the identifier to serialize
* @ return the serialized identifier
* @ throws IllegalArgumentException if the input contains U + 0000 */
public String serialize ( String identifier ) { } } | if ( StringUtils . isEmpty ( identifier ) ) { return identifier ; } int length = identifier . length ( ) ; int index = - 1 ; StringBuilder result = new StringBuilder ( ) ; int firstCodeUnit = identifier . charAt ( 0 ) ; while ( ++ index < length ) { int codeUnit = identifier . charAt ( index ) ; // Note : there ' s no need to special - case astral symbols , surrogate pairs , or lone surrogates .
// If the character is NULL ( U + 0000 ) , throw an IllegalArgumentException exception and terminate these steps .
if ( codeUnit == 0x0000 ) { throw new IllegalArgumentException ( "Invalid character: the input contains U+0000." ) ; } else if ( shouldEscapeAsCodePoint ( codeUnit , index , firstCodeUnit ) ) { // https : / / drafts . csswg . org / cssom / # escape - a - character - as - code - point
result . append ( '\\' ) . append ( Integer . toHexString ( codeUnit ) ) . append ( ' ' ) ; } else if ( index == 0 && length == 1 && codeUnit == 0x002D ) { // If the character is the first character and is a ' - ' ( U + 002D ) , and there is no second character
// https : / / drafts . csswg . org / cssom / # escape - a - character
result . append ( '\\' ) . append ( identifier . charAt ( index ) ) ; } else if ( shouldNotEscape ( codeUnit ) ) { // the character itself
result . append ( identifier . charAt ( index ) ) ; } else { // Otherwise , the escaped character .
// https : / / drafts . csswg . org / cssom / # escape - a - character
result . append ( '\\' ) . append ( identifier . charAt ( index ) ) ; } } return result . toString ( ) ; |
public class JPATxEmInvocation {
    /**
     * Looks up an entity by primary key, delegating to the wrapped entity manager.
     *
     * @see javax.persistence.EntityManager#find(java.lang.Class, java.lang.Object)
     */
    @Override
    public <T> T find(Class<T> entityClass, Object primaryKey) {
        try {
            return ivEm.find(entityClass, primaryKey);
        } finally {
            // When no JTA transaction is active, clear the entity manager so the
            // looked-up entity does not remain managed after this call.
            if (!inJTATransaction()) {
                ivEm.clear();
            }
        }
    }
}
public class TreeLoader {
    /**
     * Initialize the loader after dependency injection.
     *
     * @throws ConfigException if the mandatory 'path' attribute was not configured
     */
    @PostConstruct
    @Override
    public void init() {
        // The backing directory is mandatory configuration.
        if (_dir == null)
            throw new ConfigException(L.l("<tree-loader> requires a 'path' attribute"));
        // Touch the modification time so change tracking starts from now.
        _dir.getLastModified();
        super.init();
    }
}
public class DisjointMultiAdditionNeighbourhood { /** * Generates the list of all possible moves that perform \ ( k \ ) additions , where \ ( k \ ) is the fixed number
* specified at construction . Note : taking into account the current number of unselected items , the imposed
* maximum subset size ( if set ) and the fixed IDs ( if any ) may result in fewer additions ( as many as possible ) .
* May return an empty list if no moves can be generated .
* @ param solution solution for which all possible multi addition moves are generated
* @ return list of all multi addition moves , may be empty */
@ Override public List < SubsetMove > getAllMoves ( SubsetSolution solution ) { } } | // create empty list to store generated moves
List < SubsetMove > moves = new ArrayList < > ( ) ; // get set of candidate IDs for addition ( fixed IDs are discarded )
Set < Integer > addCandidates = getAddCandidates ( solution ) ; // compute number of additions
int curNumAdd = numAdditions ( addCandidates , solution ) ; if ( curNumAdd == 0 ) { // impossible : return empty set
return moves ; } // create all moves that add curNumAdd items
Set < Integer > add ; SubsetIterator < Integer > itAdd = new SubsetIterator < > ( addCandidates , curNumAdd ) ; while ( itAdd . hasNext ( ) ) { add = itAdd . next ( ) ; // create and add move
moves . add ( new GeneralSubsetMove ( add , Collections . emptySet ( ) ) ) ; } // return all moves
return moves ; |
public class Krb5Common { /** * This method set the system property if the property is null or property value is not the same with the new value
* @ param propName
* @ param propValue
* @ return */
@ SuppressWarnings ( { } } | "unchecked" , "rawtypes" } ) public static String setPropertyAsNeeded ( final String propName , final String propValue ) { String previousPropValue = ( String ) java . security . AccessController . doPrivileged ( new java . security . PrivilegedAction ( ) { @ Override public String run ( ) { String oldPropValue = System . getProperty ( propName ) ; if ( propValue == null ) { System . clearProperty ( propName ) ; } else if ( ! propValue . equalsIgnoreCase ( oldPropValue ) ) { System . setProperty ( propName , propValue ) ; } return oldPropValue ; } } ) ; if ( tc . isDebugEnabled ( ) ) Tr . debug ( tc , propName + " property previous: " + ( ( previousPropValue != null ) ? previousPropValue : "<null>" ) + " and now: " + propValue ) ; return previousPropValue ; |
public class LoOP {
    /**
     * Compute the probabilistic distances used by LoOP.
     *
     * @param relation Data relation
     * @param knn kNN query
     * @param pdists Storage for distances
     */
    protected void computePDists(Relation<O> relation, KNNQuery<O> knn, WritableDoubleDataStore pdists) {
        // computing PRDs
        FiniteProgress prdsProgress = LOG.isVerbose() ? new FiniteProgress("pdists", relation.size(), LOG) : null;
        for (DBIDIter iditer = relation.iterDBIDs(); iditer.valid(); iditer.advance()) {
            // kreach + 1 because the query point itself is part of the kNN result.
            final KNNList neighbors = knn.getKNNForDBID(iditer, kreach + 1);
            // Use the first kreach neighbors (excluding the query point) as reference set.
            int ks = 0;
            double ssum = 0.;
            for (DoubleDBIDListIter neighbor = neighbors.iter(); neighbor.valid() && ks < kreach; neighbor.advance()) {
                if (DBIDUtil.equal(neighbor, iditer)) {
                    continue; // skip the query point itself
                }
                final double d = neighbor.doubleValue();
                ssum += d * d; // sum of squared neighbor distances
                ks++;
            }
            // Quadratic mean of the distances; 0 when no usable neighbors were found.
            double pdist = ks > 0 ? FastMath.sqrt(ssum / ks) : 0.;
            pdists.putDouble(iditer, pdist);
            LOG.incrementProcessed(prdsProgress);
        }
        LOG.ensureCompleted(prdsProgress);
    }
}
public class TableStreamer {
    /**
     * Set the positions of the buffers to the start of the content, leaving some room
     * for the headers.
     *
     * @param buffers one buffer per snapshot table task, in matching order
     */
    private void prepareBuffers(List<DBBPool.BBContainer> buffers) {
        // Buffers and tasks must correspond one-to-one.
        Preconditions.checkArgument(buffers.size() == m_tableTasks.size());
        UnmodifiableIterator<SnapshotTableTask> iterator = m_tableTasks.iterator();
        for (DBBPool.BBContainer container : buffers) {
            // Reserve the header bytes required by the corresponding task's target.
            int headerSize = iterator.next().m_target.getHeaderSize();
            final ByteBuffer buf = container.b();
            buf.clear();
            buf.position(headerSize);
        }
    }
}
public class N { /** * Returns an immutable empty set if the specified Set is < code > null < / code > , otherwise itself is returned .
* @ param set
* @ return */
public static < T > Set < T > nullToEmpty ( final Set < T > set ) { } } | return set == null ? N . < T > emptySet ( ) : set ; |
public class Settings { /** * Effective value as { @ code Double } .
* @ return the value as { @ code Double } . If the property does not have value nor default value , then { @ code null } is returned .
* @ throws NumberFormatException if value is not empty and is not a parsable number */
@ CheckForNull public Double getDouble ( String key ) { } } | String value = getString ( key ) ; if ( StringUtils . isNotEmpty ( value ) ) { try { return Double . valueOf ( value ) ; } catch ( NumberFormatException e ) { throw new IllegalStateException ( String . format ( "The property '%s' is not a double value" , key ) ) ; } } return null ; |
public class MiniProfiler { /** * Stop the profiler .
* This should be called in a { @ code finally } block to avoid leaving data on
* the thread .
* @ return The profiling data . */
protected static Profile stop ( ) { } } | Root result = PROFILER_STEPS . get ( ) ; PROFILER_STEPS . remove ( ) ; return result != null ? result . popData ( ) : null ; |
public class HFactory { /** * Create a counter column with a name and long value */
public static < N > HCounterColumn < N > createCounterColumn ( N name , long value , Serializer < N > nameSerializer ) { } } | return new HCounterColumnImpl < N > ( name , value , nameSerializer ) ; |
public class CmsXmlGroupContainerFactory { /** * Returns the cached group container . < p >
* @ param cms the cms context
* @ param resource the group container resource
* @ param keepEncoding if to keep the encoding while unmarshalling
* @ return the cached group container , or < code > null < / code > if not found */
private static CmsXmlGroupContainer getCache ( CmsObject cms , CmsResource resource , boolean keepEncoding ) { } } | if ( resource instanceof I_CmsHistoryResource ) { return null ; } return getCache ( ) . getCacheGroupContainer ( getCache ( ) . getCacheKey ( resource . getStructureId ( ) , keepEncoding ) , cms . getRequestContext ( ) . getCurrentProject ( ) . isOnlineProject ( ) ) ; |
public class PostVersionedIOReadableWritable { /** * This read attempts to first identify if the input view contains the special
* { @ link # VERSIONED _ IDENTIFIER } by reading and buffering the first few bytes .
* If identified to be versioned , the usual version resolution read path
* in { @ link VersionedIOReadableWritable # read ( DataInputView ) } is invoked .
* Otherwise , we " reset " the input stream by pushing back the read buffered bytes
* into the stream . */
public final void read ( InputStream inputStream ) throws IOException { } } | byte [ ] tmp = new byte [ VERSIONED_IDENTIFIER . length ] ; inputStream . read ( tmp ) ; if ( Arrays . equals ( tmp , VERSIONED_IDENTIFIER ) ) { DataInputView inputView = new DataInputViewStreamWrapper ( inputStream ) ; super . read ( inputView ) ; read ( inputView , true ) ; } else { PushbackInputStream resetStream = new PushbackInputStream ( inputStream , VERSIONED_IDENTIFIER . length ) ; resetStream . unread ( tmp ) ; read ( new DataInputViewStreamWrapper ( resetStream ) , false ) ; } |
public class EncodingUtils { /** * Hex encode string .
* @ param data the data
* @ return the string */
public static String hexEncode ( final byte [ ] data ) { } } | try { val result = Hex . encodeHex ( data ) ; return new String ( result ) ; } catch ( final Exception e ) { return null ; } |
public class FileSystemUtil { /** * Finds potential datasets by crawling a directory tree .
* This method looks for any data files and directories appear to form a
* dataset . This deliberately ignores information that may be stored in the
* Hive metastore or . metadata folders .
* Recognizes only Avro , Parquet , and JSON data files .
* @ param fs a FileSystem for the root path
* @ param path a root Path that will be searched
* @ return a Collection with a DatasetDescriptor for each potential dataset .
* @ throws IOException */
public static Collection < DatasetDescriptor > findPotentialDatasets ( FileSystem fs , Path path ) throws IOException { } } | List < DatasetDescriptor > descriptors = Lists . newArrayList ( ) ; Result result = visit ( new FindDatasets ( ) , fs , path ) ; if ( result instanceof Result . Table ) { descriptors . add ( descriptor ( fs , ( Result . Table ) result ) ) ; } else if ( result instanceof Result . Group ) { for ( Result . Table table : ( ( Result . Group ) result ) . tables ) { descriptors . add ( descriptor ( fs , table ) ) ; } } return descriptors ; |
public class File { /** * Returns a Uniform Resource Locator for this file . The URL is system
* dependent and may not be transferable between different operating / file
* systems .
* @ return a URL for this file .
* @ throws java . net . MalformedURLException
* if the path cannot be transformed into a URL .
* @ deprecated Use { @ link # toURI } and { @ link java . net . URI # toURL } to
* correctly escape illegal characters . */
@ Deprecated public URL toURL ( ) throws java . net . MalformedURLException { } } | String name = getAbsoluteName ( ) ; if ( ! name . startsWith ( "/" ) ) { // start with sep .
return new URL ( "file" , "" , - 1 , "/" + name , null ) ; } else if ( name . startsWith ( "//" ) ) { return new URL ( "file:" + name ) ; // UNC path
} return new URL ( "file" , "" , - 1 , name , null ) ; |
public class DateTimePickerRenderer { /** * Get date in string format
* @ param fc The FacesContext
* @ param dtp the DateTimePicker component
* @ param value The date to display
* @ param javaFormatString The format string as defined by the SimpleDateFormat syntax
* @ param locale The locale
* @ return null if the value is null . */
public static String getDateAsString ( FacesContext fc , DateTimePicker dtp , Object value , String javaFormatString , Locale locale ) { } } | if ( value == null ) { return null ; } Converter converter = dtp . getConverter ( ) ; return converter == null ? getInternalDateAsString ( value , javaFormatString , locale ) : converter . getAsString ( fc , dtp , value ) ; |
public class WeightedXMLExcerpt { /** * { @ inheritDoc } */
protected String createExcerpt ( TermPositionVector tpv , String text , int maxFragments , int maxFragmentSize ) throws IOException { } } | return WeightedHighlighter . highlight ( tpv , getQueryTerms ( ) , text , maxFragments , maxFragmentSize / 2 ) ; |
public class TranslationServiceClient { /** * Creates a glossary and returns the long - running operation . Returns NOT _ FOUND , if the project
* doesn ' t exist .
* < p > Sample code :
* < pre > < code >
* try ( TranslationServiceClient translationServiceClient = TranslationServiceClient . create ( ) ) {
* String formattedParent = TranslationServiceClient . formatLocationName ( " [ PROJECT ] " , " [ LOCATION ] " ) ;
* Glossary glossary = Glossary . newBuilder ( ) . build ( ) ;
* Glossary response = translationServiceClient . createGlossaryAsync ( formattedParent , glossary ) . get ( ) ;
* < / code > < / pre >
* @ param parent Required . The project name .
* @ param glossary Required . The glossary to create .
* @ throws com . google . api . gax . rpc . ApiException if the remote call fails */
@ BetaApi ( "The surface for long-running operations is not stable yet and may change in the future." ) public final OperationFuture < Glossary , CreateGlossaryMetadata > createGlossaryAsync ( String parent , Glossary glossary ) { } } | LOCATION_PATH_TEMPLATE . validate ( parent , "createGlossary" ) ; CreateGlossaryRequest request = CreateGlossaryRequest . newBuilder ( ) . setParent ( parent ) . setGlossary ( glossary ) . build ( ) ; return createGlossaryAsync ( request ) ; |
public class DisasterRecoveryConfigurationsInner {
    /**
     * Fails over from the current primary server to this server. This operation might
     * result in data loss.
     *
     * @param resourceGroupName The name of the resource group that contains the resource.
     *        You can obtain this value from the Azure Resource Manager API or the portal.
     * @param serverName The name of the server.
     * @param disasterRecoveryConfigurationName The name of the disaster recovery
     *        configuration to failover forcefully.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     */
    public void beginFailoverAllowDataLoss(String resourceGroupName, String serverName, String disasterRecoveryConfigurationName) {
        // Synchronous wrapper: blocks on the async call; the (void) body is discarded.
        beginFailoverAllowDataLossWithServiceResponseAsync(resourceGroupName, serverName, disasterRecoveryConfigurationName).toBlocking().single().body();
    }
}
public class ResourceChange { /** * For the < code > Modify < / code > action , indicates which resource attribute is triggering this update , such as a
* change in the resource attribute ' s < code > Metadata < / code > , < code > Properties < / code > , or < code > Tags < / code > .
* @ param scope
* For the < code > Modify < / code > action , indicates which resource attribute is triggering this update , such as
* a change in the resource attribute ' s < code > Metadata < / code > , < code > Properties < / code > , or < code > Tags < / code > .
* @ return Returns a reference to this object so that method calls can be chained together .
* @ see ResourceAttribute */
public ResourceChange withScope ( ResourceAttribute ... scope ) { } } | com . amazonaws . internal . SdkInternalList < String > scopeCopy = new com . amazonaws . internal . SdkInternalList < String > ( scope . length ) ; for ( ResourceAttribute value : scope ) { scopeCopy . add ( value . toString ( ) ) ; } if ( getScope ( ) == null ) { setScope ( scopeCopy ) ; } else { getScope ( ) . addAll ( scopeCopy ) ; } return this ; |
public class RestClientUtil {
    /**
     * Creates an index document; the index table name for the corresponding time
     * period is generated according to the date/time format configured in
     * elasticsearch.xml. For Elasticsearch 7 and 7+.
     *
     * @param indexName name of the target index
     * @param bean the document payload
     * @param parentId the parent document id
     * @return the response body returned by Elasticsearch
     * @throws ElasticSearchException if the indexing request fails
     */
    public String addDocumentWithParentId(String indexName, Object bean, Object parentId) throws ElasticSearchException {
        // Delegates to the typed overload; _doc is presumably the ES7 default
        // document type constant defined elsewhere in this class — TODO confirm.
        return addDocumentWithParentId(indexName, _doc, bean, parentId);
    }
}
public class CPDefinitionLinkPersistenceImpl {
    /**
     * Returns a range of all the cp definition links where CProductId = &#63;.
     *
     * Useful when paginating results. Returns a maximum of <code>end - start</code>
     * instances; <code>start</code> and <code>end</code> are result-set indexes, not
     * primary keys. Setting both to {@link QueryUtil#ALL_POS} returns the full result set.
     *
     * @param CProductId the c product ID
     * @param start the lower bound of the range of cp definition links
     * @param end the upper bound of the range of cp definition links (not inclusive)
     * @return the range of matching cp definition links
     */
    @Override
    public List<CPDefinitionLink> findByCProductId(long CProductId, int start, int end) {
        // Delegate to the full overload with no ORDER BY comparator (default ordering).
        return findByCProductId(CProductId, start, end, null);
    }
}
public class TldFernClassifier { /** * Increments the P and N value for a fern . Also updates the maxP and maxN statistics so that it
* knows when to re - normalize data structures . */
private void increment ( TldFernFeature f , boolean positive ) { } } | if ( positive ) { f . incrementP ( ) ; if ( f . numP > maxP ) maxP = f . numP ; } else { f . incrementN ( ) ; if ( f . numN > maxN ) maxN = f . numN ; } |
public class BaseMessagingEngineImpl {
    /**
     * Return the cache of localization point config objects (not a copy).
     *
     * @return Returns the _lpConfig.
     */
    final ArrayList getLPConfigObjects() {
        String thisMethodName = "getLPConfigObjects";
        // Entry and exit trace are emitted together since the method has no other logic.
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
            SibTr.entry(tc, thisMethodName, this);
            SibTr.exit(tc, thisMethodName, _lpConfig);
        }
        // NOTE(review): the internal list is returned directly, so callers can mutate it.
        return _lpConfig;
    }
}
public class Functions {
    /**
     * Returns a function which performs a map lookup. The returned function throws an
     * {@link IllegalArgumentException} if given a key that does not exist in the map.
     * See also {@link #forMap(Map, Object)}, which returns a default value in this case.
     *
     * @param map the backing map used for lookups
     * @return a function mapping each key in {@code map} to its value
     */
    public static <K, V> Function<K, V> forMap(Map<K, V> map) {
        return key -> {
            // Guard clause: reject unknown keys before the lookup. containsKey is
            // used (rather than a null check on get) so null values are supported.
            if (!map.containsKey(key)) {
                throw new IllegalArgumentException("Key '" + key + "' not present in map");
            }
            return map.get(key);
        };
    }
}
public class BucketNotificationConfiguration { /** * Gets the list of
* { @ link BucketNotificationConfiguration . TopicConfiguration } objects
* contained in this object . This method may return an empty list if no
* < code > TopicConfiguration < / code > objects are present .
* This method is deprecated and will not return all the notification
* configuration associated with the Amazon S3 bucket . To retrieve all the
* configuration use @ see
* BucketNotificationConfiguration # getConfigurations ( )
* @ deprecated
* @ see BucketNotificationConfiguration # getConfigurations ( )
* @ return The list of < code > TopicConfiguration < / code > objects contained in
* this object . May return an empty list . */
public List < TopicConfiguration > getTopicConfigurations ( ) { } } | List < TopicConfiguration > topicConfigs = new ArrayList < BucketNotificationConfiguration . TopicConfiguration > ( ) ; for ( Map . Entry < String , NotificationConfiguration > entry : configurations . entrySet ( ) ) { if ( entry . getValue ( ) instanceof TopicConfiguration ) { topicConfigs . add ( ( TopicConfiguration ) entry . getValue ( ) ) ; } } return topicConfigs ; |
public class Utils { /** * Stores the resources from a directory into a map .
* @ param directory an existing directory
* @ param exclusionPatteners a non - null list of exclusion patterns for file names ( e . g . " . * \ \ . properties " )
* @ return a non - null map ( key = the file location , relative to the directory , value = file content )
* @ throws IOException if something went wrong while reading a file */
public static Map < String , byte [ ] > storeDirectoryResourcesAsBytes ( File directory , List < String > exclusionPatteners ) throws IOException { } } | if ( ! directory . exists ( ) ) throw new IllegalArgumentException ( "The resource directory was not found. " + directory . getAbsolutePath ( ) ) ; if ( ! directory . isDirectory ( ) ) throw new IllegalArgumentException ( "The resource directory is not a valid directory. " + directory . getAbsolutePath ( ) ) ; Map < String , byte [ ] > result = new HashMap < > ( ) ; List < File > resourceFiles = listAllFiles ( directory , false ) ; fileLoop : for ( File file : resourceFiles ) { for ( String exclusionPattern : exclusionPatteners ) { if ( file . getName ( ) . matches ( exclusionPattern ) ) continue fileLoop ; } String key = computeFileRelativeLocation ( directory , file ) ; ByteArrayOutputStream os = new ByteArrayOutputStream ( ) ; Utils . copyStream ( file , os ) ; result . put ( key , os . toByteArray ( ) ) ; } return result ; |
public class CRFLogConditionalObjectiveFunction {
    /**
     * Computes value of function for specified value of x (scaled by xscale),
     * only over samples indexed by batch.
     *
     * NOTE: This function does not do regularization (regularization is done by
     * the minimizer).
     *
     * @param x - unscaled weights
     * @param xscale - how much to scale x by when performing calculations
     * @param batch - indices of which samples to compute function over
     * @return value of function at specified x (scaled by xscale) for samples
     */
    public double valueAt(double[] x, double xscale, int[] batch) {
        double prob = 0; // the log prob of the sequence given the model, which is the negation of value at this point
        double[] weights = x;
        int[][] wis = getWeightIndices();
        // Label history window (previous window-1 labels) used for conditioning.
        int[] given = new int[window - 1];
        // NOTE(review): docCliqueLabels is allocated but never read in this method.
        int[][] docCliqueLabels = new int[window][];
        for (int j = 0; j < window; j++) {
            docCliqueLabels[j] = new int[j + 1];
        }
        // iterate over all the documents in the batch
        for (int m = 0; m < batch.length; m++) {
            int ind = batch[m];
            int[][][] docData = data[ind];
            int[] docLabels = labels[ind];
            // make a clique tree for this document
            CRFCliqueTree cliqueTree = CRFCliqueTree.getCalibratedCliqueTree(weights, xscale, wis, docData, labelIndices, numClasses, classIndex, backgroundSymbol);
            // compute the log probability of the document given the model with the parameters x;
            // start the history filled with the background symbol.
            Arrays.fill(given, classIndex.indexOf(backgroundSymbol));
            if (docLabels.length > docData.length) {
                // only true for self-training:
                // fill the given array with the extra docLabels
                System.arraycopy(docLabels, 0, given, 0, given.length);
                // shift the docLabels array left
                int[] newDocLabels = new int[docData.length];
                System.arraycopy(docLabels, docLabels.length - newDocLabels.length, newDocLabels, 0, newDocLabels.length);
                docLabels = newDocLabels;
            }
            // iterate over the positions in this document, accumulating the
            // conditional log probability of each gold label given its history
            for (int i = 0; i < docData.length; i++) {
                int label = docLabels[i];
                double p = cliqueTree.condLogProbGivenPrevious(i, label, given);
                if (VERBOSE) {
                    System.err.println("P(" + label + "|" + ArrayMath.toString(given) + ")=" + p);
                }
                prob += p;
                // Shift window over: drop the oldest label, append the current one.
                System.arraycopy(given, 1, given, 0, given.length - 1);
                given[given.length - 1] = label;
            }
        }
        if (Double.isNaN(prob)) { // shouldn't be the case
            throw new RuntimeException("Got NaN for prob in CRFLogConditionalObjectiveFunction.calculate()");
        }
        // The objective value is the negative log likelihood.
        value = -prob;
        return value;
    }
}
public class V1InstanceCreator {
    /**
     * Create a new effort record with a value and date, assigned to the given
     * workitem and member.
     *
     * @param value the value of the effort record.
     * @param item the workitem to assign the effort record to.
     * @param member the member to assign the effort record to. If is null then
     *        member not set.
     * @param date the date to log the effort record against. If is null then
     *        date not set.
     * @return A newly minted Effort Record that exists in the VersionOne system.
     * @throws IllegalStateException if Effort tracking is not enabled.
     */
    public Effort effort(double value, Workitem item, Member member, DateTime date) throws IllegalStateException {
        // Delegate to the full overload; the final (null) argument is an optional
        // parameter of that overload that is intentionally left unset here.
        return effort(value, item, member, date, null);
    }
}
public class CPDefinitionLinkUtil {
    /**
     * Returns the last cp definition link in the ordered set where CProductId = &#63;
     * and type = &#63;.
     *
     * @param CProductId the c product ID
     * @param type the type
     * @param orderByComparator the comparator to order the set by (optionally <code>null</code>)
     * @return the last matching cp definition link, or <code>null</code> if a matching
     *         cp definition link could not be found
     */
    public static CPDefinitionLink fetchByCP_T_Last(long CProductId, String type, OrderByComparator<CPDefinitionLink> orderByComparator) {
        // Static facade: delegates straight to the persistence implementation.
        return getPersistence().fetchByCP_T_Last(CProductId, type, orderByComparator);
    }
}
public class CacheMap {
    /**
     * Add an entry to the map consisting of the specified (key, value) pair. A key
     * is allowed to simultaneously map to multiple values. If the maximum number of
     * entries for the CacheMap is exceeded, an entry is discarded.
     *
     * @param key the key.
     * @param value the value.
     * @return the discarded value, or null if none.
     */
    public final Object add(Object key, Object value) {
        // Masking with Integer.MAX_VALUE forces a non-negative hash before the modulo.
        int bucketIndex = (key.hashCode() & Integer.MAX_VALUE) % numBuckets;
        int bucketSize = bucketSizes[bucketIndex]++;
        // Discard an entry from the bucket if it's already at the maximum bucket size.
        Object discardedObject = bucketSize == maxBucketSize ? discardFromBucket(bucketIndex, --bucketSize) : null;
        // Add the new entry. We might temporarily exceed the maximum entry limit by 1. In
        // that case we will end up removing an entry before returning.
        values[bucketIndex][bucketSize] = value;
        keys[bucketIndex][bucketSize] = key;
        // Update the MRU/LRU pointer for the current bucket. This involves removing the
        // current pointer, if it exists, and creating a new MRU/LRU pointer at the MRU end
        // of the list.
        // Remove current pointer, if there is one (i.e. the bucket is already linked).
        int n, p;
        if ((n = next[bucketIndex]) != BEFORE_LRU) {
            previous[n] = p = previous[bucketIndex];
            next[p] = n;
        }
        // Create a new MRU/LRU pointer at the MRU end of the list.
        p = previous[AFTER_MRU];
        previous[AFTER_MRU] = next[p] = bucketIndex;
        next[bucketIndex] = AFTER_MRU;
        previous[bucketIndex] = p;
        // If we exceeded the upper limit on entries in the cache map, remove a LRU entry and
        // return it. Otherwise, if we had to discard an entry to keep from exceeding the
        // maximum bucket size, return that entry. Otherwise, return null.
        return ++numEntries > maxEntries ? removeLRU() : discardedObject;
    }
}
public class MIMEUtil {
    /**
     * Returns all MIME types for the given file extension.
     *
     * @param pFileExt the file extension
     * @return a {@link List} of {@code String}s containing the MIME types, or an empty
     *         list, if there are no known MIME types for the given file extension.
     */
    public static List<String> getMIMETypes(final String pFileExt) {
        // Lookup is case-insensitive; maskNull presumably converts a missing
        // mapping into an empty list — TODO confirm against its implementation.
        List<String> types = sExtToMIME.get(StringUtil.toLowerCase(pFileExt));
        return maskNull(types);
    }
}
public class CacheManager { /** * Get the default cache manager for the default class loader . The default class loader
* is the class loader used to load the cache2k implementation classes .
* < p > The name of default cache manager is { @ code " default " } .
* This may be changed , by { @ link # setDefaultName ( String ) } . */
public static CacheManager getInstance ( ) { } } | ClassLoader _defaultClassLoader = PROVIDER . getDefaultClassLoader ( ) ; return PROVIDER . getManager ( _defaultClassLoader , PROVIDER . getDefaultManagerName ( _defaultClassLoader ) ) ; |
public class FileComparer { /** * Removes all the similar parts from all the files
* @ param filepathes
* @ throws IOException */
public static void removeSimilaritiesAndSaveFiles ( List < String > filepathes , Log logging , Boolean isWindows ) throws IOException { } } | List < File > files = new LinkedList < File > ( ) ; for ( String path : filepathes ) { files . add ( new File ( path ) ) ; } FileComparer fcomparer ; for ( int i = 0 ; i < files . size ( ) ; i ++ ) { for ( int y = i + 1 ; y < files . size ( ) ; y ++ ) { fcomparer = new FileComparer ( files . get ( i ) , files . get ( y ) , logging , isWindows ) ; fcomparer . removeSimilarClassesFromFile1 ( ) ; } } |
public class FileEditPanel {
    /**
     * Mouse handler for the "browse current link" label: on double-click, opens the
     * file named in the path text field with the system viewer.
     * </editor-fold>//GEN-END:initComponents
     */
    private void labelBrowseCurrentLinkMouseClicked(java.awt.event.MouseEvent evt) { // GEN-FIRST:event_labelBrowseCurrentLinkMouseClicked
        // Only react to double-clicks (single clicks are ignored).
        if (evt.getClickCount() > 1) {
            final File file = new File(this.textFieldFilePath.getText().trim());
            NbUtils.openInSystemViewer(null, file);
        }
    }
}
public class CmsImportView { /** * Processes the result of the import operation from the server . < p >
* @ param results the string containing the results of the import sent by the server */
protected void handleImportResults ( List < CmsAliasImportResult > results ) { } } | clearResults ( ) ; for ( CmsAliasImportResult singleResult : results ) { addImportResult ( singleResult ) ; } |
public class JaxbUtils { /** * Marshals the given data . A < code > null < / code > data argument returns < code > null < / code > .
* @ param data
* Data to serialize or < code > null < / code > .
* @ param adapters
* Adapters to associate with the marshaller or < code > null < / code > .
* @ param classesToBeBound
* List of java classes to be recognized by the { @ link JAXBContext } - Cannot be < code > null < / code > .
* @ return XML data or < code > null < / code > .
* @ param < T >
* Type of the data . */
public static < T > String marshal ( final T data , final XmlAdapter < ? , ? > [ ] adapters , final Class < ? > ... classesToBeBound ) { } } | if ( data == null ) { return null ; } try { final JAXBContext ctx = JAXBContext . newInstance ( classesToBeBound ) ; return marshal ( ctx , data , adapters ) ; } catch ( final JAXBException ex ) { throw new RuntimeException ( ERROR_MARSHALLING_TEST_DATA , ex ) ; } |
public class TriggerBuilder { /** * Use a TriggerKey with the given name and group to identify the Trigger .
* If none of the ' withIdentity ' methods are set on the TriggerBuilder , then a
* random , unique TriggerKey will be generated .
* @ param name
* the name element for the Trigger ' s TriggerKey
* @ param group
* the group element for the Trigger ' s TriggerKey
* @ return the updated TriggerBuilder
* @ see TriggerKey
* @ see ITrigger # getKey ( ) */
@ Nonnull public TriggerBuilder < T > withIdentity ( final String name , final String group ) { } } | m_aKey = new TriggerKey ( name , group ) ; return this ; |
public class FileUtils { /** * Attempts to the get the canonical form of the given file , otherwise returns the absolute form of the file .
* @ param file the { @ link File } from which the canonical or absolute file will be returned .
* @ return the canonical form of the file unless an IOException occurs then return the absolute form of the file .
* @ throws NullPointerException if the file reference is null .
* @ see java . io . File # getAbsoluteFile ( )
* @ see java . io . File # getCanonicalFile ( ) */
public static File tryGetCanonicalFileElseGetAbsoluteFile ( File file ) { } } | try { return file . getCanonicalFile ( ) ; } catch ( IOException ignore ) { return file . getAbsoluteFile ( ) ; } |
public class Ifc2x3tc1PackageImpl {

  /**
   * Returns the EMF metaclass for <em>IfcElectricalCircuit</em>, resolving it lazily
   * from the registered Ifc2x3tc1 package and caching it in a field.
   *
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public EClass getIfcElectricalCircuit() {
    if (ifcElectricalCircuitEClass == null) {
      // NOTE(review): classifier index 194 must match the generated package's classifier order — confirm on regeneration
      ifcElectricalCircuitEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc2x3tc1Package.eNS_URI).getEClassifiers().get(194);
    }
    return ifcElectricalCircuitEClass;
  }
}
public class protocoludp_stats {

  /**
   * <pre>
   * converts nitro response into object and returns the object array in case of get request .
   * </pre>
   *
   * @param service the NITRO service whose payload formatter deserializes the response
   * @param response raw response string from the appliance
   * @return a one-element array holding the parsed protocoludp stats resource
   * @throws Exception if the appliance reported a fatal error or parsing fails
   */
  protected base_resource[] get_nitro_response(nitro_service service, String response) throws Exception {
    protocoludp_stats[] resources = new protocoludp_stats[1];
    protocoludp_response result = (protocoludp_response) service.get_payload_formatter().string_to_resource(protocoludp_response.class, response);
    if (result.errorcode != 0) {
      // errorcode 444 means the session is invalid; clear it so the next call re-authenticates
      if (result.errorcode == 444) {
        service.clear_session();
      }
      if (result.severity != null) {
        // only severity ERROR is fatal; other severities fall through and the payload is still returned
        if (result.severity.equals("ERROR"))
          throw new nitro_exception(result.message, result.errorcode);
      } else {
        // no severity supplied: treat any non-zero errorcode as fatal
        throw new nitro_exception(result.message, result.errorcode);
      }
    }
    resources[0] = result.protocoludp;
    return resources;
  }
}
public class FastSerializer { /** * Write a varbinary in the standard VoltDB way . That is , four
* bytes of length info followed by the bytes .
* @ param bin The byte array value to be serialized .
* @ throws IOException Rethrows any IOExceptions thrown . */
public void writeVarbinary ( byte [ ] bin ) throws IOException { } } | if ( bin == null ) { writeInt ( VoltType . NULL_STRING_LENGTH ) ; return ; } if ( bin . length > VoltType . MAX_VALUE_LENGTH ) { throw new IOException ( "Varbinary exceeds maximum length of " + VoltType . MAX_VALUE_LENGTH + " bytes." ) ; } writeInt ( bin . length ) ; write ( bin ) ; |
public class AwsSecurityFindingFilters { /** * Exclusive to findings that are generated as the result of a check run against a specific rule in a supported
* standard ( for example , AWS CIS Foundations ) . Contains compliance - related finding details .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setComplianceStatus ( java . util . Collection ) } or { @ link # withComplianceStatus ( java . util . Collection ) } if you
* want to override the existing values .
* @ param complianceStatus
* Exclusive to findings that are generated as the result of a check run against a specific rule in a
* supported standard ( for example , AWS CIS Foundations ) . Contains compliance - related finding details .
* @ return Returns a reference to this object so that method calls can be chained together . */
public AwsSecurityFindingFilters withComplianceStatus ( StringFilter ... complianceStatus ) { } } | if ( this . complianceStatus == null ) { setComplianceStatus ( new java . util . ArrayList < StringFilter > ( complianceStatus . length ) ) ; } for ( StringFilter ele : complianceStatus ) { this . complianceStatus . add ( ele ) ; } return this ; |
public class InternalXbaseParser {

  /**
   * Generated ANTLR entry rule: parses a complete XExpression followed by end-of-file.
   *
   * InternalXbase.g:58:1: entryRuleXExpression : ruleXExpression EOF ;
   */
  public final void entryRuleXExpression() throws RecognitionException {
    try {
      // InternalXbase.g:59:1: ( ruleXExpression EOF )
      // InternalXbase.g:60:1: ruleXExpression EOF
      {
        // fire the "before" grammar event only when not speculating (backtracking == 0)
        if (state.backtracking == 0) {
          before(grammarAccess.getXExpressionRule());
        }
        pushFollow(FOLLOW_1);
        ruleXExpression();
        state._fsp--;
        if (state.failed) return;
        if (state.backtracking == 0) {
          after(grammarAccess.getXExpressionRule());
        }
        // the whole input must be consumed: require EOF after the expression
        match(input, EOF, FOLLOW_2);
        if (state.failed) return;
      }
    } catch (RecognitionException re) {
      // standard generated recovery: report and resynchronize
      reportError(re);
      recover(input, re);
    } finally {
    }
    return;
  }
}
public class DescribeNetworkAclsRequest { /** * This method is intended for internal use only . Returns the marshaled request configured with additional
* parameters to enable operation dry - run . */
@ Override public Request < DescribeNetworkAclsRequest > getDryRunRequest ( ) { } } | Request < DescribeNetworkAclsRequest > request = new DescribeNetworkAclsRequestMarshaller ( ) . marshall ( this ) ; request . addParameter ( "DryRun" , Boolean . toString ( true ) ) ; return request ; |
public class MetadataFinder { /** * Given a status update from a CDJ , find the metadata for the track that it has loaded , if any . If there is
* an appropriate metadata cache , will use that , otherwise makes a query to the players dbserver .
* @ param status the CDJ status update that will be used to determine the loaded track and ask the appropriate
* player for metadata about it
* @ return the metadata that was obtained , if any */
@ SuppressWarnings ( "WeakerAccess" ) public TrackMetadata requestMetadataFrom ( final CdjStatus status ) { } } | if ( status . getTrackSourceSlot ( ) == CdjStatus . TrackSourceSlot . NO_TRACK || status . getRekordboxId ( ) == 0 ) { return null ; } final DataReference track = new DataReference ( status . getTrackSourcePlayer ( ) , status . getTrackSourceSlot ( ) , status . getRekordboxId ( ) ) ; return requestMetadataFrom ( track , status . getTrackType ( ) ) ; |
public class dns_stats {

  /**
   * <pre>
   * converts nitro response into object and returns the object array in case of get request .
   * </pre>
   *
   * @param service the NITRO service whose payload formatter deserializes the response
   * @param response raw response string from the appliance
   * @return a one-element array holding the parsed dns stats resource
   * @throws Exception if the appliance reported a fatal error or parsing fails
   */
  protected base_resource[] get_nitro_response(nitro_service service, String response) throws Exception {
    dns_stats[] resources = new dns_stats[1];
    dns_response result = (dns_response) service.get_payload_formatter().string_to_resource(dns_response.class, response);
    if (result.errorcode != 0) {
      // errorcode 444 means the session is invalid; clear it so the next call re-authenticates
      if (result.errorcode == 444) {
        service.clear_session();
      }
      if (result.severity != null) {
        // only severity ERROR is fatal; other severities fall through and the payload is still returned
        if (result.severity.equals("ERROR"))
          throw new nitro_exception(result.message, result.errorcode);
      } else {
        // no severity supplied: treat any non-zero errorcode as fatal
        throw new nitro_exception(result.message, result.errorcode);
      }
    }
    resources[0] = result.dns;
    return resources;
  }
}
public class DeviceProxyDAODefaultImpl {

  /**
   * Writes the given attributes to the device and reads back the attributes named in
   * {@code readNames} in a single server round trip. Only supported for IDL 4/5 devices
   * (device_4 or device_5 stubs); older devices raise a connection-failed error.
   *
   * @param deviceProxy the proxy of the target device
   * @param deviceAttributes attribute values to write
   * @param readNames names of the attributes to read back (used only for IDL 5 servers)
   * @return the attributes read back, or null (documented below as unreachable)
   * @throws DevFailed if access is read-only, the device is too old, or the server call fails
   */
  public DeviceAttribute[] write_read_attribute(final DeviceProxy deviceProxy, final DeviceAttribute[] deviceAttributes, final String[] readNames) throws DevFailed {
    checkIfTango(deviceProxy, "write_read_attribute");
    build_connection(deviceProxy);
    // Manage Access control
    if (deviceProxy.access == TangoConst.ACCESS_READ) {
      // ping the device to throw exception if failed (for reconnection)
      ping(deviceProxy);
      throwNotAuthorizedException(deviceProxy.devname + ".write_read_attribute()", "DeviceProxy.write_read_attribute()");
    }
    // Build an AttributeValue IDL object array
    AttributeValue_4[] attributeValues_4 = new AttributeValue_4[0];
    AttributeValue_4[] outAttrValues_4 = new AttributeValue_4[0];
    AttributeValue_5[] outAttrValues_5 = new AttributeValue_5[0];
    if (deviceProxy.device_5 != null || deviceProxy.device_4 != null) {
      attributeValues_4 = new AttributeValue_4[deviceAttributes.length];
      for (int i = 0; i < deviceAttributes.length; i++) {
        attributeValues_4[i] = deviceAttributes[i].getAttributeValueObject_4();
      }
    } else {
      // pre-IDL4 servers do not implement write_read: abort
      Except.throw_connection_failed("TangoApi_READ_ONLY_MODE", "Cannot execute write_read_attribute(), " + deviceProxy.devname + " is not a device_4Impl or above", "DeviceProxy.write_read_attribute()");
    }
    boolean done = false;
    // one extra attempt when transparent reconnection is enabled
    final int retries = deviceProxy.transparent_reconnection ? 2 : 1;
    for (int i = 0; i < retries && !done; i++) {
      // write attributes on device server
      try {
        if (deviceProxy.device_5 != null) {
          // IDL 5 supports distinct write and read attribute name lists
          outAttrValues_5 = deviceProxy.device_5.write_read_attributes_5(attributeValues_4, readNames, DevLockManager.getInstance().getClntIdent());
        } else if (deviceProxy.device_4 != null) {
          // IDL 4 reads back the written attributes only; readNames is not used here
          outAttrValues_4 = deviceProxy.device_4.write_read_attributes_4(attributeValues_4, DevLockManager.getInstance().getClntIdent());
        }
        done = true;
      } catch (final DevFailed e) {
        // Except.print_exception(e);
        throw e;
      } catch (final MultiDevFailed e) {
        // per-attribute failures are wrapped into a named list
        throw new NamedDevFailedList(e, name(deviceProxy), "DeviceProxy.write_read_attribute", "MultiDevFailed");
      } catch (final Exception e) {
        // transport-level failure: possibly reconnect and retry
        manageExceptionReconnection(deviceProxy, retries, i, e, this.getClass() + ".write_read_attribute");
      }
    } // End of for ( ; ; )
    // Build a Device Attribute Object
    // Depends on Device_impl version
    if (deviceProxy.device_5 != null) {
      final DeviceAttribute[] attributes = new DeviceAttribute[outAttrValues_5.length];
      for (int i = 0; i < outAttrValues_5.length; i++) {
        attributes[i] = new DeviceAttribute(outAttrValues_5[i]);
      }
      return attributes;
    } else if (deviceProxy.device_4 != null) {
      final DeviceAttribute[] attributes = new DeviceAttribute[outAttrValues_4.length];
      for (int i = 0; i < outAttrValues_4.length; i++) {
        attributes[i] = new DeviceAttribute(outAttrValues_4[i]);
      }
      return attributes;
    } else
      return null; // Cannot be possible ( write _ read did not exist before
  }
}
public class MapperConstructor { /** * if it is a null setting returns the null mapping
* @ param makeDest true if destination is a new instance
* @ param mtd mapping type of destination
* @ param mts mapping type of source
* @ param result StringBuilder used for mapping
* @ return true if operation is a null setting , false otherwise */
private boolean isNullSetting ( boolean makeDest , MappingType mtd , MappingType mts , StringBuilder result ) { } } | if ( makeDest && ( mtd == ALL_FIELDS || mtd == ONLY_VALUED_FIELDS ) && mts == ONLY_NULL_FIELDS ) { result . append ( " " + stringOfSetDestination + "(null);" + newLine ) ; return true ; } return false ; |
public class AWSCodeCommitClient { /** * Gets information about triggers configured for a repository .
* @ param getRepositoryTriggersRequest
* Represents the input of a get repository triggers operation .
* @ return Result of the GetRepositoryTriggers operation returned by the service .
* @ throws RepositoryNameRequiredException
* A repository name is required but was not specified .
* @ throws InvalidRepositoryNameException
* At least one specified repository name is not valid . < / p > < note >
* This exception only occurs when a specified repository name is not valid . Other exceptions occur when a
* required repository parameter is missing , or when a specified repository does not exist .
* @ throws RepositoryDoesNotExistException
* The specified repository does not exist .
* @ throws EncryptionIntegrityChecksFailedException
* An encryption integrity check failed .
* @ throws EncryptionKeyAccessDeniedException
* An encryption key could not be accessed .
* @ throws EncryptionKeyDisabledException
* The encryption key is disabled .
* @ throws EncryptionKeyNotFoundException
* No encryption key was found .
* @ throws EncryptionKeyUnavailableException
* The encryption key is not available .
* @ sample AWSCodeCommit . GetRepositoryTriggers
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / codecommit - 2015-04-13 / GetRepositoryTriggers "
* target = " _ top " > AWS API Documentation < / a > */
@ Override public GetRepositoryTriggersResult getRepositoryTriggers ( GetRepositoryTriggersRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeGetRepositoryTriggers ( request ) ; |
public class Expression { /** * A simple helper that calls through to { @ link MethodRef # invokeVoid ( Expression . . . ) } , but allows a
* more natural fluent call style . */
public Statement invokeVoid ( MethodRef method , Expression ... args ) { } } | return method . invokeVoid ( ImmutableList . < Expression > builder ( ) . add ( this ) . add ( args ) . build ( ) ) ; |
public class FunctionApplication { /** * Parse command line entries .
* @ param args command line options
* @ return command line built with options */
static CommandLine parseCommandLine ( String [ ] args ) { } } | return CommandLine . build ( ) . addOption ( DATA_OPTION , "For passing the data" ) . addOption ( DEBUG_OPTIONS , "For outputting debug information" ) . parse ( args ) ; |
public class AbstractDraweeController { /** * Adds controller listener . */
public void addControllerListener ( ControllerListener < ? super INFO > controllerListener ) { } } | Preconditions . checkNotNull ( controllerListener ) ; if ( mControllerListener instanceof InternalForwardingListener ) { ( ( InternalForwardingListener < INFO > ) mControllerListener ) . addListener ( controllerListener ) ; return ; } if ( mControllerListener != null ) { mControllerListener = InternalForwardingListener . createInternal ( mControllerListener , controllerListener ) ; return ; } // Listener only receives < INFO > , it never produces one .
// That means if it can accept < ? super INFO > , it can very well accept < INFO > .
mControllerListener = ( ControllerListener < INFO > ) controllerListener ; |
public class MoonPosition {

  /**
   * Computes the geocentric position of the Moon from the periodic terms of
   * Meeus, "Astronomical Algorithms", chapter 47.
   * Max error given by J. Meeus: 10'' in longitude and 4'' in latitude.
   *
   * @param jct julian centuries since J2000 in ephemeris time
   * @return array of { nutation-in-longitude (deg), true obliquity (deg),
   *         right ascension (deg, 0..360), declination (deg), distance (km) }
   */
  static double[] calculateMeeus(double jct) {
    // jct = julian centuries since J2000 in ephemeris time
    // Meeus (47.1): L'
    double meanLongitude = normalize(218.3164477 + (481267.88123421 + (-0.0015786 + (1.0 / 538841 + (-1.0 / 65194000) * jct) * jct) * jct) * jct);
    // Meeus (47.2): D
    double meanElongation = normalize(297.8501921 + (445267.1114034 + (-0.0018819 + (1.0 / 545868 + (1.0 / 113065000) * jct) * jct) * jct) * jct);
    // Meeus (47.3): M
    double meanAnomalySun = normalize(357.5291092 + (35999.0502909 + (-0.0001536 + (1.0 / 24490000) * jct) * jct) * jct);
    // Meeus (47.4): M'
    double meanAnomalyMoon = normalize(134.9633964 + (477198.8675055 + (0.0087414 + (1.0 / 69699 + (1.0 / 14712000) * jct) * jct) * jct) * jct);
    // Meeus (47.5): F
    double meanDistance = normalize(93.272095 + (483202.0175233 + (-0.0036539 + (-1.0 / 3526000 + (1.0 / 863310000) * jct) * jct) * jct) * jct);
    // Meeus (47.6): eccentricity correction factor e (and e^2 for terms with |M| == 2)
    double e = 1 - (0.002516 + 0.0000074 * jct) * jct;
    double ee = e * e;
    // accumulate periodic terms for longitude (sumL) and distance (sumR)
    double sumL = 0.0;
    double sumR = 0.0;
    for (int i = A_D.length - 1; i >= 0; i--) {
      double eFactor;
      // terms depending on the sun's mean anomaly are scaled by e (|M|=1) or e^2 (|M|=2)
      switch (A_M[i]) {
        case -1:
        case 1:
          eFactor = e;
          break;
        case -2:
        case 2:
          eFactor = ee;
          break;
        default:
          eFactor = 1;
      }
      double arg = Math.toRadians(A_D[i] * meanElongation + A_M[i] * meanAnomalySun + A_M2[i] * meanAnomalyMoon + A_F[i] * meanDistance);
      sumL += (COEFF_L[i] * eFactor * Math.sin(arg));
      sumR += (COEFF_R[i] * eFactor * Math.cos(arg));
    }
    // accumulate periodic terms for latitude (sumB)
    double sumB = 0.0;
    for (int i = B_D.length - 1; i >= 0; i--) {
      double eFactor;
      switch (B_M[i]) {
        case -1:
        case 1:
          eFactor = e;
          break;
        case -2:
        case 2:
          eFactor = ee;
          break;
        default:
          eFactor = 1;
      }
      double arg = B_D[i] * meanElongation + B_M[i] * meanAnomalySun + B_M2[i] * meanAnomalyMoon + B_F[i] * meanDistance;
      sumB += (COEFF_B[i] * eFactor * Math.sin(Math.toRadians(arg)));
    }
    // additive corrections (Meeus ch. 47: A1, A2, A3 arguments)
    double a1 = 119.75 + 131.849 * jct;
    double a2 = 53.09 + 479264.29 * jct;
    double a3 = 313.45 + 481266.484 * jct;
    sumL += (3958 * Math.sin(Math.toRadians(a1)) + 1962 * Math.sin(Math.toRadians(meanLongitude - meanDistance)) + 318 * Math.sin(Math.toRadians(a2)));
    sumB += (-2235 * Math.sin(Math.toRadians(meanLongitude)) + 382 * Math.sin(Math.toRadians(a3)) + 175 * Math.sin(Math.toRadians(a1 - meanDistance)) + 175 * Math.sin(Math.toRadians(a1 + meanDistance)) + 127 * Math.sin(Math.toRadians(meanLongitude - meanAnomalyMoon)) - 115 * Math.sin(Math.toRadians(meanLongitude + meanAnomalyMoon)));
    double[] result = new double[5];
    // result[0] = nutation in longitude, result[1] = nutation in obliquity (temporarily)
    StdSolarCalculator.nutations(jct, result);
    double trueObliquity = StdSolarCalculator.meanObliquity(jct) + result[1];
    double obliquityRad = Math.toRadians(trueObliquity);
    // sums are scaled by MIO (presumably 1e6 per Meeus' coefficient scaling — TODO confirm)
    double lngRad = Math.toRadians(meanLongitude + (sumL / MIO) + result[0]);
    double latRad = Math.toRadians(sumB / MIO);
    double distance = 385000.56 + (sumR / 1000); // in km between centers of Earth and Moon
    // ecliptic (lng, lat) -> equatorial (ra, decl) using the true obliquity
    double ra = Math.atan2(Math.sin(lngRad) * Math.cos(obliquityRad) - Math.tan(latRad) * Math.sin(obliquityRad), Math.cos(lngRad));
    double decl = Math.asin(Math.sin(latRad) * Math.cos(obliquityRad) + Math.cos(latRad) * Math.sin(obliquityRad) * Math.sin(lngRad));
    // already set: result[0] = nutation-in-longitude
    result[1] = trueObliquity; // in degrees
    result[2] = AstroUtils.toRange_0_360(Math.toDegrees(ra));
    result[3] = Math.toDegrees(decl);
    result[4] = distance; // in km
    return result;
  }
}
public class UseVarArgs {

  /**
   * Overrides the visitor to look for methods that have an array as a last
   * parameter of an array type, where the base type is not like the previous
   * parameter nor something like a char or byte array.
   *
   * @param obj the currently parsed method
   */
  @Override
  public void visitMethod(Method obj) {
    try {
      // synthetic methods are compiler-generated; the user cannot change their signature
      if (obj.isSynthetic()) {
        return;
      }
      // inner-class constructors carry a synthetic outer-instance parameter; skip them
      if (Values.CONSTRUCTOR.equals(getMethodName()) && javaClass.getClassName().contains("$")) {
        return;
      }
      List<String> types = SignatureUtils.getParameterSignatures(obj.getSignature());
      // only consider methods with one or two parameters
      if ((types.isEmpty()) || (types.size() > 2)) {
        return;
      }
      // already declared as varargs
      if ((obj.getAccessFlags() & Const.ACC_VARARGS) != 0) {
        return;
      }
      String lastParmSig = types.get(types.size() - 1);
      // only flag one-dimensional arrays
      if (!lastParmSig.startsWith(Values.SIG_ARRAY_PREFIX) || lastParmSig.startsWith(Values.SIG_ARRAY_OF_ARRAYS_PREFIX)) {
        return;
      }
      // byte[]/char[] typically represent buffers, not argument lists
      if (SignatureBuilder.SIG_BYTE_ARRAY.equals(lastParmSig) || SignatureBuilder.SIG_CHAR_ARRAY.equals(lastParmSig)) {
        return;
      }
      if (hasSimilarParms(types)) {
        return;
      }
      // main(String[]) must keep its canonical signature
      if (obj.isStatic() && "main".equals(obj.getName()) && SIG_STRING_ARRAY_TO_VOID.equals(obj.getSignature())) {
        return;
      }
      // inherited signatures cannot be changed without breaking the override
      if (!obj.isPrivate() && !obj.isStatic() && isInherited(obj)) {
        return;
      }
      super.visitMethod(obj);
      bugReporter.reportBug(new BugInstance(this, BugType.UVA_USE_VAR_ARGS.name(), LOW_PRIORITY).addClass(this).addMethod(this));
    } catch (ClassNotFoundException cnfe) {
      // superclass hierarchy lookup in isInherited may fail
      bugReporter.reportMissingClass(cnfe);
    }
  }
}
public class SubtitleChatOverlay { /** * documentation inherited from superinterface ChatDisplay */
public boolean displayMessage ( ChatMessage message , boolean alreadyDisplayed ) { } } | // nothing doing if we ' ve not been laid out
if ( ! isLaidOut ( ) ) { return false ; } // possibly display it now
Graphics2D gfx = getTargetGraphics ( ) ; if ( gfx != null ) { displayMessage ( message , gfx ) ; // display it
gfx . dispose ( ) ; // clean up
return true ; } return false ; |
public class MainApplication { /** * Get the user registration record .
* And create it if it doesn ' t exist yet .
* @ return The user registration record . */
public Record getUserRegistration ( ) { } } | Record recUserRegistration = ( Record ) m_systemRecordOwner . getRecord ( UserRegistrationModel . USER_REGISTRATION_FILE ) ; if ( recUserRegistration == null ) recUserRegistration = Record . makeRecordFromClassName ( UserRegistrationModel . THICK_CLASS , m_systemRecordOwner ) ; return recUserRegistration ; |
public class JmxClient {

  /**
   * Return an array of the attributes associated with the bean name.
   *
   * @param domainName JMX domain the bean lives in
   * @param beanName name of the bean within the domain
   * @return attribute metadata for the resolved object name
   * @throws JMException if the object name cannot be built or the bean lookup fails
   */
  public MBeanAttributeInfo[] getAttributesInfo(String domainName, String beanName) throws JMException {
    // delegate to the ObjectName-based overload
    return getAttributesInfo(ObjectNameUtil.makeObjectName(domainName, beanName));
  }
}
public class PreviousPageInfo { /** * Reinitialize the stored ActionMapping and PageFlowController objects . These are transient , and will be lost if
* you place this object in the session , and then retrieve it after a session failover has occurred ( i . e . , after
* this object has been serialized and deserialized ) . */
public void reinitialize ( PageFlowController pfc ) { } } | if ( _mapping == null && _mappingPath != null ) { ModuleConfig mc = pfc . getModuleConfig ( ) ; assert mc != null : "no ModuleConfig found for " + pfc . getClass ( ) . getName ( ) ; _mapping = ( ActionMapping ) mc . findActionConfig ( _mappingPath ) ; } if ( _forward != null && _forward instanceof Forward ) { ( ( Forward ) _forward ) . reinitialize ( pfc ) ; } |
public class CalendarCodeGenerator {

  /**
   * Implements the formatSkeleton method: generates a string switch over all known
   * skeletons, each case appending its pattern's formatted output to the builder.
   * Null or unknown skeletons make the generated method return false.
   *
   * @param skeletons skeleton/pattern pairs to emit cases for
   * @return the generated method spec
   */
  private MethodSpec buildSkeletonFormatter(List<Skeleton> skeletons) {
    MethodSpec.Builder method = MethodSpec.methodBuilder("formatSkeleton").addAnnotation(Override.class).addModifiers(PUBLIC).addParameter(String.class, "skeleton").addParameter(ZonedDateTime.class, "d").addParameter(StringBuilder.class, "b").returns(boolean.class);
    // null guard emitted into the generated code
    method.beginControlFlow("if (skeleton == null)");
    method.addStatement("return false");
    method.endControlFlow();
    method.beginControlFlow("switch (skeleton)");
    // Skeleton patterns.
    for (Skeleton skeleton : skeletons) {
      method.beginControlFlow("case $S:", skeleton.skeleton).addComment("Pattern: $S", skeleton.pattern);
      // emit the formatting statements for this pattern into the case body
      addPattern(method, skeleton.pattern);
      method.addStatement("break");
      method.endControlFlow();
    }
    // unknown skeletons are reported via a false return
    method.beginControlFlow("default:");
    method.addStatement("return false");
    method.endControlFlow();
    method.endControlFlow();
    method.addStatement("return true");
    return method.build();
  }
}
public class A_CmsUploadDialog { /** * Sets the target folder . < p >
* @ param target the target folder to set */
public void setTargetFolder ( String target ) { } } | m_targetFolder = target ; setCaption ( Messages . get ( ) . key ( Messages . GUI_UPLOAD_DIALOG_TITLE_1 , m_targetFolder ) ) ; |
public class SnapshotDaemon {

  /**
   * Search for truncation snapshots; after a failure there may be
   * ones we don't know about, there may be ones from a previous instance etc.
   * Do this every five minutes as an easy hack to make sure we don't leak them.
   * Next time groom is called it will delete the old ones after a success.
   */
  private void scanTruncationSnapshots() {
    if (m_truncationSnapshotPath == null) {
      try {
        // NOTE(review): the scan path is read from VoltZK.test_scan_path — confirm this is the intended ZK node
        m_truncationSnapshotPath = new String(m_zk.getData(VoltZK.test_scan_path, false, null), "UTF-8");
      } catch (Exception e) {
        // path not published yet; give up and try again on the next scan cycle
        return;
      }
    }
    Object params[] = new Object[1];
    params[0] = m_truncationSnapshotPath;
    long handle = m_nextCallbackHandle++;
    // register the callback that will receive the @SnapshotScan results asynchronously
    m_procedureCallbacks.put(handle, new ProcedureCallback() {
      @Override
      public void clientCallback(final ClientResponse clientResponse) throws Exception {
        if (clientResponse.getStatus() != ClientResponse.SUCCESS) {
          SNAP_LOG.error(clientResponse.getStatusString());
          return;
        }
        final VoltTable results[] = clientResponse.getResults();
        // a single result table signals a scan failure carrying an ERR_MSG column
        if (results.length == 1) {
          final VoltTable result = results[0];
          boolean advanced = result.advanceRow();
          assert (advanced);
          assert (result.getColumnCount() == 1);
          assert (result.getColumnType(0) == VoltType.STRING);
          loggingLog.error("Snapshot scan failed with failure response: " + result.getString("ERR_MSG"));
          return;
        }
        assert (results.length == 3);
        // first table lists the snapshots found on disk
        final VoltTable snapshots = results[0];
        assert (snapshots.getColumnCount() == 10);
        TreeMap<Long, TruncationSnapshotAttempt> foundSnapshots = new TreeMap<Long, TruncationSnapshotAttempt>();
        while (snapshots.advanceRow()) {
          final String path = snapshots.getString("PATH");
          final String pathType = snapshots.getString("PATHTYPE");
          final String nonce = snapshots.getString("NONCE");
          final Long txnId = snapshots.getLong("TXNID");
          TruncationSnapshotAttempt snapshotAttempt = new TruncationSnapshotAttempt();
          snapshotAttempt.path = path;
          snapshotAttempt.pathType = pathType;
          snapshotAttempt.nonce = nonce;
          foundSnapshots.put(txnId, snapshotAttempt);
        }
        // record only snapshots not already tracked, so a later groom can delete stale ones
        for (Map.Entry<Long, TruncationSnapshotAttempt> entry : foundSnapshots.entrySet()) {
          if (!m_truncationSnapshotAttempts.containsKey(entry.getKey())) {
            loggingLog.info("Truncation snapshot scan discovered new snapshot txnid " + entry.getKey() + " path " + entry.getValue().path + " nonce " + entry.getValue().nonce);
            m_truncationSnapshotAttempts.put(entry.getKey(), entry.getValue());
          }
        }
      }
    });
    // kick off the asynchronous scan; the callback above consumes the results
    m_initiator.initiateSnapshotDaemonWork("@SnapshotScan", handle, params);
  }
}
public class CmsSecurityManager { /** * Counts the locked resources in this project . < p >
* @ param context the current request context
* @ param id the id of the project
* @ return the amount of locked resources in this project
* @ throws CmsException if something goes wrong
* @ throws CmsRoleViolationException if the current user does not have management access to the project */
public int countLockedResources ( CmsRequestContext context , CmsUUID id ) throws CmsException , CmsRoleViolationException { } } | CmsDbContext dbc = m_dbContextFactory . getDbContext ( context ) ; CmsProject project = null ; int result = 0 ; try { project = m_driverManager . readProject ( dbc , id ) ; checkManagerOfProjectRole ( dbc , project ) ; result = m_driverManager . countLockedResources ( project ) ; } catch ( Exception e ) { dbc . report ( null , Messages . get ( ) . container ( Messages . ERR_COUNT_LOCKED_RESOURCES_PROJECT_2 , ( project == null ) ? "<failed to read>" : project . getName ( ) , id ) , e ) ; } finally { dbc . clear ( ) ; } return result ; |
public class OracleDialect {

  /**
   * Builds: insert into table (id, name) values (seq.nextval, ?)
   *
   * <p>Column list and values list are assembled in parallel ({@code sql} holds the
   * column names, {@code temp} the values clause). A String value ending with
   * ".nextval" on a primary-key column is emitted verbatim as an Oracle sequence
   * expression instead of a bind parameter.
   *
   * @param table table meta data (name, column labels, primary key)
   * @param attrs attribute name to value map to persist
   * @param sql output buffer receiving the generated SQL
   * @param paras output list receiving the bind parameters in column order
   */
  public void forModelSave(Table table, Map<String, Object> attrs, StringBuilder sql, List<Object> paras) {
    sql.append("insert into ").append(table.getName()).append('(');
    StringBuilder temp = new StringBuilder(") values(");
    String[] pKeys = table.getPrimaryKey();
    int count = 0;
    for (Entry<String, Object> e : attrs.entrySet()) {
      String colName = e.getKey();
      // attributes without a matching column are silently skipped
      if (table.hasColumnLabel(colName)) {
        if (count++ > 0) {
          sql.append(", ");
          temp.append(", ");
        }
        sql.append(colName);
        Object value = e.getValue();
        if (value instanceof String && isPrimaryKey(colName, pKeys) && ((String) value).endsWith(".nextval")) {
          // sequence expression goes straight into the SQL, no bind parameter
          temp.append(value);
        } else {
          temp.append('?');
          paras.add(value);
        }
      }
    }
    sql.append(temp.toString()).append(')');
  }
}
public class AttackDetail { /** * The array of < a > AttackProperty < / a > objects .
* @ param attackProperties
* The array of < a > AttackProperty < / a > objects . */
public void setAttackProperties ( java . util . Collection < AttackProperty > attackProperties ) { } } | if ( attackProperties == null ) { this . attackProperties = null ; return ; } this . attackProperties = new java . util . ArrayList < AttackProperty > ( attackProperties ) ; |
public class Partitioner {
    /**
     * Get partitions with low and high water marks.
     *
     * @param previousWatermark previous watermark from metadata
     * @return map of partition intervals: key is interval begin time, value is
     *         interval end time (both in format {@link Partitioner#WATERMARKTIMEFORMAT})
     * @deprecated
     */
    @Deprecated
    public HashMap<Long, Long> getPartitions(long previousWatermark) {
        HashMap<Long, Long> defaultPartition = Maps.newHashMap();
        // no watermark column/type configured: single default partition
        if (!isWatermarkExists()) {
            defaultPartition.put(ConfigurationKeys.DEFAULT_WATERMARK_VALUE, ConfigurationKeys.DEFAULT_WATERMARK_VALUE);
            LOG.info("Watermark column or type not found - Default partition with low watermark and high watermark as " + ConfigurationKeys.DEFAULT_WATERMARK_VALUE);
            return defaultPartition;
        }
        ExtractType extractType = ExtractType.valueOf(this.state.getProp(ConfigurationKeys.SOURCE_QUERYBASED_EXTRACT_TYPE).toUpperCase());
        WatermarkType watermarkType = WatermarkType.valueOf(this.state.getProp(ConfigurationKeys.SOURCE_QUERYBASED_WATERMARK_TYPE, ConfigurationKeys.DEFAULT_WATERMARK_TYPE).toUpperCase());
        // partition interval (hours), adjusted for the extract/watermark type
        int interval = getUpdatedInterval(this.state.getPropAsInt(ConfigurationKeys.SOURCE_QUERYBASED_PARTITION_INTERVAL, 0), extractType, watermarkType);
        int sourceMaxAllowedPartitions = this.state.getPropAsInt(ConfigurationKeys.SOURCE_MAX_NUMBER_OF_PARTITIONS, 0);
        // 0 means "not configured" -> fall back to the global default cap
        int maxPartitions = (sourceMaxAllowedPartitions != 0 ? sourceMaxAllowedPartitions : ConfigurationKeys.DEFAULT_MAX_NUMBER_OF_PARTITIONS);
        WatermarkPredicate watermark = new WatermarkPredicate(null, watermarkType);
        int deltaForNextWatermark = watermark.getDeltaNumForNextWatermark();
        LOG.info("is watermark override: " + this.isWatermarkOverride());
        LOG.info("is full extract: " + this.isFullDump());
        long lowWatermark = this.getLowWatermark(extractType, watermarkType, previousWatermark, deltaForNextWatermark);
        long highWatermark = this.getHighWatermark(extractType, watermarkType);
        // either bound missing: return a single partition spanning whatever we have
        if (lowWatermark == ConfigurationKeys.DEFAULT_WATERMARK_VALUE || highWatermark == ConfigurationKeys.DEFAULT_WATERMARK_VALUE) {
            LOG.info("Low watermark or high water mark is not found. Hence cannot generate partitions - Default partition with low watermark: " + lowWatermark + " and high watermark: " + highWatermark);
            defaultPartition.put(lowWatermark, highWatermark);
            return defaultPartition;
        }
        LOG.info("Generate partitions with low watermark: " + lowWatermark + "; high watermark: " + highWatermark + "; partition interval in hours: " + interval + "; Maximum number of allowed partitions: " + maxPartitions);
        return watermark.getPartitions(lowWatermark, highWatermark, interval, maxPartitions);
    }
}
public class CollectionSupport { /** * Prints a collection to the given output stream .
* @ param coll
* @ param out stream to print to
* @ param separator item separator */
public static void print ( Collection coll , PrintStream out , String separator ) { } } | out . print ( format ( coll , separator ) ) ; |
public class Resources { /** * Retrieve a double from bundle .
* @ param key the key of resource
* @ return the resource double
* @ throws MissingResourceException if the requested key is unknown */
public double getDouble ( String key ) throws MissingResourceException { } } | ResourceBundle bundle = getBundle ( ) ; String value = bundle . getString ( key ) ; try { return Double . parseDouble ( value ) ; } catch ( NumberFormatException nfe ) { throw new MissingResourceException ( "Expecting a double value but got " + value , "java.lang.String" , key ) ; } |
public class MapUtils {
    /**
     * Splits rawMap's entries into a list of chunk maps of at most chunkSize
     * entries each, preserving iteration order.
     *
     * @param rawMap the map whose entries are distributed over the chunks
     * @param chunkSize maximum number of entries per chunk; must be positive
     * @return list of chunk maps in source order; empty when rawMap is empty
     * @throws IllegalArgumentException if chunkSize is not positive
     *         (previously a chunkSize of 0 raised an obscure ArithmeticException)
     */
    public static List<Map<String, List<String>>> splitToChunksOfSize(Map<String, List<String>> rawMap, int chunkSize) {
        if (chunkSize < 1) {
            throw new IllegalArgumentException("chunkSize must be positive: " + chunkSize);
        }
        List<Map<String, List<String>>> mapChunks = new LinkedList<Map<String, List<String>>>();
        Map<String, List<String>> currentChunk = new LinkedHashMap<String, List<String>>();
        for (Map.Entry<String, List<String>> rawEntry : rawMap.entrySet()) {
            // start a new chunk once the current one is full
            if (currentChunk.size() == chunkSize) {
                mapChunks.add(currentChunk);
                currentChunk = new LinkedHashMap<String, List<String>>();
            }
            currentChunk.put(rawEntry.getKey(), rawEntry.getValue());
        }
        // flush the trailing partial (or full) chunk
        if (!currentChunk.isEmpty()) {
            mapChunks.add(currentChunk);
        }
        return mapChunks;
    }
}
public class XmlUtils { /** * Executes the specified namespace aware XPath query as a multiple node query ,
* returning the text values of the resulting list of
* nodes . */
public static List < String > selectNodeValues ( Node node , String xpathQuery , Map < String , String > namespaceUris ) { } } | List < Node > resultNodes = selectNodes ( node , xpathQuery , namespaceUris ) ; return extractNodeValues ( resultNodes ) ; |
public class LocalClientFactory { /** * Creates a client from information in the config map .
* @ param config the configuration
* @ param defaultIndexName the default index to use if not specified in the config
* @ return the ES client */
public JestClient createClient ( Map < String , String > config , String defaultIndexName ) { } } | JestClient client ; String indexName = config . get ( "client.index" ) ; // $ NON - NLS - 1 $
if ( indexName == null ) { indexName = defaultIndexName ; } client = createLocalClient ( config , indexName , defaultIndexName ) ; return client ; |
public class GenStrutsApp {
    /**
     * Returns a non-empty List of FormBeanModels that match the given form
     * bean type. The <code>usesPageFlowScopedFormBean</code> parameter can be
     * used to get the FormBeanModel for either a page flow scoped bean
     * (<code>true</code>), not flow scoped (<code>false</code>), or both
     * (<code>null</code>). If no matching bean exists yet, a new one is
     * created and registered.
     *
     * @param formType the form bean class type to match
     * @param usesPageFlowScopedFormBean flag to indicate that the bean is
     *        page flow scoped. If null, return all FormBeanModels of the given
     *        type regardless of being flow scoped or not.
     * @return a non-empty List of FormBeanModels that match the given type
     */
    List getMatchingFormBeans(TypeDeclaration formType, Boolean usesPageFlowScopedFormBean) {
        // Use the actual type of form to create the name.
        // This avoids conflicts if there are multiple forms using the
        // ANY_FORM_CLASS_NAME type.
        String actualType = CompilerUtils.getLoadableName(formType);
        // See if the app already has a form-bean of this type. If so,
        // we'll just use it; otherwise, we need to create it.
        List formBeans = getFormBeansByActualType(actualType, usesPageFlowScopedFormBean);
        if (formBeans == null) {
            // if not indicated assume not flow scoped when adding a new bean
            boolean isFlowScoped = false;
            if (usesPageFlowScopedFormBean != null) {
                isFlowScoped = usesPageFlowScopedFormBean.booleanValue();
            }
            FormBeanModel formBeanModel = addNewFormBean(formType, isFlowScoped);
            formBeans = new ArrayList();
            formBeans.add(formBeanModel);
        }
        // contract: never return an empty list
        assert formBeans.size() > 0;
        return formBeans;
    }
}
public class AbstractBackend { /** * Notifies all registered listeners when the backend has been updated .
* @ param e a { @ link BackendUpdatedEvent } representing an update . */
protected void fireBackendUpdated ( E e ) { } } | for ( int i = 0 , n = this . listenerList . size ( ) ; i < n ; i ++ ) { this . listenerList . get ( i ) . backendUpdated ( e ) ; } |
public class SearchCriteria {
    /**
     * Appends the specified ordering criterion.
     *
     * @param order the ordering criterion.
     * @return this criteria instance, for call chaining.
     * @throws IllegalArgumentException if the argument is null.
     *         (The previous javadoc claimed NullPointerException, which did
     *         not match the actual behavior.)
     */
    public SearchCriteria addOrder(final Order order) {
        if (order == null) {
            throw new IllegalArgumentException("no order specified");
        }
        _orders.add(order);
        return this;
    }
}
public class AWSParameterStoreConfigClient {
    /**
     * Calculates property names to look for, delegating to the shared helper
     * with "_" as the environment-name separator.
     *
     * @param prefix The prefix
     * @param activeNames active environment names
     * @return A set of calculated property names
     */
    private Set<String> calcPropertySourceNames(String prefix, Set<String> activeNames) {
        return ClientUtil.calcPropertySourceNames(prefix, activeNames, "_");
    }
}
public class SeaGlassLookAndFeel {
    /**
     * Initialize button settings.
     * Populates the UI defaults table with content margins, shared palette
     * colors, and lazily-instantiated per-state painters for Button,
     * ToggleButton, CheckBox and RadioButton.
     *
     * @param d the UI defaults map.
     */
    private void defineButtons(UIDefaults d) {
        d.put("Button.contentMargins", new InsetsUIResource(6, 14, 6, 14));
        d.put("Button.defaultButtonFollowsFocus", Boolean.FALSE);
        // shared palette colors consumed by the button painters
        d.put("buttonBorderBaseEnabled", new Color(0x709ad0));
        d.put("buttonBorderBasePressed", new Color(0x4879bf));
        d.put("buttonInteriorBaseEnabled", new Color(0xd5e8f7));
        d.put("buttonInteriorBasePressed", new Color(0x6d8fba));
        d.put("buttonInteriorBaseSelected", new Color(0x80a6d2));
        d.put("buttonInteriorBasePressedSelected", new Color(0x7497c2));
        d.put("texturedButtonBorderBaseEnabled", new Color(0x999999));
        d.put("texturedButtonInteriorBaseEnabled", new Color(0xf0f0f0));
        d.put("texturedButtonInteriorBasePressed", new Color(0x8eb3d2));
        d.put("texturedButtonInteriorBaseSelected", new Color(0x98c1e2));
        d.put("texturedButtonInteriorBasePressedSelected", new Color(0x7e9fba));
        d.put("buttonBulletBottomEnabled", Color.BLACK);
        d.put("buttonArrow", Color.BLACK);
        // 'p' is the component prefix and 'c' the painter class; both are
        // re-assigned as each component family is configured below
        String p = "Button";
        String c = PAINTER_PREFIX + "ButtonPainter";
        // Initialize Button
        d.put(p + ".States", "Enabled,Pressed,Disabled,Focused,Default");
        d.put(p + "[Default+Pressed].textForeground", new ColorUIResource(Color.black));
        d.put(p + "[Disabled].textForeground", getDerivedColor("seaGlassDisabledText", 0.0f, 0.0f, 0.0f, 0, true));
        d.put(p + "[Default].backgroundPainter", new LazyPainter(c, ButtonPainter.Which.BACKGROUND_DEFAULT));
        d.put(p + "[Default+Focused].backgroundPainter", new LazyPainter(c, ButtonPainter.Which.BACKGROUND_DEFAULT_FOCUSED));
        d.put(p + "[Default+Pressed].backgroundPainter", new LazyPainter(c, ButtonPainter.Which.BACKGROUND_PRESSED_DEFAULT));
        d.put(p + "[Default+Focused+Pressed].backgroundPainter", new LazyPainter(c, ButtonPainter.Which.BACKGROUND_PRESSED_DEFAULT_FOCUSED));
        d.put(p + "[Disabled].backgroundPainter", new LazyPainter(c, ButtonPainter.Which.BACKGROUND_DISABLED));
        d.put(p + "[Enabled].backgroundPainter", new LazyPainter(c, ButtonPainter.Which.BACKGROUND_ENABLED));
        d.put(p + "[Focused].backgroundPainter", new LazyPainter(c, ButtonPainter.Which.BACKGROUND_FOCUSED));
        d.put(p + "[Pressed].backgroundPainter", new LazyPainter(c, ButtonPainter.Which.BACKGROUND_PRESSED));
        d.put(p + "[Focused+Pressed].backgroundPainter", new LazyPainter(c, ButtonPainter.Which.BACKGROUND_PRESSED_FOCUSED));
        // Initialize ToggleButton (reuses the ButtonPainter class 'c')
        p = "ToggleButton";
        d.put(p + ".contentMargins", new InsetsUIResource(6, 14, 6, 14));
        d.put(p + ".States", "Enabled,Pressed,Disabled,Focused,Selected");
        d.put(p + "[Selected].textForeground", new ColorUIResource(Color.black));
        d.put(p + "[Disabled].textForeground", getDerivedColor("seaGlassDisabledText", 0.0f, 0.0f, 0.0f, 0, true));
        d.put(p + "[Default+Pressed].textForeground", new ColorUIResource(Color.black));
        d.put(p + "[Focused+Selected].textForeground", new ColorUIResource(Color.black));
        d.put(p + "[Disabled+Selected].textForeground", new ColorUIResource(new Color(0, 0, 0, 0x80)));
        d.put(p + "[Disabled].backgroundPainter", new LazyPainter(c, ButtonPainter.Which.BACKGROUND_DISABLED));
        d.put(p + "[Enabled].backgroundPainter", new LazyPainter(c, ButtonPainter.Which.BACKGROUND_ENABLED));
        d.put(p + "[Focused].backgroundPainter", new LazyPainter(c, ButtonPainter.Which.BACKGROUND_FOCUSED));
        d.put(p + "[Pressed].backgroundPainter", new LazyPainter(c, ButtonPainter.Which.BACKGROUND_PRESSED));
        d.put(p + "[Focused+Pressed].backgroundPainter", new LazyPainter(c, ButtonPainter.Which.BACKGROUND_PRESSED_FOCUSED));
        d.put(p + "[Selected].backgroundPainter", new LazyPainter(c, ButtonPainter.Which.BACKGROUND_SELECTED));
        d.put(p + "[Focused+Selected].backgroundPainter", new LazyPainter(c, ButtonPainter.Which.BACKGROUND_SELECTED_FOCUSED));
        d.put(p + "[Pressed+Selected].backgroundPainter", new LazyPainter(c, ButtonPainter.Which.BACKGROUND_PRESSED_SELECTED));
        d.put(p + "[Focused+Pressed+Selected].backgroundPainter", new LazyPainter(c, ButtonPainter.Which.BACKGROUND_PRESSED_SELECTED_FOCUSED));
        d.put(p + "[Disabled+Selected].backgroundPainter", new LazyPainter(c, ButtonPainter.Which.BACKGROUND_DISABLED_SELECTED));
        // Initialize CheckBox
        p = "CheckBox";
        c = PAINTER_PREFIX + "CheckBoxPainter";
        d.put(p + ".States", "Enabled,Pressed,Disabled,Focused,Selected");
        d.put(p + ".contentMargins", new InsetsUIResource(0, 0, 0, 0));
        d.put(p + "[Disabled].textForeground", getDerivedColor("seaGlassDisabledText", 0.0f, 0.0f, 0.0f, 0, true));
        d.put(p + "[Disabled].iconPainter", new LazyPainter(c, CheckBoxPainter.Which.ICON_DISABLED));
        d.put(p + "[Enabled].iconPainter", new LazyPainter(c, CheckBoxPainter.Which.ICON_ENABLED));
        d.put(p + "[Focused].iconPainter", new LazyPainter(c, CheckBoxPainter.Which.ICON_FOCUSED));
        d.put(p + "[Pressed].iconPainter", new LazyPainter(c, CheckBoxPainter.Which.ICON_PRESSED));
        d.put(p + "[Focused+Pressed].iconPainter", new LazyPainter(c, CheckBoxPainter.Which.ICON_PRESSED_FOCUSED));
        d.put(p + "[Selected].iconPainter", new LazyPainter(c, CheckBoxPainter.Which.ICON_SELECTED));
        d.put(p + "[Focused+Selected].iconPainter", new LazyPainter(c, CheckBoxPainter.Which.ICON_SELECTED_FOCUSED));
        d.put(p + "[Pressed+Selected].iconPainter", new LazyPainter(c, CheckBoxPainter.Which.ICON_PRESSED_SELECTED));
        d.put(p + "[Focused+Pressed+Selected].iconPainter", new LazyPainter(c, CheckBoxPainter.Which.ICON_PRESSED_SELECTED_FOCUSED));
        d.put(p + "[Disabled+Selected].iconPainter", new LazyPainter(c, CheckBoxPainter.Which.ICON_DISABLED_SELECTED));
        d.put(p + ".icon", new SeaGlassIcon(p, "iconPainter", 18, 18));
        // Initialize RadioButton
        p = "RadioButton";
        c = PAINTER_PREFIX + "RadioButtonPainter";
        d.put(p + ".States", "Enabled,Pressed,Disabled,Focused,Selected");
        d.put(p + ".contentMargins", new InsetsUIResource(0, 0, 0, 0));
        d.put(p + "[Disabled].textForeground", getDerivedColor("seaGlassDisabledText", 0.0f, 0.0f, 0.0f, 0, true));
        d.put(p + "[Disabled].iconPainter", new LazyPainter(c, RadioButtonPainter.Which.ICON_DISABLED));
        d.put(p + "[Enabled].iconPainter", new LazyPainter(c, RadioButtonPainter.Which.ICON_ENABLED));
        d.put(p + "[Focused].iconPainter", new LazyPainter(c, RadioButtonPainter.Which.ICON_FOCUSED));
        d.put(p + "[Pressed].iconPainter", new LazyPainter(c, RadioButtonPainter.Which.ICON_PRESSED));
        d.put(p + "[Focused+Pressed].iconPainter", new LazyPainter(c, RadioButtonPainter.Which.ICON_PRESSED_FOCUSED));
        d.put(p + "[Selected].iconPainter", new LazyPainter(c, RadioButtonPainter.Which.ICON_SELECTED));
        d.put(p + "[Focused+Selected].iconPainter", new LazyPainter(c, RadioButtonPainter.Which.ICON_SELECTED_FOCUSED));
        d.put(p + "[Pressed+Selected].iconPainter", new LazyPainter(c, RadioButtonPainter.Which.ICON_PRESSED_SELECTED));
        d.put(p + "[Focused+Pressed+Selected].iconPainter", new LazyPainter(c, RadioButtonPainter.Which.ICON_PRESSED_SELECTED_FOCUSED));
        d.put(p + "[Disabled+Selected].iconPainter", new LazyPainter(c, RadioButtonPainter.Which.ICON_DISABLED_SELECTED));
        d.put(p + ".icon", new SeaGlassIcon(p, "iconPainter", 18, 18));
    }
}
public class MessageSelectorParser { /** * Static parse method taking care of test action description .
* @ param element
* @ param builder */
public static void doParse ( Element element , BeanDefinitionBuilder builder ) { } } | Element messageSelectorElement = DomUtils . getChildElementByTagName ( element , "selector" ) ; if ( messageSelectorElement != null ) { Element selectorStringElement = DomUtils . getChildElementByTagName ( messageSelectorElement , "value" ) ; if ( selectorStringElement != null ) { builder . addPropertyValue ( "messageSelector" , DomUtils . getTextValue ( selectorStringElement ) ) ; } Map < String , String > messageSelectorMap = new HashMap < > ( ) ; List < ? > messageSelectorElements = DomUtils . getChildElementsByTagName ( messageSelectorElement , "element" ) ; for ( Iterator < ? > iter = messageSelectorElements . iterator ( ) ; iter . hasNext ( ) ; ) { Element selectorElement = ( Element ) iter . next ( ) ; messageSelectorMap . put ( selectorElement . getAttribute ( "name" ) , selectorElement . getAttribute ( "value" ) ) ; } builder . addPropertyValue ( "messageSelectorMap" , messageSelectorMap ) ; } |
public class Functions {
    /**
     * Runs the decode-base64 function with the given arguments.
     *
     * @param content the base64-encoded content to decode
     * @param charset the charset whose display name is passed to the function
     * @param context the current test context
     * @return the decoded string
     */
    public static String decodeBase64(String content, Charset charset, TestContext context) {
        return new DecodeBase64Function().execute(Arrays.asList(content, charset.displayName()), context);
    }
}
public class AbstractDatabaseEngine {
    /**
     * Executes the given query by translating the expression to SQL and
     * delegating to the raw-SQL query overload.
     *
     * @param query The query to execute.
     * @return the result rows, each as a map of column name to result column.
     * @throws DatabaseEngineException If something goes wrong executing the query.
     */
    @Override
    public synchronized List<Map<String, ResultColumn>> query(final Expression query) throws DatabaseEngineException {
        return query(translate(query));
    }
}
public class CommerceOrderNotePersistenceImpl {
    /**
     * Clears the cache for all commerce order notes.
     * The {@link EntityCache} and {@link FinderCache} are both cleared by this
     * method: the entity cache by model class, the finder cache for entity
     * lookups and for list queries with and without pagination.
     */
    @Override
    public void clearCache() {
        entityCache.clearCache(CommerceOrderNoteImpl.class);
        finderCache.clearCache(FINDER_CLASS_NAME_ENTITY);
        finderCache.clearCache(FINDER_CLASS_NAME_LIST_WITH_PAGINATION);
        finderCache.clearCache(FINDER_CLASS_NAME_LIST_WITHOUT_PAGINATION);
    }
}
public class EbeanUpdater {
    /**
     * {@inheritDoc}
     * Delegates to the underlying update object.
     *
     * @param position the 1-based bind parameter position
     * @param jdbcType the JDBC type of the null parameter
     * @return the underlying update, for call chaining
     */
    @Override
    public Update<M> setNullParameter(int position, int jdbcType) {
        return getUpdate().setNullParameter(position, jdbcType);
    }
}
public class CmsSearchReplaceSettings { /** * Sets the xpath . < p >
* @ param xpath the xpath to set */
public void setXpath ( String xpath ) { } } | if ( CmsStringUtil . isNotEmptyOrWhitespaceOnly ( xpath ) ) { xpath = xpath . trim ( ) ; if ( xpath . startsWith ( "/" ) ) { xpath = xpath . substring ( 1 ) ; } if ( xpath . endsWith ( "/" ) ) { xpath = xpath . substring ( 0 , xpath . length ( ) - 1 ) ; } } m_xpath = xpath ; |
public class CmsGalleriesTab { /** * De - selects the galleries in the galleries list . < p >
* @ param galleries the galleries to deselect */
public void uncheckGalleries ( List < String > galleries ) { } } | for ( String gallery : galleries ) { CmsListItem item = searchTreeItem ( m_scrollList , gallery ) ; if ( item != null ) { item . getCheckBox ( ) . setChecked ( false ) ; } } |
public class JavacState { /** * Delete all prev artifacts in the currently tainted packages . */
public void deleteClassArtifactsInTaintedPackages ( ) { } } | for ( String pkg : taintedPackages ) { Map < String , File > arts = fetchPrevArtifacts ( pkg ) ; for ( File f : arts . values ( ) ) { if ( f . exists ( ) && f . getName ( ) . endsWith ( ".class" ) ) { f . delete ( ) ; } } } |
public class BoltNeo4jResourceLocalTransactionCoordinator {
    /**
     * Called after a resource-local transaction has begun
     * (PhysicalTransactionDelegate callback).
     * Propagates the configured timeout to the owner (only when positive),
     * notifies the owner, then notifies all registered transaction observers.
     */
    private void afterBeginCallback() {
        // a non-positive timeout means "no timeout configured"
        if (this.timeOut > 0) {
            owner.setTransactionTimeOut(this.timeOut);
        }
        owner.afterTransactionBegin();
        for (TransactionObserver observer : observers) {
            observer.afterBegin();
        }
        log.trace("ResourceLocalTransactionCoordinatorImpl#afterBeginCallback");
    }
}
public class BouncyCastleUtil {
    /**
     * Creates a <code>ProxyCertInfo</code> object from the given extension by
     * decoding the extension's ASN.1 payload.
     *
     * @param ext the extension.
     * @return the <code>ProxyCertInfo</code> object.
     * @exception IOException if decoding the extension payload fails.
     */
    public static ProxyCertInfo getProxyCertInfo(X509Extension ext) throws IOException {
        return ProxyCertInfo.getInstance(BouncyCastleUtil.getExtensionObject(ext));
    }
}
public class AbstractCompositeHandler { /** * { @ inheritDoc } */
public UpdateResult whenSQLUpdate ( final String sql , final List < Parameter > parameters ) throws SQLException { } } | if ( this . updateHandler == null ) { throw new SQLException ( "No update handler: " + sql ) ; } // end of if
return this . updateHandler . apply ( sql , parameters ) ; |
public class Nfs3 {
    /**
     * (non-Javadoc)
     * Sends an NFS LINK request through the RPC wrapper and returns the
     * populated NFSv3 response.
     *
     * @see com.emc.ecs.nfsclient.nfs.Nfs#wrapped_sendLink(com.emc.ecs.nfsclient.nfs.NfsLinkRequest)
     */
    public Nfs3LinkResponse wrapped_sendLink(NfsLinkRequest request) throws IOException {
        // handler that supplies the concrete NFSv3 response type to the RPC layer
        NfsResponseHandler<Nfs3LinkResponse> responseHandler = new NfsResponseHandler<Nfs3LinkResponse>() {
            /* (non-Javadoc)
             * @see com.emc.ecs.nfsclient.rpc.RpcResponseHandler#makeNewResponse()
             */
            protected Nfs3LinkResponse makeNewResponse() {
                return new Nfs3LinkResponse();
            }
        };
        _rpcWrapper.callRpcWrapped(request, responseHandler);
        return responseHandler.getResponse();
    }
}
public class LightweightTypeReference {
    /**
     * Collects the components that are not interface types.
     * Uses a lazy allocation strategy: no list while all seen components are
     * interfaces, an immutable singleton for the first hit, and a mutable
     * ArrayList only once a second non-interface type is found.
     *
     * @param components the component references to inspect
     * @return the non-interface components, or {@code null} when every
     *         component is an interface type
     */
    /* @Nullable */
    protected List<LightweightTypeReference> getNonInterfaceTypes(List<LightweightTypeReference> components) {
        List<LightweightTypeReference> nonInterfaceTypes = null;
        for (LightweightTypeReference component : components) {
            if (!component.isInterfaceType()) {
                if (nonInterfaceTypes == null) {
                    nonInterfaceTypes = Collections.singletonList(component);
                } else if (nonInterfaceTypes.size() == 1) {
                    // upgrade the immutable singleton to a mutable list before adding
                    nonInterfaceTypes = Lists.newArrayList(nonInterfaceTypes);
                    nonInterfaceTypes.add(component);
                } else {
                    nonInterfaceTypes.add(component);
                }
            }
        }
        return nonInterfaceTypes;
    }
}
public class CompletedCheckpointStatsSummary {
    /**
     * Updates the summary with the given completed checkpoint, folding its
     * state size, end-to-end duration and alignment-buffered bytes into the
     * respective aggregate stats.
     *
     * @param completed Completed checkpoint to update the summary with.
     */
    void updateSummary(CompletedCheckpointStats completed) {
        stateSize.add(completed.getStateSize());
        duration.add(completed.getEndToEndDuration());
        alignmentBuffered.add(completed.getAlignmentBuffered());
    }
}
public class MessageTemplate { /** * Returns the { @ link MessageBroker } for routing messages
* @ return the message broker */
public MessageBroker getMessageBroker ( ) { } } | if ( this . messageBroker != null ) { return this . messageBroker ; } Assert . notNull ( FlexContext . getMessageBroker ( ) , "A MessageBroker was not set on the MessageTemplate " + "and no thread-local MessageBroker could be found in the FlexContext." ) ; return FlexContext . getMessageBroker ( ) ; |
public class AccessClassLoader {
    /**
     * Returns true when the two classes live in the same runtime package and
     * are loaded by class loaders that resolve to the same runtime loader
     * (the bootstrap loader, represented by null, is treated as equivalent to
     * the system class loader).
     * Note: Package instances are compared by identity, matching the
     * per-class-loader caching of {@code Class#getPackage()}.
     */
    static boolean areInSameRuntimeClassLoader(Class type1, Class type2) {
        if (type1.getPackage() != type2.getPackage()) {
            return false;
        }
        final ClassLoader loaderA = type1.getClassLoader();
        final ClassLoader loaderB = type2.getClassLoader();
        final ClassLoader systemLoader = ClassLoader.getSystemClassLoader();
        if (loaderA == null) {
            return loaderB == null || loaderB == systemLoader;
        }
        if (loaderB == null) {
            return loaderA == systemLoader;
        }
        return loaderA == loaderB;
    }
}
public class SipComponentProcessor { /** * Gets all classes that are eligible for injection etc
* @ param metaData
* @ return */
private Set < String > getAllComponentClasses ( DeploymentUnit deploymentUnit , CompositeIndex index , SipMetaData sipMetaData , SipAnnotationMetaData sipAnnotationMetaData ) { } } | final Set < String > classes = new HashSet < String > ( ) ; if ( sipAnnotationMetaData != null ) { for ( Map . Entry < String , SipMetaData > metaData : sipAnnotationMetaData . entrySet ( ) ) { getAllComponentClasses ( metaData . getValue ( ) , classes ) ; } } // if ( metaData . getAnnotationsMetaData ( ) ! = null )
// for ( Map . Entry < String , WebMetaData > webMetaData : metaData . getAnnotationsMetaData ( ) . entrySet ( ) ) {
// getAllComponentClasses ( webMetaData . getValue ( ) , classes ) ;
// if ( metaData . getSharedWebMetaData ( ) ! = null )
// getAllComponentClasses ( metaData . getSharedWebMetaData ( ) , classes ) ;
// if ( metaData . getWebFragmentsMetaData ( ) ! = null )
// for ( Map . Entry < String , WebFragmentMetaData > webMetaData : metaData . getWebFragmentsMetaData ( ) . entrySet ( ) ) {
// getAllComponentClasses ( webMetaData . getValue ( ) , classes ) ;
if ( sipMetaData != null ) { getAllComponentClasses ( sipMetaData , classes ) ; } // if ( metaData . getWebMetaData ( ) ! = null )
// getAllComponentClasses ( metaData . getWebMetaData ( ) , classes ) ;
// if ( tldsMetaData = = null )
// return classes ;
// if ( tldsMetaData . getSharedTlds ( deploymentUnit ) ! = null )
// for ( TldMetaData tldMetaData : tldsMetaData . getSharedTlds ( deploymentUnit ) ) {
// getAllComponentClasses ( tldMetaData , classes ) ;
// if ( tldsMetaData . getTlds ( ) ! = null )
// for ( Map . Entry < String , TldMetaData > tldMetaData : tldsMetaData . getTlds ( ) . entrySet ( ) ) {
// getAllComponentClasses ( tldMetaData . getValue ( ) , classes ) ;
// getAllAsyncListenerClasses ( index , classes ) ;
return classes ; |
public class LogSignAlgebra { /** * Converts a compacted number to its real value . */
@ Override public double toReal ( double x ) { } } | double unsignedReal = FastMath . exp ( natlog ( x ) ) ; return ( sign ( x ) == POSITIVE ) ? unsignedReal : - unsignedReal ; |
public class DateUtilExtensions {
    /**
     * Decrement a Calendar by one day. The receiver is left unmodified.
     *
     * @param self a Calendar
     * @return a new Calendar set to the previous day
     * @since 1.8.7
     */
    public static Calendar previous(Calendar self) {
        final Calendar copy = (Calendar) self.clone();
        copy.add(Calendar.DATE, -1);
        return copy;
    }
}
public class AccountsApi {
    /**
     * Delete an existing account custom field. Delegates to the overload that
     * accepts additional options, passing null options.
     *
     * @param accountId The external account number (int) or account ID Guid. (required)
     * @param customFieldId the id of the custom field to delete (required)
     * @throws ApiException if the API call fails
     */
    public void deleteCustomField(String accountId, String customFieldId) throws ApiException {
        deleteCustomField(accountId, customFieldId, null);
    }
}
public class Packer { /** * Get native int ( fixed length )
* @ return
* @ see # putInt ( int ) */
public int getInt ( ) { } } | int v = 0 ; v |= ( ( getByte ( ) & 0xFF ) << 24 ) ; v |= ( ( getByte ( ) & 0xFF ) << 16 ) ; v |= ( ( getByte ( ) & 0xFF ) << 8 ) ; v |= ( getByte ( ) & 0xFF ) ; return v ; |
public class Common { /** * Initialize the { @ link CurrencyManager } */
public void initializeCurrency ( ) { } } | if ( ! currencyInitialized ) { sendConsoleMessage ( Level . INFO , getLanguageManager ( ) . getString ( "loading_currency_manager" ) ) ; currencyManager = new CurrencyManager ( ) ; currencyInitialized = true ; sendConsoleMessage ( Level . INFO , getLanguageManager ( ) . getString ( "currency_manager_loaded" ) ) ; } |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.