signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class ScanAdapter { /** * { @ inheritDoc } */
@ Override public void adapt ( Scan scan , ReadHooks readHooks , Query query ) { } } | throwIfUnsupportedScan ( scan ) ; toByteStringRange ( scan , query ) ; query . filter ( buildFilter ( scan , readHooks ) ) ; if ( LIMIT_AVAILABLE && scan . getLimit ( ) > 0 ) { query . limit ( scan . getLimit ( ) ) ; } |
public class ItemizedOverlayControlView { private void initViewListeners ( ) { } } | this . mNextButton . setOnClickListener ( new OnClickListener ( ) { @ Override public void onClick ( final View v ) { if ( ItemizedOverlayControlView . this . mLis != null ) ItemizedOverlayControlView . this . mLis . onNext ( ) ; } } ) ; this . mPreviousButton . setOnClickListener ( new OnClickListener ( ) { @ Override public void onClick ( final View v ) { if ( ItemizedOverlayControlView . this . mLis != null ) ItemizedOverlayControlView . this . mLis . onPrevious ( ) ; } } ) ; this . mCenterToButton . setOnClickListener ( new OnClickListener ( ) { @ Override public void onClick ( final View v ) { if ( ItemizedOverlayControlView . this . mLis != null ) ItemizedOverlayControlView . this . mLis . onCenter ( ) ; } } ) ; this . mNavToButton . setOnClickListener ( new OnClickListener ( ) { @ Override public void onClick ( final View v ) { if ( ItemizedOverlayControlView . this . mLis != null ) ItemizedOverlayControlView . this . mLis . onNavTo ( ) ; } } ) ; |
public class Dependencies { /** * Visit the api of a class and construct a pubapi string and
* store it into the pubapi _ perclass map . */
public void visitPubapi ( Element e ) { } } | Name n = ( ( ClassSymbol ) e ) . fullname ; Name p = ( ( ClassSymbol ) e ) . packge ( ) . fullname ; StringBuffer sb = publicApiPerClass . get ( n ) ; assert ( sb == null ) ; sb = new StringBuffer ( ) ; PubapiVisitor v = new PubapiVisitor ( sb ) ; v . visit ( e ) ; if ( sb . length ( ) > 0 ) { publicApiPerClass . put ( n , sb ) ; } explicitPackages . add ( p ) ; |
public class InjectionBinding { /** * Merges the value of an integer annotation value .
* < p > If an error occurs , { @ link # mergeError } will be called , which
* requires { @ link # getJNDIEnvironmentRefType } to be defined .
* @ param oldValue the old value
* @ param oldValueXML true if the old value was set by XML
* @ param newValue the new value
* @ param elementName the annotation element name
* @ param defaultValue the default value as specified in the annotation
* @ return the merged value
* @ throws InjectionConfigurationException if an error occurs */
protected Integer mergeAnnotationInteger ( Integer oldValue , boolean oldValueXML , int newValue , String elementName , int defaultValue , Map < Integer , String > valueNames ) throws InjectionConfigurationException { } } | if ( newValue == defaultValue ) { return oldValue ; } if ( oldValueXML ) { return oldValue ; } if ( oldValue == null ? isComplete ( ) : ! oldValue . equals ( newValue ) ) { Object oldValueName = valueNames == null ? oldValue : valueNames . get ( oldValue ) ; Object newValueName = valueNames == null ? newValue : valueNames . get ( newValue ) ; mergeError ( oldValueName , newValueName , false , elementName , false , elementName ) ; return oldValue ; } return newValue ; |
public class Neighbours { /** * Creates a BusGroup with the given name .
* As this is a new Bus , the current subscription state in
* the MatchSpace is read and assigned to the Bus as active
* subscriptions .
* @ param busId The name of the Busgroup
* @ return The new BusGroup object representing the bus . */
private BusGroup createBus ( String busId ) { } } | if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "createBus" , busId ) ; // Create a new group and add it to the list .
boolean isLocalBus = busId . equals ( _localBusName ) ; final BusGroup group = new BusGroup ( busId , _proxyHandler , isLocalBus ) ; final BusGroup [ ] tempBuses = _buses ; _buses = new BusGroup [ tempBuses . length + 1 ] ; System . arraycopy ( tempBuses , 0 , _buses , 0 , tempBuses . length ) ; _buses [ tempBuses . length ] = group ; // Do not need to propogate local subs to foreign buses
// addSubscriptionsToBus ( group , busId ) ;
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "createBus" , group ) ; return group ; |
public class BinderParamBuilder { /** * Returns the { @ link ClassName } for the Stub */
private ClassName getStubClassName ( TypeMirror param ) { } } | Element element = getBindingManager ( ) . getElement ( param . toString ( ) ) ; return ClassName . get ( getPackage ( element ) . getQualifiedName ( ) . toString ( ) , element . getSimpleName ( ) + ClassBuilder . STUB_SUFFIX ) ; |
public class TopologyBuilder { /** * Define a new bolt in this topology . This defines a windowed bolt , intended
* for windowing operations . The { @ link IWindowedBolt # execute ( TupleWindow ) } method
* is triggered for each window interval with the list of current events in the window .
* @ param id the id of this component . This id is referenced by other components that want to consume this bolt ' s outputs .
* @ param bolt the windowed bolt
* @ param parallelism _ hint the number of tasks that should be assigned to execute this bolt . Each task will run on a thread in a process somwehere around the cluster .
* @ return use the returned object to declare the inputs to this component
* @ throws IllegalArgumentException if { @ code parallelism _ hint } is not positive */
public BoltDeclarer setBolt ( String id , IWindowedBolt bolt , Number parallelism_hint ) throws IllegalArgumentException { } } | return setBolt ( id , new backtype . storm . topology . WindowedBoltExecutor ( bolt ) , parallelism_hint ) ; |
public class InternalSimpleExpressionsLexer { /** * $ ANTLR start " RULE _ INT " */
public final void mRULE_INT ( ) throws RecognitionException { } } | try { int _type = RULE_INT ; int _channel = DEFAULT_TOKEN_CHANNEL ; // InternalSimpleExpressions . g : 802:10 : ( ( ' 0 ' . . ' 9 ' ) + )
// InternalSimpleExpressions . g : 802:12 : ( ' 0 ' . . ' 9 ' ) +
{ // InternalSimpleExpressions . g : 802:12 : ( ' 0 ' . . ' 9 ' ) +
int cnt3 = 0 ; loop3 : do { int alt3 = 2 ; int LA3_0 = input . LA ( 1 ) ; if ( ( ( LA3_0 >= '0' && LA3_0 <= '9' ) ) ) { alt3 = 1 ; } switch ( alt3 ) { case 1 : // InternalSimpleExpressions . g : 802:13 : ' 0 ' . . ' 9'
{ matchRange ( '0' , '9' ) ; } break ; default : if ( cnt3 >= 1 ) break loop3 ; EarlyExitException eee = new EarlyExitException ( 3 , input ) ; throw eee ; } cnt3 ++ ; } while ( true ) ; } state . type = _type ; state . channel = _channel ; } finally { } |
public class PdfContentReaderTool { /** * Shows the detail of a dictionary .
* @ param dicthe dictionary of which you want the detail
* @ param depththe depth of the current dictionary ( for nested dictionaries )
* @ returna String representation of the dictionary */
static public String getDictionaryDetail ( PdfDictionary dic , int depth ) { } } | StringBuffer builder = new StringBuffer ( ) ; builder . append ( '(' ) ; List subDictionaries = new ArrayList ( ) ; for ( Iterator i = dic . getKeys ( ) . iterator ( ) ; i . hasNext ( ) ; ) { PdfName key = ( PdfName ) i . next ( ) ; PdfObject val = dic . getDirectObject ( key ) ; if ( val . isDictionary ( ) ) subDictionaries . add ( key ) ; builder . append ( key ) ; builder . append ( '=' ) ; builder . append ( val ) ; builder . append ( ", " ) ; } builder . setLength ( builder . length ( ) - 2 ) ; builder . append ( ')' ) ; PdfName pdfSubDictionaryName ; for ( Iterator it = subDictionaries . iterator ( ) ; it . hasNext ( ) ; ) { pdfSubDictionaryName = ( PdfName ) it . next ( ) ; builder . append ( '\n' ) ; for ( int i = 0 ; i < depth + 1 ; i ++ ) { builder . append ( '\t' ) ; } builder . append ( "Subdictionary " ) ; builder . append ( pdfSubDictionaryName ) ; builder . append ( " = " ) ; builder . append ( getDictionaryDetail ( dic . getAsDict ( pdfSubDictionaryName ) , depth + 1 ) ) ; } return builder . toString ( ) ; |
public class JdbcTarget { /** * Construct insert query using property names and database table name .
* @ return insert query */
private String createInsertQuery ( ) { } } | StringBuilder query = new StringBuilder ( ) ; query . append ( "insert into " ) . append ( m_tableName ) . append ( "(" ) . append ( m_propertyNames [ 0 ] ) ; for ( int i = 1 ; i < m_propertyNames . length ; i ++ ) { query . append ( "," ) . append ( m_propertyNames [ i ] ) ; } query . append ( ") values (?" ) ; for ( int i = 1 ; i < m_propertyNames . length ; i ++ ) { query . append ( ",?" ) ; } query . append ( ")" ) ; return query . toString ( ) ; |
public class EurekaBootStrap { /** * Handles Eureka cleanup , including shutting down all monitors and yielding all EIPs .
* @ see javax . servlet . ServletContextListener # contextDestroyed ( javax . servlet . ServletContextEvent ) */
@ Override public void contextDestroyed ( ServletContextEvent event ) { } } | try { logger . info ( "{} Shutting down Eureka Server.." , new Date ( ) ) ; ServletContext sc = event . getServletContext ( ) ; sc . removeAttribute ( EurekaServerContext . class . getName ( ) ) ; destroyEurekaServerContext ( ) ; destroyEurekaEnvironment ( ) ; } catch ( Throwable e ) { logger . error ( "Error shutting down eureka" , e ) ; } logger . info ( "{} Eureka Service is now shutdown..." , new Date ( ) ) ; |
public class SignalRsInner { /** * Handles requests to list all resources in a resource group .
* @ param resourceGroupName The name of the resource group that contains the resource . You can obtain this value from the Azure Resource Manager API or the portal .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the PagedList & lt ; SignalRResourceInner & gt ; object */
public Observable < Page < SignalRResourceInner > > listByResourceGroupAsync ( final String resourceGroupName ) { } } | return listByResourceGroupWithServiceResponseAsync ( resourceGroupName ) . map ( new Func1 < ServiceResponse < Page < SignalRResourceInner > > , Page < SignalRResourceInner > > ( ) { @ Override public Page < SignalRResourceInner > call ( ServiceResponse < Page < SignalRResourceInner > > response ) { return response . body ( ) ; } } ) ; |
public class ConnecClient { /** * Update an entity remotely
* @ param entity
* name
* @ param groupId
* customer group id
* @ param entityId
* id of the entity to retrieve
* @ param jsonStr
* entity attributes to update
* @ param httpClient
* @ return updated entity
* @ throws AuthenticationException
* @ throws ApiException
* @ throws InvalidRequestException */
public Map < String , Object > update ( String entityName , String groupId , String entityId , String jsonStr , MnoHttpClient httpClient ) throws AuthenticationException , ApiException , InvalidRequestException { } } | String jsonBody = httpClient . put ( getInstanceUrl ( entityName , groupId , entityId ) , jsonStr ) ; Type typeOfHashMap = HashMap . class ; return GSON . fromJson ( jsonBody , typeOfHashMap ) ; |
public class RulesApplier { /** * A recursive method that iterates the sentence given a base token group ( sentence or chunk ) . Used to
* match general and phrase local rules .
* @ param mistakes
* a list of mistakes found in the process of checking the sentence
* @ param currentStates
* the applier will check if these states match the current token
* @ param tokenGroup
* can be a sentence or a chunk ( classes that implement the interface TokenGroup )
* @ param baseTokenIndex
* the index of the token in which the process of searching for mistakes began
* @ param currentTokenIndex
* the index of the current token group
* @ param sentence
* the complete sentence , used to get the location of the mistake counted by chars inside the
* sentence
* @ param dictionary
* a word and tag dictionary .
* @ return the mistakes in the parameter < code > mistakes < / code > plus the mistakes found in this
* invocation , if any */
private List < Mistake > getMistakes ( List < Mistake > mistakes , List < State > currentStates , TokenGroup tokenGroup , int baseTokenIndex , int currentTokenIndex , ArrayList < Token > matched , Sentence sentence ) { } } | int offset = 0 ; if ( tokenGroup instanceof Chunk ) { offset = ( ( Chunk ) tokenGroup ) . getFirstToken ( ) ; } for ( State state : currentStates ) { PatternElement patternElement = state . getElement ( ) ; Token token = tokenGroup . getTokens ( ) . get ( currentTokenIndex ) ; boolean tokenAndElementMatched = this . match ( token , patternElement , baseTokenIndex + offset , sentence ) ; if ( tokenAndElementMatched ) { // need to clone because due to recursive implementation
ArrayList < Token > matchedClone = cloneList ( matched ) ; matchedClone . add ( token ) ; if ( state instanceof AcceptState ) { // Got a mistake !
Rule rule = ( ( AcceptState ) state ) . getRule ( ) ; // The mistake is located between the tokens indicated by lower and upper .
int lower = baseTokenIndex + rule . getBoundaries ( ) . getLower ( ) ; int upper = currentTokenIndex + rule . getBoundaries ( ) . getUpper ( ) ; lower += offset ; upper += offset ; // Pointing the mistake location using the chars in the sentence .
int lowerCountedByChars = sentence . getTokens ( ) . get ( lower ) . getSpan ( ) . getStart ( ) ; int upperCountedByChars = sentence . getTokens ( ) . get ( upper ) . getSpan ( ) . getEnd ( ) ; // Suggestions .
String [ ] suggestions = new String [ 0 ] ; Token next = null ; if ( tokenGroup . getTokens ( ) . size ( ) > currentTokenIndex + 1 ) { next = tokenGroup . getTokens ( ) . get ( currentTokenIndex + 1 ) ; } try { suggestions = suggestionBuilder . getTokenSuggestions ( sentence , matchedClone , next , rule ) ; } catch ( NullPointerException e ) { LOGGER . error ( "Failed to apply rule " + rule . getId ( ) + " in: " + sentence . getSentence ( ) , e ) ; } Mistake mistake = new MistakeImpl ( ID_PREFIX + rule . getId ( ) , getPriority ( rule ) , rule . getMessage ( ) , rule . getShortMessage ( ) , suggestions , lowerCountedByChars + sentence . getOffset ( ) , upperCountedByChars + sentence . getOffset ( ) , rule . getExample ( ) , sentence . getDocumentText ( ) ) ; mistakes . add ( mistake ) ; } else if ( currentTokenIndex + 1 < tokenGroup . getTokens ( ) . size ( ) ) { // Keep looking : recurse .
this . getMistakes ( mistakes , state . getNextStates ( ) , tokenGroup , baseTokenIndex , currentTokenIndex + 1 , matchedClone , sentence ) ; } } else if ( isOptional ( patternElement ) ) { // it is valid only if the next is valid here !
// just keep looking without movin to the next token
this . getMistakes ( mistakes , state . getNextStates ( ) , tokenGroup , baseTokenIndex , currentTokenIndex , matched , sentence ) ; } } return mistakes ; |
public class AddTask { /** * / * ( non - Javadoc )
* @ see com . ibm . ws . sib . msgstore . cache . xalist . Task # commitStage1 ( com . ibm . ws . sib . msgstore . Transaction ) */
public final void commitInternal ( final PersistentTransaction transaction ) { } } | if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( this , tc , "commitInternal" , transaction ) ; getLink ( ) . internalCommitAdd ( ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( this , tc , "commitInternal" ) ; |
public class KinesisDataFetcher { /** * Utility function to convert { @ link StreamShardMetadata } into { @ link StreamShardHandle } .
* @ param streamShardMetadata the { @ link StreamShardMetadata } to be converted
* @ return a { @ link StreamShardHandle } object */
public static StreamShardHandle convertToStreamShardHandle ( StreamShardMetadata streamShardMetadata ) { } } | Shard shard = new Shard ( ) ; shard . withShardId ( streamShardMetadata . getShardId ( ) ) ; shard . withParentShardId ( streamShardMetadata . getParentShardId ( ) ) ; shard . withAdjacentParentShardId ( streamShardMetadata . getAdjacentParentShardId ( ) ) ; HashKeyRange hashKeyRange = new HashKeyRange ( ) ; hashKeyRange . withStartingHashKey ( streamShardMetadata . getStartingHashKey ( ) ) ; hashKeyRange . withEndingHashKey ( streamShardMetadata . getEndingHashKey ( ) ) ; shard . withHashKeyRange ( hashKeyRange ) ; SequenceNumberRange sequenceNumberRange = new SequenceNumberRange ( ) ; sequenceNumberRange . withStartingSequenceNumber ( streamShardMetadata . getStartingSequenceNumber ( ) ) ; sequenceNumberRange . withEndingSequenceNumber ( streamShardMetadata . getEndingSequenceNumber ( ) ) ; shard . withSequenceNumberRange ( sequenceNumberRange ) ; return new StreamShardHandle ( streamShardMetadata . getStreamName ( ) , shard ) ; |
public class TruncatableThrowable { /** * Unfortunately , toString ( ) isn ' t good enough since we may have elements in the stack representing
* internal classes , and they don ' t toString ( ) well . */
public static String printStackTraceElement ( StackTraceElement stackTraceElement ) { } } | final String toString ; if ( stackTraceElement == null ) { toString = "at null" ; } // Look for our eyecatcher representing a truncation
else if ( TRIMMED_STACK_TRACE_ELEMENT . equals ( stackTraceElement ) ) { // Use only the class name , which is actually [ internal classes ]
toString = "at " + stackTraceElement . getClassName ( ) ; // Look for our eyecatcher representing a the stripping of redundant cause frames
} else if ( DUPLICATE_FRAMES_EYECATCHER . equals ( stackTraceElement . getMethodName ( ) ) ) { // Use only the class name , which will include the . . . - we don ' t want a " at " in this case
toString = stackTraceElement . getClassName ( ) ; } else { // The ' at ' is a non - internationalised string in what we ' re trying to imitate
toString = "at " + stackTraceElement . toString ( ) ; } return toString ; |
public class InternalSARLLexer { /** * $ ANTLR start " T _ _ 108" */
public final void mT__108 ( ) throws RecognitionException { } } | try { int _type = T__108 ; int _channel = DEFAULT_TOKEN_CHANNEL ; // InternalSARL . g : 94:8 : ( ' - = ' )
// InternalSARL . g : 94:10 : ' - = '
{ match ( "-=" ) ; } state . type = _type ; state . channel = _channel ; } finally { } |
public class RestRequestValidator { /** * Prints a debug log message that details the time taken for the Http
* request to be parsed by the coordinator
* @ param operationType
* @ param receivedTimeInMs */
protected void debugLog ( String operationType , Long receivedTimeInMs ) { } } | long durationInMs = receivedTimeInMs - ( this . parsedRequestOriginTimeInMs ) ; int numVectorClockEntries = ( this . parsedVectorClock == null ? 0 : this . parsedVectorClock . getVersionMap ( ) . size ( ) ) ; logger . debug ( "Received a new request. Operation type: " + operationType + " , Key(s): " + keysHexString ( this . parsedKeys ) + " , Store: " + this . storeName + " , Origin time (in ms): " + ( this . parsedRequestOriginTimeInMs ) + " , Request received at time(in ms): " + receivedTimeInMs + " , Num vector clock entries: " + numVectorClockEntries + " , Duration from RESTClient to CoordinatorRestRequestValidator(in ms): " + durationInMs ) ; |
public class VNode { /** * Chooses a proper bound for this VNode according to its class . */
public void setSizeAccordingToClass ( ) { } } | String glyphClass = this . glyph . getClazz ( ) ; // If glyph class is not specified return here
if ( glyphClass . equalsIgnoreCase ( NONE ) ) return ; if ( glyphClass . equalsIgnoreCase ( SOURCE_AND_SINK ) ) { setBounds ( SOURCE_AND_SINK_BOUND . getWidth ( ) , SOURCE_AND_SINK_BOUND . getHeight ( ) ) ; } else if ( glyphClass . equalsIgnoreCase ( AND ) || glyphClass . equalsIgnoreCase ( OR ) || glyphClass . equalsIgnoreCase ( NOT ) ) { setBounds ( LOGICAL_OPERATOR_BOUND . getWidth ( ) , LOGICAL_OPERATOR_BOUND . getHeight ( ) ) ; } else if ( glyphClass . equalsIgnoreCase ( ASSOCIATION ) || glyphClass . equalsIgnoreCase ( DISSOCIATION ) || glyphClass . equalsIgnoreCase ( OMITTED_PROCESS ) || glyphClass . equalsIgnoreCase ( UNCERTAIN_PROCESS ) || glyphClass . equalsIgnoreCase ( PROCESS ) ) { setBounds ( PROCESS_NODES_BOUND . getWidth ( ) , PROCESS_NODES_BOUND . getHeight ( ) ) ; } else if ( glyphClass . equalsIgnoreCase ( SIMPLE_CHEMICAL ) ) { setBounds ( SIMPLE_CHEMICAL_BOUND . getWidth ( ) , SIMPLE_CHEMICAL_BOUND . getHeight ( ) ) ; } else if ( glyphClass . equalsIgnoreCase ( UNSPECIFIED_ENTITY ) ) { setBounds ( UNSPECIFIED_ENTITY_BOUND . getWidth ( ) , UNSPECIFIED_ENTITY_BOUND . getHeight ( ) ) ; } else if ( glyphClass . equalsIgnoreCase ( MACROMOLECULE ) ) { setBounds ( MACROMOLECULE_BOUND . getWidth ( ) , MACROMOLECULE_BOUND . getHeight ( ) ) ; } else if ( glyphClass . equalsIgnoreCase ( NUCLEIC_ACID_FEATURE ) ) { setBounds ( NUCLEIC_ACID_FEATURE_BOUND . getWidth ( ) , NUCLEIC_ACID_FEATURE_BOUND . getHeight ( ) ) ; } else if ( glyphClass . equalsIgnoreCase ( STATE_VARIABLE ) ) { setBounds ( STATE_BOUND . getWidth ( ) , STATE_BOUND . getHeight ( ) ) ; } else if ( glyphClass . equalsIgnoreCase ( UNIT_OF_INFORMATION ) ) { setBounds ( INFO_BOUND . getWidth ( ) , INFO_BOUND . getHeight ( ) ) ; } else if ( glyphClass . equalsIgnoreCase ( PHENOTYPE ) ) { setBounds ( PHENOTYPE_BOUND . getWidth ( ) , PHENOTYPE_BOUND . getHeight ( ) ) ; } else if ( glyphClass . equalsIgnoreCase ( PERTURBING_AGENT ) ) { setBounds ( PERTURBING_AGENT_BOUND . 
getWidth ( ) , PERTURBING_AGENT_BOUND . getHeight ( ) ) ; } else if ( glyphClass . equalsIgnoreCase ( TAG ) ) { setBounds ( TAG_BOUND . getWidth ( ) , TAG_BOUND . getHeight ( ) ) ; } else if ( glyphClass . equalsIgnoreCase ( COMPLEX ) ) { setBounds ( COMPLEX_BOUND . getWidth ( ) , COMPLEX_BOUND . getHeight ( ) ) ; } if ( this . glyph . getClone ( ) != null ) { Bbox glyphBbox = this . glyph . getBbox ( ) ; setBounds ( 3 * glyphBbox . getW ( ) / 4 , 3 * glyphBbox . getH ( ) / 4 ) ; } if ( glyphClass . equalsIgnoreCase ( MACROMOLECULE ) || glyphClass . equalsIgnoreCase ( NUCLEIC_ACID_FEATURE ) || glyphClass . equalsIgnoreCase ( SIMPLE_CHEMICAL ) || glyphClass . equalsIgnoreCase ( COMPLEX ) ) { updateSizeForStateAndInfo ( ) ; } |
public class RegExpImpl { /** * Analog of match _ glob ( ) in jsstr . c */
private static void match_glob ( GlobData mdata , Context cx , Scriptable scope , int count , RegExpImpl reImpl ) { } } | if ( mdata . arrayobj == null ) { mdata . arrayobj = cx . newArray ( scope , 0 ) ; } SubString matchsub = reImpl . lastMatch ; String matchstr = matchsub . toString ( ) ; mdata . arrayobj . put ( count , mdata . arrayobj , matchstr ) ; |
public class Conjunction { /** * Appends another conjunction to this conjunction . */
public Conjunction append ( IConjunct conjunct ) { } } | for ( Iterator < IDisjunct > disjuncts = conjunct . getDisjuncts ( ) ; disjuncts . hasNext ( ) ; add ( disjuncts . next ( ) ) ) ; return this ; |
public class Vector2dfx { /** * Replies the property that represents the length of the vector .
* @ return the length property */
public ReadOnlyDoubleProperty lengthSquaredProperty ( ) { } } | if ( this . lengthSquareProperty == null ) { this . lengthSquareProperty = new ReadOnlyDoubleWrapper ( this , MathFXAttributeNames . LENGTH_SQUARED ) ; this . lengthSquareProperty . bind ( Bindings . createDoubleBinding ( ( ) -> Vector2dfx . this . x . doubleValue ( ) * Vector2dfx . this . x . doubleValue ( ) + Vector2dfx . this . y . doubleValue ( ) * Vector2dfx . this . y . doubleValue ( ) , this . x , this . y ) ) ; } return this . lengthSquareProperty . getReadOnlyProperty ( ) ; |
public class BuzzHash { /** * are the same . */
public static final long computeHashCode ( String str , boolean modifyAlgorithm ) { } } | // d179573 Ends
// to avoid EBCDIC / ASCII / Unicdoe encoding difference ( i . e . z / OS390)
// uses toCharArray ( ) instead of getBytes ( ) .
char chars [ ] = str . toCharArray ( ) ; // Corrected the algorithm to more closely match the ' buzHash '
// algorithm it is patterned from and produce both a 64 and 32
// bit value ( 64 bit value may be better ) .
long h = initial_hash ; for ( int i = 0 ; i < chars . length ; ++ i ) { // d179573 Begins
char thisChar = chars [ i ] ; if ( modifyAlgorithm ) { // for all recurrences of the same character in a multiple 64 positions
// before the current index , need to mix / scramble the hash to another iteration .
// d183360 Begins
int j = i ; while ( j > 0 ) { if ( j >= 64 && thisChar == chars [ j - 64 ] ) { thisChar ^= mix_master [ thisChar & 0xff ] ; } j -= 64 ; } // d183360 Ends
} h = ( ( h << 1 ) | ( h >>> 63 ) ) ^ mix_master [ thisChar & 0xff ] ; // d179573 Ends
} return h ; |
public class WorkspacePersistentDataManager { /** * { @ inheritDoc } */
public List < PropertyData > getReferencesData ( final String identifier , boolean skipVersionStorage ) throws RepositoryException { } } | final WorkspaceStorageConnection con = dataContainer . openConnection ( ) ; try { final List < PropertyData > allRefs = con . getReferencesData ( identifier ) ; final List < PropertyData > refProps = new ArrayList < PropertyData > ( ) ; for ( int i = 0 ; i < allRefs . size ( ) ; i ++ ) { PropertyData ref = allRefs . get ( i ) ; if ( skipVersionStorage ) { if ( ! ref . getQPath ( ) . isDescendantOf ( Constants . JCR_VERSION_STORAGE_PATH ) ) { refProps . add ( ref ) ; } } else { refProps . add ( ref ) ; } } return refProps ; } finally { con . close ( ) ; } |
public class LineInput { public synchronized int read ( ) throws IOException { } } | int b ; if ( _pos >= _avail ) fill ( ) ; if ( _pos >= _avail ) b = - 1 ; else b = _buf [ _pos ++ ] & 255 ; return b ; |
public class FTPUploader { /** * Upload directory to specified FTP server with retries .
* @ param ftpServer
* @ param username
* @ param password
* @ param sourceDirectory
* @ param targetDirectory
* @ param maxRetryCount
* @ throws MojoExecutionException */
public void uploadDirectoryWithRetries ( final String ftpServer , final String username , final String password , final String sourceDirectory , final String targetDirectory , final int maxRetryCount ) throws MojoExecutionException { } } | int retryCount = 0 ; while ( retryCount < maxRetryCount ) { retryCount ++ ; log . info ( UPLOAD_START + ftpServer ) ; if ( uploadDirectory ( ftpServer , username , password , sourceDirectory , targetDirectory ) ) { log . info ( UPLOAD_SUCCESS + ftpServer ) ; return ; } else { log . warn ( String . format ( UPLOAD_FAILURE , retryCount , maxRetryCount ) ) ; } } // Reaching here means all retries failed .
throw new MojoExecutionException ( String . format ( UPLOAD_RETRY_FAILURE , maxRetryCount ) ) ; |
public class JsHdrsImpl { /** * Set the identity of the destination definition ( not localisation )
* Javadoc description supplied by CommonMessageHeaders interface . */
public final void setGuaranteedTargetDestinationDefinitionUUID ( SIBUuid12 value ) { } } | if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( this , tc , "setGuaranteedTargetDestinationDefinitionUUID" , value ) ; if ( value != null ) getHdr2 ( ) . setField ( JsHdr2Access . GUARANTEED_SET_TARGETDESTDEFUUID , value . toByteArray ( ) ) ; else getHdr2 ( ) . setField ( JsHdr2Access . GUARANTEED_SET_TARGETDESTDEFUUID , null ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( this , tc , "setGuaranteedTargetDestinationDefinitionUUID" ) ; |
public class Collections { /** * Returns a synchronized ( thread - safe ) list backed by the specified
* list . In order to guarantee serial access , it is critical that
* < strong > all < / strong > access to the backing list is accomplished
* through the returned list . < p >
* It is imperative that the user manually synchronize on the returned
* list when iterating over it :
* < pre >
* List list = Collections . synchronizedList ( new ArrayList ( ) ) ;
* synchronized ( list ) {
* Iterator i = list . iterator ( ) ; / / Must be in synchronized block
* while ( i . hasNext ( ) )
* foo ( i . next ( ) ) ;
* < / pre >
* Failure to follow this advice may result in non - deterministic behavior .
* < p > The returned list will be serializable if the specified list is
* serializable .
* @ param < T > the class of the objects in the list
* @ param list the list to be " wrapped " in a synchronized list .
* @ return a synchronized view of the specified list . */
public static < T > List < T > synchronizedList ( List < T > list ) { } } | return ( list instanceof RandomAccess ? new SynchronizedRandomAccessList < > ( list ) : new SynchronizedList < > ( list ) ) ; |
public class StringUtils { /** * < p > Returns either the passed in CharSequence , or if the CharSequence is
* empty or { @ code null } , the value of { @ code defaultStr } . < / p >
* < pre >
* StringUtils . defaultIfEmpty ( null , " NULL " ) = " NULL "
* StringUtils . defaultIfEmpty ( " " , " NULL " ) = " NULL "
* StringUtils . defaultIfEmpty ( " " , " NULL " ) = " "
* StringUtils . defaultIfEmpty ( " bat " , " NULL " ) = " bat "
* StringUtils . defaultIfEmpty ( " " , null ) = null
* < / pre >
* @ param < T > the specific kind of CharSequence
* @ param str the CharSequence to check , may be null
* @ param defaultStr the default CharSequence to return
* if the input is empty ( " " ) or { @ code null } , may be null
* @ return the passed in CharSequence , or the default
* @ see StringUtils # defaultString ( String , String ) */
public static < T extends CharSequence > T defaultIfEmpty ( final T str , final T defaultStr ) { } } | return isEmpty ( str ) ? defaultStr : str ; |
public class FNNRGImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
@ Override public boolean eIsSet ( int featureID ) { } } | switch ( featureID ) { case AfplibPackage . FNNRG__GCGID : return GCGID_EDEFAULT == null ? gcgid != null : ! GCGID_EDEFAULT . equals ( gcgid ) ; case AfplibPackage . FNNRG__TS_OFFSET : return TS_OFFSET_EDEFAULT == null ? tsOffset != null : ! TS_OFFSET_EDEFAULT . equals ( tsOffset ) ; } return super . eIsSet ( featureID ) ; |
public class PhaseApplication { /** * Directs phase application lifecycle .
* @ param phase Phase application */
protected static void harness ( PhaseApplication phase ) { } } | try { phase . start ( ) ; phase . stop ( ) ; phase . end ( ) ; } catch ( BELRuntimeException bre ) { err . println ( bre . getUserFacingMessage ( ) ) ; systemExit ( bre . getExitCode ( ) ) ; } catch ( OutOfMemoryError oom ) { err . println ( ) ; oom . printStackTrace ( ) ; long upperlimit = getRuntime ( ) . maxMemory ( ) ; double ulMB = upperlimit * 9.53674316e-7 ; final NumberFormat fmt = new DecimalFormat ( "#0" ) ; String allocation = fmt . format ( ulMB ) ; err . println ( "\n(current allocation is " + allocation + " MB)" ) ; systemExit ( ExitCode . OOM_ERROR ) ; } |
public class ProjectClient { /** * Disable this project as a shared VPC host project .
* < p > Sample code :
* < pre > < code >
* try ( ProjectClient projectClient = ProjectClient . create ( ) ) {
* ProjectName project = ProjectName . of ( " [ PROJECT ] " ) ;
* Operation response = projectClient . disableXpnHostProject ( project ) ;
* < / code > < / pre >
* @ param project Project ID for this request .
* @ throws com . google . api . gax . rpc . ApiException if the remote call fails */
@ BetaApi public final Operation disableXpnHostProject ( ProjectName project ) { } } | DisableXpnHostProjectHttpRequest request = DisableXpnHostProjectHttpRequest . newBuilder ( ) . setProject ( project == null ? null : project . toString ( ) ) . build ( ) ; return disableXpnHostProject ( request ) ; |
public class CmsRootHandler { /** * Returns the handler for the given id . < p >
* @ param entityId the entity id
* @ param attributeName the attribute name
* @ return the handler */
public CmsAttributeHandler getHandlerById ( String entityId , String attributeName ) { } } | return m_handlerById . get ( entityId + "/" + attributeName ) ; |
public class FieldsInner {
    /**
     * Retrieve a list of fields of a given type identified by module name,
     * asynchronously, delivering the outcome to the supplied callback.
     *
     * @param resourceGroupName Name of an Azure Resource group.
     * @param automationAccountName The name of the automation account.
     * @param moduleName The name of module.
     * @param typeName The name of type.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<List<TypeFieldInner>> listByTypeAsync(String resourceGroupName, String automationAccountName, String moduleName, String typeName, final ServiceCallback<List<TypeFieldInner>> serviceCallback) {
        // Adapt the Observable-based overload into a ServiceFuture wired to the callback.
        return ServiceFuture.fromResponse(listByTypeWithServiceResponseAsync(resourceGroupName, automationAccountName, moduleName, typeName), serviceCallback);
    }
}
public class DiscordApiImpl {
    /**
     * Adds the given server to the cache.
     * <p>
     * The server starts in the "non ready" map and is promoted to the main
     * server map (and removed from the unavailable-server set) once it reports
     * ready via the registered consumer.
     *
     * @param server The server to add.
     */
    public void addServerToCache(ServerImpl server) {
        // Remove in case there's an old instance in cache
        removeServerFromCache(server.getId());
        nonReadyServers.put(server.getId(), server);
        server.addServerReadyConsumer(s -> {
            // Promote from "non ready" to the live cache once the server is ready.
            nonReadyServers.remove(s.getId());
            removeUnavailableServerFromCache(s.getId());
            servers.put(s.getId(), s);
        });
    }
}
public class LinearSearch { /** * Search for the value in the list and return the index of the first occurrence from the
* beginning of the list . The run time of this algorithm depends on the
* implementation of the list . It is advised to use an array based implementation
* to achieve O ( n ) runtime .
* @ param < E > the type of elements in this list .
* @ param list list that we are searching in .
* @ param value value that is being searched in the list .
* @ return the index where the value is found in the list , else - 1. */
public static < E > int search ( List < E > list , E value ) { } } | return LinearSearch . search ( list , value , 1 ) ; |
public class PriorityScheduler { /** * Adds the { @ code schedulable } to the list using the given { @ code frequency } , { @ code phase } and priority .
* @ param schedulable the task to schedule
* @ param frequency the frequency
* @ param phase the phase
* @ param priority the priority */
public void add ( Schedulable schedulable , int frequency , int phase , float priority ) { } } | // Compile the record and add it to the list
schedulableRecords . add ( new PrioritySchedulableRecord ( schedulable , frequency , phase , priority ) ) ; |
public class XMLSecurityDescriptor { /** * ( non - Javadoc )
* @ see net . timewalker . ffmq4 . utils . Checkable # check ( ) */
@ Override public void check ( ) throws JMSException { } } | Iterator < User > users = userMap . values ( ) . iterator ( ) ; while ( users . hasNext ( ) ) { User user = users . next ( ) ; user . check ( ) ; } |
public class SetBiLevelImageColorImpl {
    /**
     * Sets the NAMECOLR attribute and, when listeners are attached, fires an
     * EMF SET notification carrying the old and new values.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     *
     * @param newNAMECOLR the new NAMECOLR value (may be null)
     * @generated
     */
    public void setNAMECOLR(Integer newNAMECOLR) {
        Integer oldNAMECOLR = namecolr;
        namecolr = newNAMECOLR;
        // Generated EMF notification pattern: only notify when adapters exist.
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, AfplibPackage.SET_BI_LEVEL_IMAGE_COLOR__NAMECOLR, oldNAMECOLR, namecolr));
    }
}
public class SessionListener {
    /**
     * {@inheritDoc}
     * <p>
     * Clears the tracked session map and counter before the webapp is removed,
     * and stops the JDBC wrapper as a safety net for containers that never call
     * the filter's destroy (see issue 665 for WildFly 10.1.0).
     */
    @Override
    public void contextDestroyed(ServletContextEvent event) {
        if (!instanceEnabled) {
            return;
        }
        // cleanup before webapp removal in case it is not enough on its own
        SESSION_MAP_BY_ID.clear();
        SESSION_COUNT.set(0);
        // issue 665: in WildFly 10.1.0, the MonitoringFilter may never be initialized nor destroyed.
        // For this case, it is needed to stop here the JdbcWrapper initialized in contextInitialized
        JdbcWrapper.SINGLETON.stop();
        LOG.debug("JavaMelody listener destroy done");
    }
}
public class Util { /** * helper method to convert a ' delimiter ' separated string to a list .
* This is a VERY basic parsing / creation of CSV , if you seen anything superior then use < a href = " http : / / flatpack . sf . net " > FlatPack < / a > .
* @ param str
* the ' delimiter ' separated string
* @ param delimiter
* typically a ' , '
* @ return a list
* @ see < a href = " http : / / flatpack . sf . net " > FlatPack < / a > for more comprehensive parser */
public static List < String > listify ( final String str , final String delimiter ) { } } | if ( str == null ) { return Collections . emptyList ( ) ; } final StringTokenizer tok = new StringTokenizer ( str , delimiter ) ; final List < String > list = new ArrayList < > ( ) ; while ( tok . hasMoreElements ( ) ) { list . add ( StringUtil . trim ( tok . nextToken ( ) ) ) ; } return list ; |
public class TypeResolver {
    /**
     * Works like {@link #resolveRawClass(Type, Class, Class)} but does not stop at raw
     * classes. Instead, traverses referenced types, producing a fully reified type.
     * <p>
     * Handles, in order: plain classes (terminal), parameterized types, generic array
     * types, type variables, and wildcard types. Cycles in the type structure are
     * detected via the {@code cache} map.
     *
     * @param genericType the type to reify
     * @param typeVariableMap resolved bindings for type variables
     * @param cache contains a mapping of generic types to reified types. A value of
     *        {@code null} inside a {@link ReifiedParameterizedType} instance means that
     *        this type is currently being reified.
     * @return the reified type (possibly the input itself when nothing changed)
     */
    private static Type reify(Type genericType, final Map<TypeVariable<?>, Type> typeVariableMap, Map<Type, Type> cache) {
        // Terminal case.
        if (genericType instanceof Class<?>)
            return genericType;
        // For cycles of length larger than one, find its last element by chasing through cache.
        while (cache.containsKey(genericType)) {
            genericType = cache.get(genericType);
        }
        // Recursive cases.
        if (genericType instanceof ParameterizedType) {
            final ParameterizedType parameterizedType = (ParameterizedType) genericType;
            final Type[] genericTypeArguments = parameterizedType.getActualTypeArguments();
            final Type[] reifiedTypeArguments = new Type[genericTypeArguments.length];
            // Register the partially-built result before recursing so self-references hit the cache.
            ReifiedParameterizedType result = new ReifiedParameterizedType(parameterizedType);
            cache.put(genericType, result);
            boolean changed = false;
            for (int i = 0; i < genericTypeArguments.length; i++) {
                // Cycle detection. In case a genericTypeArgument is null, it is currently being resolved,
                // thus there's a cycle in the type's structure.
                if (genericTypeArguments[i] == null) {
                    return parameterizedType;
                }
                reifiedTypeArguments[i] = reify(genericTypeArguments[i], typeVariableMap, cache);
                changed = changed || (reifiedTypeArguments[i] != genericTypeArguments[i]);
            }
            // No argument changed: return the original to avoid allocating a new type.
            if (!changed)
                return parameterizedType;
            result.setReifiedTypeArguments(reifiedTypeArguments);
            return result;
        } else if (genericType instanceof GenericArrayType) {
            final GenericArrayType genericArrayType = (GenericArrayType) genericType;
            final Type genericComponentType = genericArrayType.getGenericComponentType();
            final Type reifiedComponentType = reify(genericArrayType.getGenericComponentType(), typeVariableMap, cache);
            if (genericComponentType == reifiedComponentType)
                return genericComponentType;
            // Build the array class by instantiating a zero-length array of the component class.
            if (reifiedComponentType instanceof Class<?>)
                return Array.newInstance((Class<?>) reifiedComponentType, 0).getClass();
            throw new UnsupportedOperationException("Attempted to reify generic array type, whose generic component type " + "could not be reified to some Class<?>. Handling for this case is not implemented");
        } else if (genericType instanceof TypeVariable<?>) {
            final TypeVariable<?> typeVariable = (TypeVariable<?>) genericType;
            final Type mapping = typeVariableMap.get(typeVariable);
            if (mapping != null) {
                cache.put(typeVariable, mapping);
                return reify(mapping, typeVariableMap, cache);
            }
            final Type[] upperBounds = typeVariable.getBounds();
            // Copy cache in case the bound is mutually recursive on the variable. This is to avoid sharing of
            // cache in different branches of the call-graph of reify.
            cache = new HashMap<Type, Type>(cache);
            // NOTE: According to https://docs.oracle.com/javase/tutorial/java/generics/bounded.html
            // if there are multiple upper bounds where one bound is a class, then this must be the
            // leftmost/first bound. Therefore we blindly take this one, hoping it is the most relevant.
            // Hibernate does the same when erasing types, see also
            // https://github.com/hibernate/hibernate-validator/blob/6.0/engine/src/main/java/org/hibernate/validator/internal/util/TypeHelper.java#L181-L186
            cache.put(typeVariable, upperBounds[0]);
            return reify(upperBounds[0], typeVariableMap, cache);
        } else if (genericType instanceof WildcardType) {
            final WildcardType wildcardType = (WildcardType) genericType;
            final Type[] upperBounds = wildcardType.getUpperBounds();
            final Type[] lowerBounds = wildcardType.getLowerBounds();
            // Only the trivial wildcard shape (exactly one upper bound, no lower bound) is supported.
            if (upperBounds.length == 1 && lowerBounds.length == 0)
                return reify(upperBounds[0], typeVariableMap, cache);
            throw new UnsupportedOperationException("Attempted to reify wildcard type with name '" + wildcardType + "' which has " + upperBounds.length + " upper bounds and " + lowerBounds.length + " lower bounds. " + "Reification of wildcard types is only supported for " + "the trivial case of exactly one upper bound and no lower bounds.");
        }
        throw new UnsupportedOperationException("Reification of type with name '" + genericType.getTypeName() + "' and " + "class name '" + genericType.getClass().getName() + "' is not implemented.");
    }
}
public class CmsFlexCacheEntry { /** * Sets the expiration date of this Flex cache entry exactly to the
* given time . < p >
* @ param dateExpires the time to expire this cache entry */
public void setDateExpires ( long dateExpires ) { } } | m_dateExpires = dateExpires ; if ( LOG . isDebugEnabled ( ) ) { long now = System . currentTimeMillis ( ) ; LOG . debug ( Messages . get ( ) . getBundle ( ) . key ( Messages . LOG_FLEXCACHEENTRY_SET_EXPIRATION_DATE_3 , new Long ( m_dateExpires ) , new Long ( now ) , new Long ( m_dateExpires - now ) ) ) ; } |
public class SimpleDataModel { /** * Updates the specified item if found in the model , prepends it otherwise . */
public void updateItem ( T item ) { } } | if ( _items == null ) { return ; } int idx = _items . indexOf ( item ) ; if ( idx == - 1 ) { _items . add ( 0 , item ) ; } else { _items . set ( idx , item ) ; } |
public class Cache {
    /**
     * Looks up Records in the Cache (a wrapper around lookupRecords). Unlike
     * lookupRecords, this gives no indication of why failure occurred.
     *
     * @param name The name to look up
     * @param type The type to look up
     * @return An array of RRsets, or null
     * @see Credibility
     */
    public RRset[] findAnyRecords(Name name, int type) {
        // Queries with GLUE credibility — presumably the lowest threshold so
        // "any" cached record matches; confirm against the Credibility constants.
        return findRecords(name, type, Credibility.GLUE);
    }
}
public class InvocationsFinder {
    /**
     * Finds the chunk of invocations matching the wanted invocation.
     * <p>
     * Some examples how it works. Given invocations sequence:
     * 1,1,2,1
     * if wanted is 1 and mode is times(2) then returns
     * 1,1
     * if wanted is 1 and mode is atLeast() then returns
     * 1,1,1
     * if wanted is 1 and mode is times(x), where x != 2 then returns
     * 1,1,1
     *
     * @param invocations all recorded invocations
     * @param wanted the invocation being verified
     * @param wantedCount the expected number of matching invocations
     * @param context in-order verification context (tracks already-verified calls)
     * @return the first matching chunk when its size equals {@code wantedCount},
     *         otherwise all matching unverified chunks
     */
    public static List<Invocation> findMatchingChunk(List<Invocation> invocations, MatchableInvocation wanted, int wantedCount, InOrderContext context) {
        List<Invocation> unverified = removeVerifiedInOrder(invocations, context);
        List<Invocation> firstChunk = getFirstMatchingChunk(wanted, unverified);
        // Size mismatch: fall back to collecting every matching unverified chunk.
        if (wantedCount != firstChunk.size()) {
            return findAllMatchingUnverifiedChunks(invocations, wanted, context);
        } else {
            return firstChunk;
        }
    }
}
public class bioSourceImpl { /** * - - - - - Interface BioPAXElement - - - - - */
protected boolean semanticallyEquivalent ( BioPAXElement element ) { } } | final bioSource bioSource = ( bioSource ) element ; return ( CELLTYPE != null ? CELLTYPE . equals ( bioSource . getCELLTYPE ( ) ) : bioSource . getCELLTYPE ( ) == null ) && ( TAXON_XREF != null ? TAXON_XREF . equals ( bioSource . getTAXON_XREF ( ) ) : bioSource . getTAXON_XREF ( ) == null ) && ( TISSUE != null ? ! TISSUE . equals ( bioSource . getTISSUE ( ) ) : bioSource . getTISSUE ( ) != null ) ; |
public class UBL22ReaderBuilder {
    /**
     * Create a new reader builder.
     *
     * @param aClass
     *        The UBL class to be read. May not be <code>null</code>.
     * @return The new reader builder. Never <code>null</code>.
     * @param <T>
     *        The UBL 2.2 document implementation type
     */
    @Nonnull
    public static <T> UBL22ReaderBuilder<T> create(@Nonnull final Class<T> aClass) {
        return new UBL22ReaderBuilder<>(aClass);
    }
}
public class CipherLite {
    /**
     * Returns the inverse of the current {@link CipherLite}: a cipher with the
     * same key, IV and provider but the opposite mode (encrypt ↔ decrypt).
     * Only ENCRYPT_MODE and DECRYPT_MODE are invertible.
     *
     * @throws UnsupportedOperationException for any other cipher mode
     */
    CipherLite createInverse() throws InvalidKeyException, NoSuchAlgorithmException, NoSuchProviderException, NoSuchPaddingException, InvalidAlgorithmParameterException {
        int inversedMode;
        if (cipherMode == Cipher.DECRYPT_MODE)
            inversedMode = Cipher.ENCRYPT_MODE;
        else if (cipherMode == Cipher.ENCRYPT_MODE)
            inversedMode = Cipher.DECRYPT_MODE;
        else
            throw new UnsupportedOperationException();
        // "secreteKey" is the existing field name (typo preserved).
        return scheme.createCipherLite(secreteKey, cipher.getIV(), inversedMode, cipher.getProvider(), true);
    }
}
public class PdfBoxGraphics2DFontTextDrawerDefaultFonts {
    /**
     * Find a PDFont for the given font object, which does not need to be embedded.
     *
     * @param font
     *        font for which to find a suitable default font
     * @return null if no default font is found or a default font which does not
     *         need to be embedded.
     */
    public static PDFont mapDefaultFonts(Font font) {
        /*
         * Map default font names to the matching families.
         */
        if (fontNameEqualsAnyOf(font, Font.SANS_SERIF, Font.DIALOG, Font.DIALOG_INPUT, "Arial", "Helvetica"))
            return chooseMatchingHelvetica(font);
        if (fontNameEqualsAnyOf(font, Font.MONOSPACED, "courier", "courier new"))
            return chooseMatchingCourier(font);
        if (fontNameEqualsAnyOf(font, Font.SERIF, "Times", "Times New Roman", "Times Roman"))
            return chooseMatchingTimes(font);
        // Symbol fonts map directly to the PDF standard-14 symbol faces.
        if (fontNameEqualsAnyOf(font, "Symbol"))
            return PDType1Font.SYMBOL;
        if (fontNameEqualsAnyOf(font, "ZapfDingbats", "Dingbats"))
            return PDType1Font.ZAPF_DINGBATS;
        return null;
    }
}
public class MultinomialDPMM {
    /**
     * {@inheritDoc}
     * <p>
     * Builds a new cluster configured from the model's dimensionality and the
     * training parameter alphaWords, sharing the model's feature-id mapping.
     */
    @Override
    protected Cluster createNewCluster(Integer clusterId) {
        ModelParameters modelParameters = knowledgeBase.getModelParameters();
        TrainingParameters trainingParameters = knowledgeBase.getTrainingParameters();
        Cluster c = new Cluster(clusterId, modelParameters.getD(), trainingParameters.getAlphaWords());
        c.setFeatureIds(modelParameters.getFeatureIds());
        return c;
    }
}
public class JvmTypesBuilder {
    /**
     * Creates a {@link JvmAnnotationReference} for the given annotation type name,
     * optionally carrying a single String value.
     *
     * @param sourceElement the context used to resolve the annotation type (may be null)
     * @param annotationTypeName fully qualified annotation type name (may be null)
     * @param value when a String, becomes the annotation's explicit value; other
     *        non-null types are silently ignored
     * @return the annotation reference
     * @throws IllegalArgumentException if the type cannot be resolved or is not an annotation type
     * @deprecated
     */
    @Deprecated
    public JvmAnnotationReference toAnnotation(/* @Nullable */
    EObject sourceElement, /* @Nullable */
    String annotationTypeName, /* @Nullable */
    Object value) {
        JvmAnnotationReference result = typesFactory.createJvmAnnotationReference();
        JvmType jvmType = references.findDeclaredType(annotationTypeName, sourceElement);
        if (jvmType == null) {
            throw new IllegalArgumentException("The type " + annotationTypeName + " is not on the classpath.");
        }
        if (!(jvmType instanceof JvmAnnotationType)) {
            throw new IllegalArgumentException("The given class " + annotationTypeName + " is not an annotation type.");
        }
        result.setAnnotation((JvmAnnotationType) jvmType);
        if (value != null) {
            // Only String values are supported; anything else is dropped.
            if (value instanceof String) {
                JvmStringAnnotationValue annotationValue = typesFactory.createJvmStringAnnotationValue();
                annotationValue.getValues().add((String) value);
                result.getExplicitValues().add(annotationValue);
            }
        }
        return result;
    }
}
public class ResolvableType {
    /**
     * Return a {@link ResolvableType} for the specified {@link Field} with a given
     * implementation.
     * <p>Use this variant when the class that declares the field includes generic
     * parameter variables that are satisfied by the implementation class.
     *
     * @param field the source field
     * @param implementationClass the implementation class
     * @return a {@link ResolvableType} for the specified field
     * @see #forField(Field)
     */
    public static ResolvableType forField(Field field, Class<?> implementationClass) {
        Assert.notNull(field, "Field must not be null");
        // Narrow the implementation type to the field's declaring class so its
        // type variables can be resolved against the implementation.
        ResolvableType owner = forType(implementationClass).as(field.getDeclaringClass());
        return forType(null, new FieldTypeProvider(field), owner.asVariableResolver());
    }
}
public class UserDetail {
    /**
     * A list of the inline policies embedded in the user.
     * Lazily initializes the backing list on first access, so this never returns null.
     *
     * @return A list of the inline policies embedded in the user.
     */
    public java.util.List<PolicyDetail> getUserPolicyList() {
        if (userPolicyList == null) {
            userPolicyList = new com.amazonaws.internal.SdkInternalList<PolicyDetail>();
        }
        return userPolicyList;
    }
}
public class NavigationTreeTag {
    /**
     * Creates the nested &lt;ul&gt; and &lt;li&gt; tags used by TreeView.
     * The first level is expanded.
     * <a href="http://developer.yahoo.com/yui/treeview/#start">http://developer.yahoo.com/yui/treeview/#start</a>
     * <p>
     * Delegates rendering to {@code NavigationTreeRenderer}, wrapping any
     * ServletException in a JspTagException.
     */
    @Override
    public void doTag() throws JspTagException, IOException {
        try {
            final PageContext pageContext = (PageContext) getJspContext();
            NavigationTreeRenderer.writeNavigationTree(pageContext.getServletContext(), pageContext.getELContext(), (HttpServletRequest) pageContext.getRequest(), (HttpServletResponse) pageContext.getResponse(), pageContext.getOut(), root, skipRoot, yuiConfig, includeElements, target, thisDomain, thisBook, thisPage, linksToDomain, linksToBook, linksToPage, maxDepth);
        } catch (ServletException e) {
            throw new JspTagException(e);
        }
    }
}
public class VersioningConfigurationMarshaller {
    /**
     * Marshall the given parameter object into the protocol marshaller.
     *
     * @param versioningConfiguration the object to marshall; must not be null
     * @param protocolMarshaller the target marshaller
     * @throws SdkClientException when the argument is null or marshalling fails
     */
    public void marshall(VersioningConfiguration versioningConfiguration, ProtocolMarshaller protocolMarshaller) {
        if (versioningConfiguration == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(versioningConfiguration.getUnlimited(), UNLIMITED_BINDING);
            protocolMarshaller.marshall(versioningConfiguration.getMaxVersions(), MAXVERSIONS_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class CRLDistPoint { /** * Return the distribution points making up the sequence .
* @ return DistributionPoint [ ] */
public DistributionPoint [ ] getDistributionPoints ( ) { } } | DistributionPoint [ ] dp = new DistributionPoint [ seq . size ( ) ] ; for ( int i = 0 ; i != seq . size ( ) ; i ++ ) { dp [ i ] = DistributionPoint . getInstance ( seq . getObjectAt ( i ) ) ; } return dp ; |
public class GenericDraweeHierarchy {
    /**
     * Applies scale type and rounding (both if specified).
     * Rounding is applied first (to the leaf), then the scale-type wrapper.
     *
     * @param drawable the drawable to wrap (may be null)
     * @param scaleType the scale type to apply (may be null)
     * @return the wrapped drawable, or null when the input was null
     */
    @Nullable
    private Drawable buildBranch(@Nullable Drawable drawable, @Nullable ScalingUtils.ScaleType scaleType) {
        drawable = WrappingUtils.maybeApplyLeafRounding(drawable, mRoundingParams, mResources);
        drawable = WrappingUtils.maybeWrapWithScaleType(drawable, scaleType);
        return drawable;
    }
}
public class VersionUtility { /** * Convert a string into a VersionRange , reusing common VersionRange
* objects if we can .
* @ param str String to convert to a VersionRange
* @ return VersionRange */
public static final VersionRange stringToVersionRange ( String str ) { } } | if ( str == null || str . isEmpty ( ) || "0" . equals ( str ) ) return EMPTY_RANGE ; if ( "[1,1.0.100)" . equals ( str ) || "[1.0,1.0.100)" . equals ( str ) || "[1.0.0,1.0.100)" . equals ( str ) ) return INITIAL_RANGE ; return new VersionRange ( str ) ; |
public class AssetsApi {
    /**
     * Get corporation asset locations (asynchronously). Return locations for a
     * set of item ids, which you can get from corporation assets endpoint.
     * Coordinates for items in hangars or stations are set to (0,0,0) ---
     * Requires one of the following EVE corporation role(s): Director SSO
     * Scope: esi-assets.read_corporation_assets.v1
     *
     * @param corporationId
     *        An EVE corporation ID (required)
     * @param requestBody
     *        A list of item ids (required)
     * @param datasource
     *        The server name you would like data from (optional, default to tranquility)
     * @param token
     *        Access token to use if unable to set a header (optional)
     * @param callback
     *        The callback to be executed when the API call finishes
     * @return The request call
     * @throws ApiException
     *         If fail to process the API call, e.g. serializing the request body object
     */
    public com.squareup.okhttp.Call postCorporationsCorporationIdAssetsLocationsAsync(Integer corporationId, List<Long> requestBody, String datasource, String token, final ApiCallback<List<CorporationAssetsLocationsResponse>> callback) throws ApiException {
        // Validate inputs and build the HTTP call, then dispatch it asynchronously.
        com.squareup.okhttp.Call call = postCorporationsCorporationIdAssetsLocationsValidateBeforeCall(corporationId, requestBody, datasource, token, callback);
        Type localVarReturnType = new TypeToken<List<CorporationAssetsLocationsResponse>>() {
        }.getType();
        apiClient.executeAsync(call, localVarReturnType, callback);
        return call;
    }
}
public class HelpModule { /** * Adds locale information to a help module id .
* @ param id Help module id .
* @ param locale Locale ( may be null ) .
* @ return The id with locale information appended . */
public static String getLocalizedId ( String id , Locale locale ) { } } | String locstr = locale == null ? "" : ( "_" + locale . toString ( ) ) ; return id + locstr ; |
public class FileUtils {
    /**
     * Returns a {@link List} consisting of the lines of the provided {@link CharSource} in the
     * order given, with comment lines (as judged by {@code IsCommentLine}) filtered out.
     *
     * @param source the character source to read
     * @return an immutable list of non-comment lines
     * @throws IOException if reading the source fails
     */
    public static ImmutableList<String> loadStringList(final CharSource source) throws IOException {
        return FluentIterable.from(source.readLines()).filter(not(IsCommentLine.INSTANCE)).toList();
    }
}
public class Error {
    /**
     * Thrown when the xml configuration doesn't contain the classes configuration.
     * Always throws; never returns normally.
     *
     * @param destination destination class
     * @param source source class
     * @param xml xml descriptor whose path is reported in the message
     * @throws MappingNotFoundException always
     */
    public static void configNotPresent(Class<?> destination, Class<?> source, XML xml) {
        throw new MappingNotFoundException(MSG.INSTANCE.message(Constants.mappingNotFoundException2path, destination.getSimpleName(), source.getSimpleName(), xml.getXmlPath()));
    }
}
public class PersistentExecutorImpl {
    /**
     * {@inheritDoc}
     * <p>
     * Looks up the task record inside a managed transaction; failures captured
     * during the lookup are rethrown after the transaction completes.
     *
     * @return the timer status, or null when no task exists for the id
     */
    @Override
    public <T> TimerStatus<T> getTimerStatus(long taskId) throws Exception {
        TransactionController tranController = new TransactionController();
        TaskRecord taskRecord = null;
        try {
            tranController.preInvoke();
            taskRecord = taskStore.findById(taskId, null, true);
        } catch (Throwable x) {
            // Record the failure; postInvoke surfaces it after cleanup.
            tranController.setFailure(x);
        } finally {
            Exception x = tranController.postInvoke(Exception.class);
            if (x != null)
                throw x;
        }
        return taskRecord == null ? null : new TaskStatusImpl<T>(taskRecord, this);
    }
}
public class Continuation {
    /**
     * Do not use -- for internal use only.
     * <p>
     * Splices the cutpoint list back into the main list after a successful
     * execution cycle and resets the traversal pointers.
     */
    public void successExecutionCycle() {
        // FOR A PRIMER ON WHAT WE'RE DOING HERE, SEE LARGE BLOCK OF COMMENT AT BEGINNING OF CLASS
        if (nextUnloadPointer != null) {
            nextUnloadPointer.setNext(firstCutpointPointer);
        } else {
            firstPointer = firstCutpointPointer;
        }
        nextLoadPointer = firstPointer; // reset next load pointer so we load from the beginning
        nextUnloadPointer = null; // reset unload pointer
        firstCutpointPointer = null; // reset cutpoint list
    }
}
public class NormalisedOntology {
    /**
     * Normalises and loads a set of axioms, recording the time spent in the
     * normalisation and indexing phases.
     *
     * @param inclusions the axioms to normalise and index
     */
    public void loadAxioms(final Set<? extends Axiom> inclusions) {
        long start = System.currentTimeMillis();
        if (log.isInfoEnabled())
            log.info("Loading " + inclusions.size() + " axioms");
        Set<Inclusion> normInclusions = normalise(inclusions);
        if (log.isInfoEnabled())
            log.info("Processing " + normInclusions.size() + " normalised axioms");
        Statistics.INSTANCE.setTime("normalisation", System.currentTimeMillis() - start);
        start = System.currentTimeMillis();
        // Index each normalised inclusion.
        for (Inclusion i : normInclusions) {
            addTerm(i.getNormalForm());
        }
        Statistics.INSTANCE.setTime("indexing", System.currentTimeMillis() - start);
    }
}
public class ApacheHttpRequestFactory {
    /**
     * Update the provided request configuration builder to specify the proxy
     * authentication schemes that should be used when authenticating against
     * the HTTP proxy. No-op unless the proxy is enabled, authenticated, and
     * explicit authentication methods are configured.
     *
     * @see ClientConfiguration#setProxyAuthenticationMethods(List)
     */
    private void addProxyConfig(RequestConfig.Builder requestConfigBuilder, HttpClientSettings settings) {
        if (settings.isProxyEnabled() && settings.isAuthenticatedProxy() && settings.getProxyAuthenticationMethods() != null) {
            // Translate SDK auth-method enums into Apache HttpClient scheme names,
            // preserving the configured preference order.
            List<String> apacheAuthenticationSchemes = new ArrayList<String>();
            for (ProxyAuthenticationMethod authenticationMethod : settings.getProxyAuthenticationMethods()) {
                apacheAuthenticationSchemes.add(toApacheAuthenticationScheme(authenticationMethod));
            }
            requestConfigBuilder.setProxyPreferredAuthSchemes(apacheAuthenticationSchemes);
        }
    }
}
public class StringUtils {
    /**
     * Splits {@code source} on the literal {@code separator} (regex is NOT supported).
     *
     * @param source the string to split; null or empty yields {@code null}
     * @param separator the literal delimiter; null or empty yields a single-element
     *        list containing {@code source}
     * @param removeEmpty when true, empty tokens are dropped from the result
     * @return the list of tokens, or {@code null} for a null/empty source
     */
    public static ArrayList<String> split(String source, String separator, boolean removeEmpty) {
        if (source == null || source.isEmpty())
            return null;
        ArrayList<String> values = new ArrayList<String>();
        if (separator == null || separator.isEmpty()) {
            values.add(source);
            return values;
        }
        int idx = 0;
        while (true) {
            int pos = source.indexOf(separator, idx);
            // Last token runs to the end of the string.
            String token = (pos == -1) ? source.substring(idx) : source.substring(idx, pos);
            // FIX: honor the removeEmpty flag — previously it was ignored and empty
            // tokens were always discarded. (Also removed the redundant
            // "new String(source)" copy, the always-true null checks on substring
            // results, and the duplicated substring calls.)
            if (!removeEmpty || !token.isEmpty()) {
                values.add(token);
            }
            if (pos == -1) {
                break;
            }
            idx = pos + separator.length();
        }
        return values;
    }
}
public class AbstractLifecycleProvider {
    /**
     * Add a lifecycle Action to this provider. The action will be called back when the
     * lifecycle stage is hit and contain an object that was created by the provider.
     *
     * @param stage the lifecycle stage that triggers the action
     * @param action the callback to run at that stage
     */
    @Override
    public void addAction(final LifecycleStage stage, final LifecycleAction<T> action) {
        stageEvents.add(new StageEvent(stage, action));
    }
}
public class XcodeProjectWriter {
    /**
     * Add file reference of product to map of objects.
     *
     * @param objects
     *        object map.
     * @param linkTarget
     *        build description for executable or shared library.
     * @return file reference to generated executable or shared library.
     */
    private PBXObjectRef addProduct(final Map objects, final TargetInfo linkTarget) {
        // create file reference for executable file
        // forget Ant's location, just place in XCode's default location
        final PBXObjectRef executable = createPBXFileReference("BUILD_PRODUCTS_DIR", linkTarget.getOutput().getParent(), linkTarget.getOutput());
        final Map executableProperties = executable.getProperties();
        final String fileType = getFileType(linkTarget);
        executableProperties.put("explicitFileType", fileType);
        executableProperties.put("includeInIndex", "0");
        // NOTE(review): the objects map is keyed by the reference id but stores the
        // properties map rather than the reference itself — confirm this is intended.
        objects.put(executable.getID(), executableProperties);
        return executable;
    }
}
public class ListTagsResult {
    /**
     * A list of resource tags.
     * Lazily initializes the backing list on first access, so this never returns null.
     *
     * @return A list of resource tags.
     */
    public java.util.List<ResourceTag> getResourceTagList() {
        if (resourceTagList == null) {
            resourceTagList = new com.amazonaws.internal.SdkInternalList<ResourceTag>();
        }
        return resourceTagList;
    }
}
public class CoinbaseAccountServiceRaw {
    /**
     * Authenticated resource which returns order details for a specific order id or merchant custom.
     *
     * @param orderIdOrCustom order id or merchant custom identifier
     * @return the order, after response validation
     * @throws IOException on transport failure
     * @see <a
     *      href="https://coinbase.com/api/doc/1.0/orders/show.html">coinbase.com/api/doc/1.0/orders/show.html</a>
     */
    public CoinbaseOrder getCoinbaseOrder(String orderIdOrCustom) throws IOException {
        final CoinbaseOrder order = coinbase.getOrder(orderIdOrCustom, exchange.getExchangeSpecification().getApiKey(), signatureCreator, exchange.getNonceFactory());
        return handleResponse(order);
    }
}
public class ListOrderItemsByNextTokenResult {
    /**
     * Read members from a MwsReader: the pagination token, the order id, and
     * the contained order items.
     *
     * @param r
     *        The reader to read from.
     */
    @Override
    public void readFragmentFrom(MwsReader r) {
        nextToken = r.read("NextToken", String.class);
        amazonOrderId = r.read("AmazonOrderId", String.class);
        orderItems = r.readList("OrderItems", "OrderItem", OrderItem.class);
    }
}
public class EventRepository { /** * Retrive all { @ link Event } s from the DB
* @ return { @ link Collection } of { @ link Event } s from the DB */
@ Transactional ( readOnly = true ) public Collection < Event > find ( SearchCriteria searchCriteria ) { } } | // if user exists , compare on [ username ]
// if detection point exists , compare on [ category , label , threshold count , threshold interval ]
// if detection system ids exist , do an " in "
// if earliest exists , compare on [ event date > earliest ]
CriteriaBuilder criteriaBuilder = em . getCriteriaBuilder ( ) ; CriteriaQuery < Event > criteriaQuery = criteriaBuilder . createQuery ( Event . class ) ; Root < Event > root = criteriaQuery . from ( Event . class ) ; Collection < Predicate > conditions = new ArrayList < > ( ) ; if ( searchCriteria . getUser ( ) != null ) { Predicate userCondition = criteriaBuilder . equal ( root . get ( "user" ) . get ( "username" ) , searchCriteria . getUser ( ) . getUsername ( ) ) ; conditions . add ( userCondition ) ; } if ( searchCriteria . getDetectionPoint ( ) != null ) { if ( searchCriteria . getDetectionPoint ( ) . getCategory ( ) != null ) { Predicate categoryCondition = criteriaBuilder . equal ( root . get ( "detectionPoint" ) . get ( "category" ) , searchCriteria . getDetectionPoint ( ) . getCategory ( ) ) ; conditions . add ( categoryCondition ) ; } if ( searchCriteria . getDetectionPoint ( ) . getLabel ( ) != null ) { Predicate labelCondition = criteriaBuilder . equal ( root . get ( "detectionPoint" ) . get ( "label" ) , searchCriteria . getDetectionPoint ( ) . getLabel ( ) ) ; conditions . add ( labelCondition ) ; } if ( searchCriteria . getDetectionPoint ( ) . getThreshold ( ) != null ) { if ( searchCriteria . getDetectionPoint ( ) . getThreshold ( ) . getCount ( ) > 0 ) { Predicate countCondition = criteriaBuilder . equal ( root . get ( "detectionPoint" ) . get ( "threshold" ) . get ( "count" ) , searchCriteria . getDetectionPoint ( ) . getThreshold ( ) . getCount ( ) ) ; conditions . add ( countCondition ) ; } if ( searchCriteria . getDetectionPoint ( ) . getThreshold ( ) . getInterval ( ) != null ) { if ( searchCriteria . getDetectionPoint ( ) . getThreshold ( ) . getInterval ( ) . getUnit ( ) != null ) { Predicate durationCondition = criteriaBuilder . equal ( root . get ( "detectionPoint" ) . get ( "threshold" ) . get ( "interval" ) . get ( "duration" ) , searchCriteria . getDetectionPoint ( ) . getThreshold ( ) . getInterval ( ) . 
getDuration ( ) ) ; conditions . add ( durationCondition ) ; } if ( searchCriteria . getDetectionPoint ( ) . getThreshold ( ) . getInterval ( ) . getDuration ( ) > 0 ) { Predicate unitCondition = criteriaBuilder . equal ( root . get ( "detectionPoint" ) . get ( "threshold" ) . get ( "interval" ) . get ( "unit" ) , searchCriteria . getDetectionPoint ( ) . getThreshold ( ) . getInterval ( ) . getUnit ( ) ) ; conditions . add ( unitCondition ) ; } } } } if ( searchCriteria . getDetectionSystemIds ( ) != null ) { Predicate detectionSystemCondition = root . get ( "detectionSystem" ) . get ( "detectionSystemId" ) . in ( searchCriteria . getDetectionSystemIds ( ) ) ; conditions . add ( detectionSystemCondition ) ; } if ( conditions . size ( ) > 0 ) { criteriaQuery . where ( criteriaBuilder . and ( conditions . toArray ( new Predicate [ 0 ] ) ) ) ; } criteriaQuery . orderBy ( criteriaBuilder . asc ( root . get ( "timestamp" ) ) ) ; TypedQuery < Event > query = em . createQuery ( criteriaQuery ) ; List < Event > result = query . getResultList ( ) ; return result ; |
public class StepFunctionsActionMarshaller { /** * Marshall the given parameter object . */
public void marshall ( StepFunctionsAction stepFunctionsAction , ProtocolMarshaller protocolMarshaller ) { } } | if ( stepFunctionsAction == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( stepFunctionsAction . getExecutionNamePrefix ( ) , EXECUTIONNAMEPREFIX_BINDING ) ; protocolMarshaller . marshall ( stepFunctionsAction . getStateMachineName ( ) , STATEMACHINENAME_BINDING ) ; protocolMarshaller . marshall ( stepFunctionsAction . getRoleArn ( ) , ROLEARN_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class ApiOvhMe { /** * Change xdsl settings linked to the nichandle
* REST : POST / me / xdsl / setting
* @ param resellerModemBasicConfig [ required ] Let the modem with vendor configuration . It prevent to apply the config managed by ovh manager
* @ param resellerFastModemShipping [ required ] Send the modem as soon as possible , do not wait the xdsl line to be active */
public void xdsl_setting_POST ( Boolean resellerFastModemShipping , Boolean resellerModemBasicConfig ) throws IOException { } } | String qPath = "/me/xdsl/setting" ; StringBuilder sb = path ( qPath ) ; HashMap < String , Object > o = new HashMap < String , Object > ( ) ; addBody ( o , "resellerFastModemShipping" , resellerFastModemShipping ) ; addBody ( o , "resellerModemBasicConfig" , resellerModemBasicConfig ) ; exec ( qPath , "POST" , sb . toString ( ) , o ) ; |
public class CmsContainerpageController {

    /**
     * Initializes the controller.<p>
     *
     * Wires the resize and native-event handlers, stores the supplied collaborators,
     * reads the container elements out of the page DOM and installs browser history
     * handling for the content editor.
     *
     * @param handler the container-page handler
     * @param dndHandler the drag and drop handler
     * @param contentEditorHandler the XML content editor handler
     * @param containerpageUtil the container-page utility
     */
    public void init(
        CmsContainerpageHandler handler,
        CmsDNDHandler dndHandler,
        CmsContentEditorHandler contentEditorHandler,
        CmsContainerpageUtil containerpageUtil) {

        // Forward browser window resize events to this controller.
        Window.addResizeHandler(new ResizeHandler() {

            public void onResize(ResizeEvent event) {

                CmsContainerpageController.this.onResize();
            }
        });
        m_containerpageUtil = containerpageUtil;
        m_handler = handler;
        m_contentEditorHandler = contentEditorHandler;
        m_dndHandler = dndHandler;
        m_cntDndController = m_dndHandler.getController();
        m_elements = new HashMap<String, CmsContainerElementData>();
        m_newElements = new HashMap<String, CmsContainerElementData>();
        m_containers = new HashMap<String, CmsContainer>();
        // Without the prefetched page data editing is impossible:
        // disable the editor, show an error dialog and bail out.
        if (m_data == null) {
            m_handler.m_editor.disableEditing(Messages.get().key(Messages.ERR_READING_CONTAINER_PAGE_DATA_0));
            CmsErrorDialog dialog = new CmsErrorDialog(Messages.get().key(Messages.ERR_READING_CONTAINER_PAGE_DATA_0), null);
            dialog.center();
            return;
        }
        // ensure any embedded flash players are set opaque so UI elements may be placed above them
        CmsDomUtil.fixFlashZindex(RootPanel.getBodyElement());
        // Collect the container elements present in the page body into m_containers.
        m_targetContainers = m_containerpageUtil.consumeContainers(m_containers, RootPanel.getBodyElement());
        updateContainerLevelInfo();
        resetEditButtons();
        // Intercept native browser events before they reach their targets.
        Event.addNativePreviewHandler(new NativePreviewHandler() {

            public void onPreviewNativeEvent(NativePreviewEvent event) {

                previewNativeEvent(event);
            }
        });
        // A non-empty "no edit reason" disables editing entirely; otherwise verify the lock state.
        if (CmsStringUtil.isNotEmptyOrWhitespaceOnly(m_data.getNoEditReason())) {
            m_handler.m_editor.disableEditing(m_data.getNoEditReason());
        } else {
            checkLockInfo();
        }
        // initialize the browser history handler
        History.addValueChangeHandler(new ValueChangeHandler<String>() {

            public void onValueChange(ValueChangeEvent<String> event) {

                String historyToken = event.getValue();
                // A token opens the editor for that history entry; an empty token closes it.
                if (CmsStringUtil.isNotEmptyOrWhitespaceOnly(historyToken)) {
                    getContentEditorHandler().openEditorForHistory(historyToken);
                } else {
                    getContentEditorHandler().closeContentEditor();
                }
            }
        });
        // Callback that deliberately ignores the outcome of the fire-and-forget RPC below.
        AsyncCallback<Void> doNothing = new AsyncCallback<Void>() {

            public void onFailure(Throwable caught) {

                // nothing to do
            }

            public void onSuccess(Void result) {

                // nothing to do
            }
        };
        getContainerpageService().setLastPage(CmsCoreProvider.get().getStructureId(), m_data.getDetailId(), doNothing);
        // check if there is already a history item available
        String historyToken = History.getToken();
        if (CmsStringUtil.isNotEmptyOrWhitespaceOnly(historyToken)) {
            m_contentEditorHandler.openEditorForHistory(historyToken);
        }
        updateGalleryData(false, null);
    }
}
public class TableRenderer { /** * Table Row Rendering */
public final void openHeaderRow ( TrTag . State state , AbstractRenderAppender appender ) { } } | if ( state == null ) state = TR_STATE ; appender . append ( LINE_BREAK ) ; _trRenderer . doStartTag ( appender , state ) ; |
public class Aromaticity {

    /**
     * Find the bonds of a {@code molecule} which this model determined were
     * aromatic.
     *
     * <blockquote><pre>{@code
     * Aromaticity aromaticity = new Aromaticity(ElectronDonation.cdk(),
     *                                           Cycles.all());
     * IAtomContainer container = ...;
     * try {
     *     Set<IBond> bonds = aromaticity.findBonds(container);
     *     int nAromaticBonds = bonds.size();
     * } catch (CDKException e) {
     *     // cycle computation was intractable
     * }
     * }</pre></blockquote>
     *
     * @param molecule the molecule to apply the model to
     * @return the set of bonds which are aromatic
     * @throws CDKException a problem occurred with the cycle perception - one
     *         can retry with a simpler cycle set
     */
    public Set<IBond> findBonds(IAtomContainer molecule) throws CDKException {
        // build graph data-structures for fast cycle perception
        final EdgeToBondMap bondMap = EdgeToBondMap.withSpaceFor(molecule);
        final int[][] graph = GraphUtil.toAdjList(molecule, bondMap);
        // initial ring/cycle search and get the contribution from each atom
        final RingSearch ringSearch = new RingSearch(molecule, graph);
        final int[] electrons = model.contribution(molecule, ringSearch);
        final Set<IBond> bonds = Sets.newHashSetWithExpectedSize(molecule.getBondCount());
        // obtain the subset of electron contributions which are >= 0 (i.e.
        // allowed to be aromatic) - we then find the cycles in this subgraph
        // and 'lift' the indices back to the original graph using the subset
        // as a lookup
        final int[] subset = subset(electrons);
        final int[][] subgraph = GraphUtil.subgraph(graph, subset);
        // for each cycle if the electron sum is valid add the bonds of the
        // cycle to the set or aromatic bonds
        for (final int[] cycle : cycles.find(molecule, subgraph, subgraph.length).paths()) {
            if (checkElectronSum(cycle, electrons, subset)) {
                // walk consecutive vertex pairs of the closed cycle, mapping
                // subgraph indices back to bonds of the original molecule
                for (int i = 1; i < cycle.length; i++) {
                    bonds.add(bondMap.get(subset[cycle[i]], subset[cycle[i - 1]]));
                }
            }
        }
        return bonds;
    }
}
public class MapperTemplate { /** * 是否支持该通用方法
* @ param msId
* @ return */
public boolean supportMethod ( String msId ) { } } | Class < ? > mapperClass = getMapperClass ( msId ) ; if ( mapperClass != null && this . mapperClass . isAssignableFrom ( mapperClass ) ) { String methodName = getMethodName ( msId ) ; return methodMap . get ( methodName ) != null ; } return false ; |
public class Config { /** * Set the default base template path and devMode by JFinal before configEngine ( engine ) invoked
* They can be reconfigured in configEngine ( engine ) */
private static void initEngine ( ) { } } | engine . setDevMode ( constants . getDevMode ( ) ) ; // 避免在某些环境下 webRootPath 值为 blank 时无法启动项目
if ( StrKit . notBlank ( PathKit . getWebRootPath ( ) ) ) { engine . setBaseTemplatePath ( PathKit . getWebRootPath ( ) ) ; } |
public class Linkage { /** * Initialize the linkage with the lower triangular proximity matrix . */
void init ( double [ ] [ ] proximity ) { } } | size = proximity . length ; this . proximity = new float [ size * ( size + 1 ) / 2 ] ; // row wise
/* for ( int i = 0 , k = 0 ; i < size ; i + + ) {
double [ ] pi = proximity [ i ] ;
for ( int j = 0 ; j < = i ; j + + , k + + ) {
this . proximity [ k ] = ( float ) pi [ j ] ; */
// column wise
for ( int j = 0 , k = 0 ; j < size ; j ++ ) { for ( int i = j ; i < size ; i ++ , k ++ ) { this . proximity [ k ] = ( float ) proximity [ i ] [ j ] ; } } |
public class LicenseClient { /** * Retrieves the list of licenses available in the specified project . This method does not get any
* licenses that belong to other projects , including licenses attached to publicly - available
* images , like Debian 9 . If you want to get a list of publicly - available licenses , use this
* method to make a request to the respective image project , such as debian - cloud or
* windows - cloud .
* < p > Sample code :
* < pre > < code >
* try ( LicenseClient licenseClient = LicenseClient . create ( ) ) {
* ProjectName project = ProjectName . of ( " [ PROJECT ] " ) ;
* for ( License element : licenseClient . listLicenses ( project ) . iterateAll ( ) ) {
* / / doThingsWith ( element ) ;
* < / code > < / pre >
* @ param project Project ID for this request .
* @ throws com . google . api . gax . rpc . ApiException if the remote call fails */
@ BetaApi public final ListLicensesPagedResponse listLicenses ( ProjectName project ) { } } | ListLicensesHttpRequest request = ListLicensesHttpRequest . newBuilder ( ) . setProject ( project == null ? null : project . toString ( ) ) . build ( ) ; return listLicenses ( request ) ; |
public class ESigService { /** * Registers a new esig type . If a type with the same mnemonic id is already registered , an
* exception is thrown .
* @ param eSigType The new type to register . */
@ Override public void register ( IESigType eSigType ) throws Exception { } } | if ( typeRegistry . get ( eSigType . getESigTypeId ( ) ) != null ) { throw new Exception ( "Duplicate esig type identifier: " + eSigType . getESigTypeId ( ) ) ; } typeRegistry . put ( eSigType . getESigTypeId ( ) , eSigType ) ; init ( eSigType ) ; |
public class SeverityMarshaller { /** * Marshall the given parameter object . */
public void marshall ( Severity severity , ProtocolMarshaller protocolMarshaller ) { } } | if ( severity == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( severity . getProduct ( ) , PRODUCT_BINDING ) ; protocolMarshaller . marshall ( severity . getNormalized ( ) , NORMALIZED_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class DateTimeFormatterBuilder { /** * Gets the formatting pattern for date and time styles for a locale and chronology .
* The locale and chronology are used to lookup the locale specific format
* for the requested dateStyle and / or timeStyle .
* @ param dateStyle the FormatStyle for the date
* @ param timeStyle the FormatStyle for the time
* @ param chrono the Chronology , non - null
* @ param locale the locale , non - null
* @ return the locale and Chronology specific formatting pattern
* @ throws IllegalArgumentException if both dateStyle and timeStyle are null */
public static String getLocalizedDateTimePattern ( FormatStyle dateStyle , FormatStyle timeStyle , Chronology chrono , Locale locale ) { } } | Jdk8Methods . requireNonNull ( locale , "locale" ) ; Jdk8Methods . requireNonNull ( chrono , "chrono" ) ; if ( dateStyle == null && timeStyle == null ) { throw new IllegalArgumentException ( "Either dateStyle or timeStyle must be non-null" ) ; } DateFormat dateFormat ; if ( dateStyle != null ) { if ( timeStyle != null ) { dateFormat = DateFormat . getDateTimeInstance ( dateStyle . ordinal ( ) , timeStyle . ordinal ( ) , locale ) ; } else { dateFormat = DateFormat . getDateInstance ( dateStyle . ordinal ( ) , locale ) ; } } else { dateFormat = DateFormat . getTimeInstance ( timeStyle . ordinal ( ) , locale ) ; } if ( dateFormat instanceof SimpleDateFormat ) { return ( ( SimpleDateFormat ) dateFormat ) . toPattern ( ) ; } throw new IllegalArgumentException ( "Unable to determine pattern" ) ; |
public class ApiClient { /** * Helper method to set API key prefix for the first API key authentication .
* @ param apiKeyPrefix API key prefix */
public void setApiKeyPrefix ( String apiKeyPrefix ) { } } | for ( Authentication auth : authentications . values ( ) ) { if ( auth instanceof ApiKeyAuth ) { ( ( ApiKeyAuth ) auth ) . setApiKeyPrefix ( apiKeyPrefix ) ; return ; } } throw new RuntimeException ( "No API key authentication configured!" ) ; |
public class AbstractPluginBeanValidation {

    /**
     * XS:Element — derives bean-validation annotations for the field generated
     * from an XML Schema element: {@code @NotNull} for required elements,
     * {@code @Size} from minOccurs/maxOccurs, and {@code @Valid} for fields
     * whose type needs nested validation.
     *
     * @param aElement the element property taken from the XJC model
     * @param aClassOutline the outline of the class owning the generated field
     */
    private void _processElement(@Nonnull final CElementPropertyInfo aElement, @Nonnull final ClassOutline aClassOutline) {
        // Read the occurrence constraints from the underlying schema particle.
        final ParticleImpl aParticle = (ParticleImpl) aElement.getSchemaComponent();
        final BigInteger aMinOccurs = aParticle.getMinOccurs();
        final BigInteger aMaxOccurs = aParticle.getMaxOccurs();
        // Locate the generated field by the element's (private) name.
        final JFieldVar aField = aClassOutline.implClass.fields().get(aElement.getName(false));
        // workaround for choices
        final boolean bRequired = aElement.isRequired();
        // minOccurs < 0 or (minOccurs >= 1 and required) -> field must not be null.
        if (MathHelper.isLT0(aMinOccurs) || (MathHelper.isGE1(aMinOccurs) && bRequired)) {
            if (!_hasAnnotation(aField, NotNull.class))
                aField.annotate(NotNull.class);
        }
        // A bounded maxOccurs > 1 constrains the collection size on both ends.
        if (aMaxOccurs.compareTo(BigInteger.ONE) > 0) {
            if (!_hasAnnotation(aField, Size.class)) {
                aField.annotate(Size.class).param("min", aMinOccurs.intValue()).param("max", aMaxOccurs.intValue());
            }
        }
        // Unbounded maxOccurs with a positive minOccurs only constrains the minimum size.
        if (UNBOUNDED.equals(aMaxOccurs) && MathHelper.isGT0(aMinOccurs)) {
            if (!_hasAnnotation(aField, Size.class)) {
                aField.annotate(Size.class).param("min", aMinOccurs.intValue());
            }
        }
        // For all collection types
        // For all types of generated classes
        final String sFullName = aField.type().erasure().fullName();
        if (aField.type().isArray()
            || sFullName.equals("java.util.Collection")
            || sFullName.equals("java.util.Set")
            || sFullName.equals("java.util.List")
            || sFullName.equals("java.util.Map")
            || aClassOutline.implClass.owner()._getClass(aField.type().fullName()) != null) {
            // Complex type requires @Valid for nested validation
            if (!_hasAnnotation(aField, Valid.class))
                aField.annotate(Valid.class);
        }
        // Recurse into the referenced element declaration (direct or delayed reference).
        final XSTerm aTerm = aParticle.getTerm();
        if (aTerm instanceof ElementDecl)
            _processElement(aField, (ElementDecl) aTerm);
        else if (aTerm instanceof DelayedRef.Element) {
            final XSElementDecl xsElementDecl = ((DelayedRef.Element) aTerm).get();
            _processElement(aField, (ElementDecl) xsElementDecl);
        } else
            LOGGER.info("Unsupported particle term " + aTerm);
    }
}
public class ControlBar { /** * Add new control at the end of control bar with specified touch listener , control label and resource .
* Size of control bar is updated based on new number of controls .
* @ param name name of the control to remove
* @ param resId the control face
* @ param label the control label
* @ param listener touch listener */
public Widget addControl ( String name , int resId , String label , Widget . OnTouchListener listener ) { } } | return addControl ( name , resId , label , listener , - 1 ) ; |
public class GreenPepperServerServiceImpl { /** * { @ inheritDoc } */
public void removeRequirement ( Requirement requirement ) throws GreenPepperServerException { } } | try { sessionService . startSession ( ) ; sessionService . beginTransaction ( ) ; documentDao . removeRequirement ( requirement ) ; sessionService . commitTransaction ( ) ; log . debug ( "Removed Requirement: " + requirement . getName ( ) ) ; } catch ( Exception ex ) { sessionService . rollbackTransaction ( ) ; throw handleException ( REQUIREMENT_REMOVE_FAILED , ex ) ; } finally { sessionService . closeSession ( ) ; } |
public class VaultsInner { /** * Creates or updates a Recovery Services vault .
* @ param resourceGroupName The name of the resource group where the recovery services vault is present .
* @ param vaultName The name of the recovery services vault .
* @ param vault Recovery Services Vault to be created .
* @ param serviceCallback the async ServiceCallback to handle successful and failed responses .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the { @ link ServiceFuture } object */
public ServiceFuture < VaultInner > createOrUpdateAsync ( String resourceGroupName , String vaultName , VaultInner vault , final ServiceCallback < VaultInner > serviceCallback ) { } } | return ServiceFuture . fromResponse ( createOrUpdateWithServiceResponseAsync ( resourceGroupName , vaultName , vault ) , serviceCallback ) ; |
public class AbstractVFState { /** * { @ inheritDoc } */
@ Override final boolean add ( int n , int m ) { } } | if ( ! feasible ( n , m ) ) return false ; m1 [ n ] = m ; m2 [ m ] = n ; size = size + 1 ; for ( int w : g1 [ n ] ) if ( t1 [ w ] == 0 ) t1 [ w ] = size ; for ( int w : g2 [ m ] ) if ( t2 [ w ] == 0 ) t2 [ w ] = size ; return true ; |
public class DocBookBuilder { /** * Checks if the conditional pass should be performed .
* @ param buildData Information and data structures for the build .
* @ param specTopic The spec topic the conditions should be processed for ,
* @ param doc The DOM Document to process the conditions against . */
protected void processConditions ( final BuildData buildData , final SpecTopic specTopic , final Document doc ) { } } | final String condition = specTopic . getConditionStatement ( true ) ; DocBookUtilities . processConditions ( condition , doc , BuilderConstants . DEFAULT_CONDITION ) ; |
public class PaxChronology { @ Override public ValueRange range ( ChronoField field ) { } } | switch ( field ) { case ALIGNED_WEEK_OF_MONTH : return ALIGNED_WEEK_OF_MONTH_RANGE ; case ALIGNED_WEEK_OF_YEAR : return ALIGNED_WEEK_OF_YEAR_RANGE ; case DAY_OF_MONTH : return DAY_OF_MONTH_RANGE ; case DAY_OF_YEAR : return DAY_OF_YEAR_RANGE ; case MONTH_OF_YEAR : return MONTH_OF_YEAR_RANGE ; default : return field . range ( ) ; } |
public class Collections3 { /** * 提取集合中的对象的一个属性 ( 通过 Getter 函数 ) , 组合成由分割符分隔的字符串 。
* @ param collection
* 来源集合
* @ param propertyName
* 要提取的属性名
* @ param separator
* 分隔符
* @ return 组合字符串 */
public static String extractToString ( final Collection collection , final String propertyName , final String separator ) { } } | List list = extractToList ( collection , propertyName ) ; return StringUtils . join ( list , separator ) ; |
public class ShutdownHook { /** * Write script file to clean up extraction directory .
* The reason a script is required , rather than a Java
* file delete method or a simple Runtime . exec of an OS
* delete command is because neither work reliably . Even
* after the ' server run process ' has ended , Java methods
* and single OS commands have failed to reliably delete
* the extraction directory . This has been observed on
* both Windows and Unix ( Ubuntu ) platforms .
* It appears there is a lock release latency of some kind
* preventing one - shot deletion from working . So a script
* with a loop is the approach used here . Additionally ,
* launching a background script offers the added value
* of doing the delete in the background , even after the
* foreground process has terminated .
* @ param type is the platform type : unix ( 1 ) , windows ( 2 ) , or cygwin ( 3)
* @ return a script File object
* @ throws IOException */
private File writeCleanupFile ( int platformType ) throws IOException { } } | String fileSuffix = ".sh" ; if ( platformType == SelfExtractUtils . PlatformType_WINDOWS ) { fileSuffix = ".bat" ; } File file = File . createTempFile ( "wlpDelete" , fileSuffix ) ; if ( ! file . exists ( ) ) { boolean success = file . createNewFile ( ) ; if ( ! success ) { throw new IOException ( "Failed to create file " + file . getName ( ) ) ; } } BufferedWriter bw = new BufferedWriter ( new OutputStreamWriter ( new FileOutputStream ( file . getAbsoluteFile ( ) ) , "UTF-8" ) ) ; if ( platformType == SelfExtractUtils . PlatformType_UNIX ) { writeUnixCleanup ( file , bw ) ; } else if ( platformType == SelfExtractUtils . PlatformType_WINDOWS ) { writeWindowsCleanup ( file , bw ) ; } else if ( platformType == SelfExtractUtils . PlatformType_CYGWIN ) { writeCygwinCleanup ( file , bw ) ; } bw . close ( ) ; return file ; |
public class TrustRegionUpdateDogleg_F64 { /** * Computes the Cauchy step , This is only called if the Cauchy point lies after or on the trust region
* @ param regionRadius ( Input ) Trust region size
* @ param step ( Output ) The step */
protected void cauchyStep ( double regionRadius , DMatrixRMaj step ) { } } | CommonOps_DDRM . scale ( - regionRadius , direction , step ) ; stepLength = regionRadius ; // it touches the trust region
predictedReduction = regionRadius * ( owner . gradientNorm - 0.5 * regionRadius * gBg ) ; |
public class IntMap { /** * Associates values of another primitive map
* @ param m */
public void putAll ( IntMap < K > m ) { } } | m . entrySet ( ) . stream ( ) . forEach ( ( e ) -> { put ( e . getKey ( ) , e . getValue ( ) . value ) ; |
public class JSONObject { /** * Write a Key : value entry to a stream */
public static void writeJSONKV ( String key , Object value , Appendable out , JSONStyle compression ) throws IOException { } } | if ( key == null ) out . append ( "null" ) ; else if ( ! compression . mustProtectKey ( key ) ) out . append ( key ) ; else { out . append ( '"' ) ; JSONValue . escape ( key , out , compression ) ; out . append ( '"' ) ; } out . append ( ':' ) ; if ( value instanceof String ) compression . writeString ( out , ( String ) value ) ; else JSONValue . writeJSONString ( value , out , compression ) ; |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.