signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class ErrorHandling { /** * This will report an error from a tag . The error will * contain a message . If error reporting is turned off , * the message will be returned and the caller should throw * a JspException to report the error . * @ param message - the message to register with the error * @ throws javax . servlet . jsp . JspException - if in - page error reporting is turned off this method will always * throw a JspException . */ public void registerTagError ( String message , String tagName , JspTag tag , Throwable e ) throws JspException { } }
assert ( message != null ) : "parameter 'message' must not be null." ; // add the error to the list of errors if ( _errors == null ) _errors = new ArrayList ( ) ; TagErrorInfo tei = new TagErrorInfo ( ) ; tei . tagType = tagName ; tei . message = message ; _errors . add ( tei ) ; IErrorReporter er = getErrorReporter ( tag ) ; if ( er == null ) { tei . errorNo = - 1 ; if ( ! reportErrorInPage ) { String s = Bundle . getString ( "Tags_NoInPageErrorReporting" , new Object [ ] { message } ) ; if ( e == null ) logger . error ( s ) ; else logger . error ( s , e ) ; // localRelease ( ) ; throw new JspException ( message ) ; } return ; } // add the error to the ErrorReporter tag er . addError ( tei ) ; assert ( tei . errorNo > 0 ) ; if ( ! reportErrorInPage ) { String s = Bundle . getString ( "Tags_NoInPageErrorReporting" , new Object [ ] { message } ) ; if ( e == null ) logger . error ( s ) ; else logger . error ( s , e ) ; // localRelease ( ) ; throw new JspException ( s ) ; } return ;
public class AtomCache {

    /**
     * Returns the representation of a {@link ScopDomain} as a BioJava
     * {@link Structure} object, resolved against the default SCOP database.
     *
     * @param scopId a SCOP Id
     * @return a Structure object
     * @throws IOException if the underlying structure data cannot be read
     * @throws StructureException if the domain cannot be resolved to a structure
     */
    public Structure getStructureForDomain(String scopId) throws IOException, StructureException {
        // Delegate to the overload that takes an explicit SCOP database.
        return getStructureForDomain(scopId, ScopFactory.getSCOP());
    }
}
public class Person { /** * The instance method sets the new password for the current context user . * Before the new password is set , some checks are made . * @ param _ newPasswd new Password to set * @ throws EFapsException on error * @ return true if password set , else false */ public Status setPassword ( final String _newPasswd ) throws EFapsException { } }
final Type type = CIAdminUser . Person . getType ( ) ; if ( _newPasswd . length ( ) == 0 ) { throw new EFapsException ( getClass ( ) , "PassWordLength" , 1 , _newPasswd . length ( ) ) ; } final Update update = new Update ( type , "" + getId ( ) ) ; final Status status = update . add ( CIAdminUser . Person . Password , _newPasswd ) ; if ( status . isOk ( ) ) { update . execute ( ) ; update . close ( ) ; } else { Person . LOG . error ( "Password could not be set by the Update, due to restrictions " + "e.g. length???" ) ; throw new EFapsException ( getClass ( ) , "TODO" ) ; } return status ;
public class ExtractorMojo { /** * Create CSV by using all the projects provided . */ private String createMultiProjectCSV ( ) { } }
String csvData = null ; List < IProject > projects = null ; IProject project = null ; if ( isProjectKeyProvided ( ) ) { project = getExtractor ( ) . getProject ( getProjectKey ( ) ) ; projects = new ArrayList < IProject > ( ) ; projects . add ( project ) ; } else if ( isProjectKeyPatternProvided ( ) ) { projects = getExtractor ( ) . getProjects ( getProjectKeyPattern ( ) ) ; } else { projects = getExtractor ( ) . getAllProjects ( ) ; } csvData = getConverter ( ) . getCSVData ( projects , getMeasureObjects ( ) , isCleanValues ( ) , isSurroundFields ( ) ) ; LOG_INFO . info ( "Retrieved projects and generated CSV data." ) ; return csvData ;
public class UcsApi { /** * Create a new contact * @ param createContactData ( required ) * @ return ApiResponse & lt ; ApiSuccessResponse & gt ; * @ throws ApiException If fail to call the API , e . g . server error or cannot deserialize the response body */ public ApiResponse < ApiSuccessResponse > createContactWithHttpInfo ( CreateContactData createContactData ) throws ApiException { } }
com . squareup . okhttp . Call call = createContactValidateBeforeCall ( createContactData , null , null ) ; Type localVarReturnType = new TypeToken < ApiSuccessResponse > ( ) { } . getType ( ) ; return apiClient . execute ( call , localVarReturnType ) ;
public class IntegerRangeDropdownFilterComposite { /** * Calculates the ranges that the developer implemented in config . xml and * in MainContentPanel . ui . xml and puts them in an ArrayList that will be used * in setupRange ( String fieldName ) to fill the values of the ListBox */ private ArrayList < String > calculateRangeFromParameters ( ) { } }
ArrayList < String > allTheValues = new ArrayList < String > ( ) ; int minSampleSizeInteger = Integer . parseInt ( minSampleSize ) ; int maxSampleSizeInteger = Integer . parseInt ( maxSampleSize ) ; int incrementInteger = Integer . parseInt ( increment ) ; for ( int i = minSampleSizeInteger ; i < maxSampleSizeInteger ; i = i + incrementInteger ) { String value = Integer . toString ( i ) + " - " + Integer . toString ( i + incrementInteger ) ; allTheValues . add ( value ) ; } return allTheValues ;
public class ShardCache { /** * worry about concurrency . */ private void loadShardCache ( TableDefinition tableDef ) { } }
String appName = tableDef . getAppDef ( ) . getAppName ( ) ; String tableName = tableDef . getTableName ( ) ; m_logger . debug ( "Loading shard cache for {}.{}" , appName , tableName ) ; Date cacheDate = new Date ( ) ; String cacheKey = appName + "/" + tableName ; m_cacheMap . put ( cacheKey , cacheDate ) ; Map < String , Map < Integer , Date > > tableMap = m_appShardMap . get ( appName ) ; if ( tableMap == null ) { tableMap = new HashMap < > ( ) ; m_appShardMap . put ( appName , tableMap ) ; } Map < Integer , Date > shardMap = tableMap . get ( tableName ) ; if ( shardMap == null ) { shardMap = new HashMap < > ( ) ; tableMap . put ( tableName , shardMap ) ; } Tenant tenant = Tenant . getTenant ( tableDef ) ; String storeName = SpiderService . termsStoreName ( tableDef ) ; for ( DColumn col : DBService . instance ( tenant ) . getAllColumns ( storeName , SpiderTransaction . SHARDS_ROW_KEY ) ) { Integer shardNum = Integer . parseInt ( col . getName ( ) ) ; Date shardDate = new Date ( Long . parseLong ( col . getValue ( ) ) ) ; shardMap . put ( shardNum , shardDate ) ; }
public class AwsClientBuilder { /** * Sets the AWSCredentialsProvider used by the client . If not specified the default is { @ link * DefaultAWSCredentialsProviderChain } . * @ param credentialsProvider New AWSCredentialsProvider to use . */ public final void setCredentials ( AWSCredentialsProvider credentialsProvider ) { } }
this . credentials = credentialsProvider ; if ( null != this . iamEndpoint ) { if ( ( this . credentials . getCredentials ( ) instanceof IBMOAuthCredentials ) && ( ( IBMOAuthCredentials ) this . credentials . getCredentials ( ) ) . getTokenManager ( ) instanceof DefaultTokenManager ) { ( ( DefaultTokenManager ) ( ( IBMOAuthCredentials ) this . credentials . getCredentials ( ) ) . getTokenManager ( ) ) . setIamEndpoint ( iamEndpoint ) ; if ( ( ( DefaultTokenManager ) ( ( IBMOAuthCredentials ) this . credentials . getCredentials ( ) ) . getTokenManager ( ) ) . getProvider ( ) instanceof DefaultTokenProvider ) { ( ( DefaultTokenProvider ) ( ( DefaultTokenManager ) ( ( IBMOAuthCredentials ) this . credentials . getCredentials ( ) ) . getTokenManager ( ) ) . getProvider ( ) ) . setIamEndpoint ( iamEndpoint ) ; } } } if ( this . iamTokenRefreshOffset > 0 ) { if ( ( this . credentials . getCredentials ( ) instanceof IBMOAuthCredentials ) && ( ( IBMOAuthCredentials ) this . credentials . getCredentials ( ) ) . getTokenManager ( ) instanceof DefaultTokenManager ) { ( ( DefaultTokenManager ) ( ( IBMOAuthCredentials ) this . credentials . getCredentials ( ) ) . getTokenManager ( ) ) . setIamRefreshOffset ( iamTokenRefreshOffset ) ; } } if ( this . iamMaxRetry > 0 ) { if ( ( this . credentials . getCredentials ( ) instanceof IBMOAuthCredentials ) && ( ( IBMOAuthCredentials ) this . credentials . getCredentials ( ) ) . getTokenManager ( ) instanceof DefaultTokenManager ) { ( ( DefaultTokenManager ) ( ( IBMOAuthCredentials ) this . credentials . getCredentials ( ) ) . getTokenManager ( ) ) . setIamMaxRetry ( iamMaxRetry ) ; ; } }
public class Monitors {

    /**
     * Extract all fields/methods of {@code obj} that have a monitor annotation
     * and add them to {@code monitors}.
     *
     * @param monitors the list that collected monitors are appended to
     * @param id       identifier mixed into each monitor's config
     * @param tags     tags attached to each monitor's config
     * @param obj      the object whose annotated members are scanned
     */
    static void addAnnotatedFields(List<Monitor<?>> monitors, String id, TagList tags, Object obj) {
        final Class<com.netflix.servo.annotations.Monitor> annoClass =
                com.netflix.servo.annotations.Monitor.class;
        try {
            // Annotated fields.
            Set<Field> fields = getFieldsAnnotatedBy(obj.getClass(), annoClass);
            for (Field field : fields) {
                final com.netflix.servo.annotations.Monitor anno = field.getAnnotation(annoClass);
                if (anno != null) {
                    final MonitorConfig config =
                            newConfig(obj.getClass(), field.getName(), id, anno, tags);
                    if (anno.type() == DataSourceType.INFORMATIONAL) {
                        // Informational monitors carry a string value.
                        monitors.add(new AnnotatedStringMonitor(config, obj, field));
                    } else {
                        // Numeric monitors: validate the declared field type first.
                        checkType(anno, field.getType(), field.getDeclaringClass());
                        monitors.add(new AnnotatedNumberMonitor(config, obj, field));
                    }
                }
            }
            // Annotated methods (same handling, keyed on the return type).
            Set<Method> methods = getMethodsAnnotatedBy(obj.getClass(), annoClass);
            for (Method method : methods) {
                final com.netflix.servo.annotations.Monitor anno = method.getAnnotation(annoClass);
                if (anno != null) {
                    final MonitorConfig config =
                            newConfig(obj.getClass(), method.getName(), id, anno, tags);
                    if (anno.type() == DataSourceType.INFORMATIONAL) {
                        monitors.add(new AnnotatedStringMonitor(config, obj, method));
                    } else {
                        checkType(anno, method.getReturnType(), method.getDeclaringClass());
                        monitors.add(new AnnotatedNumberMonitor(config, obj, method));
                    }
                }
            }
        } catch (Exception e) {
            // Reflection failures are rethrown unchecked.
            throw Throwables.propagate(e);
        }
    }
}
public class Locale {

    /**
     * Replies the text that corresponds to the specified resource.
     *
     * @param key is the name of the resource into the specified file
     * @param defaultValue is the default value to replies if the resource does not contain the specified key.
     * @param params is the list of parameters which will replace the
     *     <code>#1</code>, <code>#2</code>... into the string.
     * @return the text that corresponds to the specified resource
     */
    @Pure
    public static String getStringWithDefault(String key, String defaultValue, Object... params) {
        // Delegate to the overload taking an explicit class loader and resource
        // class, both resolved from the caller's context.
        return getStringWithDefault(ClassLoaderFinder.findClassLoader(), detectResourceClass(null), key,
                defaultValue, params);
    }
}
public class AmqpMessageHandlerService {

    /**
     * Executed if an amqp message arrives. Dispatches on the message type
     * header, switching the security context to the supplied tenant for the
     * duration of the handling and restoring the previous context afterwards.
     *
     * @param message the message
     * @param type the type
     * @param tenant the tenant
     * @param virtualHost the virtual host
     * @return the rpc message back to supplier — always {@code null} here;
     *         replies (e.g. ping responses) are sent through the dispatcher
     */
    public Message onMessage(final Message message, final String type, final String tenant, final String virtualHost) {
        // Both headers are mandatory; reject (and do not requeue) otherwise.
        if (StringUtils.isEmpty(type) || StringUtils.isEmpty(tenant)) {
            throw new AmqpRejectAndDontRequeueException("Invalid message! tenant and type header are mandatory!");
        }
        // Remember the caller's security context so it can be restored in the
        // finally block below, whatever the handlers do to it.
        final SecurityContext oldContext = SecurityContextHolder.getContext();
        try {
            final MessageType messageType = MessageType.valueOf(type);
            switch (messageType) {
            case THING_CREATED:
                setTenantSecurityContext(tenant);
                registerTarget(message, virtualHost);
                break;
            case EVENT:
                checkContentTypeJson(message);
                setTenantSecurityContext(tenant);
                handleIncomingEvent(message);
                break;
            case PING:
                // Only answer pings that carry a correlation id.
                if (isCorrelationIdNotEmpty(message)) {
                    amqpMessageDispatcherService.sendPingReponseToDmfReceiver(message, tenant, virtualHost);
                }
                break;
            default:
                logAndThrowMessageError(message, "No handle method was found for the given message type.");
            }
        } catch (final IllegalArgumentException ex) {
            // Raised by MessageType.valueOf for unknown type headers.
            throw new AmqpRejectAndDontRequeueException("Invalid message!", ex);
        } finally {
            SecurityContextHolder.setContext(oldContext);
        }
        return null;
    }
}
public class SortedGrouping { /** * Applies a GroupCombineFunction on a grouped { @ link DataSet } . * A CombineFunction is similar to a GroupReduceFunction but does not perform a full data exchange . Instead , the * CombineFunction calls the combine method once per partition for combining a group of results . This * operator is suitable for combining values into an intermediate format before doing a proper groupReduce where * the data is shuffled across the node for further reduction . The GroupReduce operator can also be supplied with * a combiner by implementing the RichGroupReduce function . The combine method of the RichGroupReduce function * demands input and output type to be the same . The CombineFunction , on the other side , can have an arbitrary * output type . * @ param combiner The GroupCombineFunction that is applied on the DataSet . * @ return A GroupCombineOperator which represents the combined DataSet . */ public < R > GroupCombineOperator < T , R > combineGroup ( GroupCombineFunction < T , R > combiner ) { } }
if ( combiner == null ) { throw new NullPointerException ( "GroupCombine function must not be null." ) ; } TypeInformation < R > resultType = TypeExtractor . getGroupCombineReturnTypes ( combiner , this . getInputDataSet ( ) . getType ( ) , Utils . getCallLocationName ( ) , true ) ; return new GroupCombineOperator < > ( this , resultType , inputDataSet . clean ( combiner ) , Utils . getCallLocationName ( ) ) ;
public class LinkedList { /** * Retrieves and removes the first element of this list , * or returns { @ code null } if this list is empty . * @ return the first element of this list , or { @ code null } if * this list is empty * @ since 1.6 */ public E pollFirst ( ) { } }
final Node < E > f = first ; return ( f == null ) ? null : unlinkFirst ( f ) ;
public class CommerceShipmentModelImpl { /** * Converts the soap model instances into normal model instances . * @ param soapModels the soap model instances to convert * @ return the normal model instances */ public static List < CommerceShipment > toModels ( CommerceShipmentSoap [ ] soapModels ) { } }
if ( soapModels == null ) { return null ; } List < CommerceShipment > models = new ArrayList < CommerceShipment > ( soapModels . length ) ; for ( CommerceShipmentSoap soapModel : soapModels ) { models . add ( toModel ( soapModel ) ) ; } return models ;
public class Channel { /** * Get signed byes of the update channel . * @ param updateChannelConfiguration * @ param signer * @ return * @ throws InvalidArgumentException */ public byte [ ] getUpdateChannelConfigurationSignature ( UpdateChannelConfiguration updateChannelConfiguration , User signer ) throws InvalidArgumentException { } }
userContextCheck ( signer ) ; if ( null == updateChannelConfiguration ) { throw new InvalidArgumentException ( "channelConfiguration is null" ) ; } try { TransactionContext transactionContext = getTransactionContext ( signer ) ; final ByteString configUpdate = ByteString . copyFrom ( updateChannelConfiguration . getUpdateChannelConfigurationAsBytes ( ) ) ; ByteString sigHeaderByteString = getSignatureHeaderAsByteString ( signer , transactionContext ) ; ByteString signatureByteSting = transactionContext . signByteStrings ( new User [ ] { signer } , sigHeaderByteString , configUpdate ) [ 0 ] ; return ConfigSignature . newBuilder ( ) . setSignatureHeader ( sigHeaderByteString ) . setSignature ( signatureByteSting ) . build ( ) . toByteArray ( ) ; } catch ( Exception e ) { throw new InvalidArgumentException ( e ) ; } finally { logger . debug ( "finally done" ) ; }
public class tmtrafficaction { /** * Use this API to add tmtrafficaction resources . */ public static base_responses add ( nitro_service client , tmtrafficaction resources [ ] ) throws Exception { } }
base_responses result = null ; if ( resources != null && resources . length > 0 ) { tmtrafficaction addresources [ ] = new tmtrafficaction [ resources . length ] ; for ( int i = 0 ; i < resources . length ; i ++ ) { addresources [ i ] = new tmtrafficaction ( ) ; addresources [ i ] . name = resources [ i ] . name ; addresources [ i ] . apptimeout = resources [ i ] . apptimeout ; addresources [ i ] . sso = resources [ i ] . sso ; addresources [ i ] . formssoaction = resources [ i ] . formssoaction ; addresources [ i ] . persistentcookie = resources [ i ] . persistentcookie ; addresources [ i ] . initiatelogout = resources [ i ] . initiatelogout ; addresources [ i ] . kcdaccount = resources [ i ] . kcdaccount ; addresources [ i ] . samlssoprofile = resources [ i ] . samlssoprofile ; } result = add_bulk_request ( client , addresources ) ; } return result ;
public class JsonDBTemplate {

    /**
     * Creates a collection whose name is derived from the entity class.
     *
     * @param entityClass the class whose name/annotations determine the collection name
     * @see io.jsondb.JsonDBOperations#createCollection(java.lang.Class)
     */
    @Override
    public <T> void createCollection(Class<T> entityClass) {
        // Resolve the collection name from the entity class, then delegate to
        // the name-based overload.
        createCollection(Util.determineCollectionName(entityClass));
    }
}
public class ReactionSetManipulator { /** * Get all Reactions object containing a Molecule as a Product from a set of * Reactions . * @ param reactSet The set of reaction to inspect * @ param molecule The molecule to find as a product * @ return The IReactionSet */ public static IReactionSet getRelevantReactionsAsProduct ( IReactionSet reactSet , IAtomContainer molecule ) { } }
IReactionSet newReactSet = reactSet . getBuilder ( ) . newInstance ( IReactionSet . class ) ; for ( IReaction reaction : reactSet . reactions ( ) ) { for ( IAtomContainer atomContainer : reaction . getProducts ( ) . atomContainers ( ) ) if ( atomContainer . equals ( molecule ) ) newReactSet . addReaction ( reaction ) ; } return newReactSet ;
public class URLUtil { /** * Method that tries to get a stream ( ideally , optimal one ) to write to * the resource specified by given URL . * Currently it just means creating a simple file output stream if the * URL points to a ( local ) file , and otherwise relying on URL classes * input stream creation method . */ public static OutputStream outputStreamFromURL ( URL url ) throws IOException { } }
if ( "file" . equals ( url . getProtocol ( ) ) ) { /* As per [ WSTX - 82 ] , can not do this if the path refers * to a network drive on windows . */ String host = url . getHost ( ) ; if ( host == null || host . length ( ) == 0 ) { return new FileOutputStream ( url . getPath ( ) ) ; } } return url . openConnection ( ) . getOutputStream ( ) ;
public class MapWithProtoValuesSubject {

    /**
     * Compares float fields with these explicitly specified top-level field
     * numbers using the provided absolute tolerance.
     *
     * @param tolerance A finite, non-negative tolerance.
     * @param firstFieldNumber the first field number the tolerance applies to
     * @param rest any additional field numbers
     * @return a fluent assertion whose config applies the tolerance to the given fields
     */
    public MapWithProtoValuesFluentAssertion<M> usingFloatToleranceForFieldsForValues(
            float tolerance, int firstFieldNumber, int... rest) {
        // Collect the field numbers into a single list and delegate to the config.
        return usingConfig(config.usingFloatToleranceForFields(tolerance, asList(firstFieldNumber, rest)));
    }
}
public class OtpCookedConnection {

    /**
     * Passes the message to the node for final delivery. Note that the
     * connection itself needs to know about links (in case of connection
     * failure), so we snoop for link/unlink here too.
     */
    @Override
    public void deliver(final OtpMsg msg) {
        final boolean delivered = self.deliver(msg);

        switch (msg.type()) {
        case OtpMsg.linkTag:
            if (delivered) {
                // Record the new link so it can be broken on connection failure.
                links.addLink(msg.getRecipientPid(), msg.getSenderPid());
            } else {
                try {
                    // no such pid - send exit to sender
                    super.sendExit(msg.getRecipientPid(), msg.getSenderPid(), new OtpErlangAtom("noproc"));
                } catch (final IOException e) {
                    // Intentionally ignored: if the exit cannot be sent, the
                    // connection is already failing and nothing useful remains.
                }
            }
            break;

        case OtpMsg.unlinkTag:
        case OtpMsg.exitTag:
            // Both unlink and exit remove any existing link bookkeeping.
            links.removeLink(msg.getRecipientPid(), msg.getSenderPid());
            break;

        case OtpMsg.exit2Tag:
            // exit2 carries no link bookkeeping here.
            break;
        }

        return;
    }
}
public class DBManagerService { /** * Get the DBService for the default database . This object is created when the * DBManagerService is started . * @ return { @ link DBService } for the defaultdatabase . */ public DBService getDefaultDB ( ) { } }
String defaultTenantName = TenantService . instance ( ) . getDefaultTenantName ( ) ; synchronized ( m_tenantDBMap ) { DBService dbservice = m_tenantDBMap . get ( defaultTenantName ) ; assert dbservice != null : "Database for default tenant not found" ; return dbservice ; }
public class ListDomainNamesResult { /** * The names of the search domains owned by an account . * @ param domainNames * The names of the search domains owned by an account . * @ return Returns a reference to this object so that method calls can be chained together . */ public ListDomainNamesResult withDomainNames ( java . util . Map < String , String > domainNames ) { } }
setDomainNames ( domainNames ) ; return this ;
public class RecoveryDirectorImpl {

    /**
     * Internal method to initiate recovery processing of the given
     * FailureScope. All registered RecoveryAgent objects will be directed to
     * process the FailureScope in sequence (priority order), and this method
     * blocks until each agent signals completion of its serial phase.
     *
     * <p>NOTE(review): the original javadoc declared "@return boolean success"
     * but the method is void; success is signalled by normal return,
     * failure by RecoveryFailedException.
     *
     * @param failureScope The FailureScope to process.
     * @throws RecoveryFailedException if a RecoveryAgent fails to initiate recovery
     */
    @Override
    public void directInitialization(FailureScope failureScope) throws RecoveryFailedException {
        if (tc.isEntryEnabled())
            Tr.entry(tc, "directInitialization", new Object[] { failureScope, this });

        // Use configuration to determine if recovery is local (for z/OS).
        final FailureScope currentFailureScope = Configuration.localFailureScope(); /* @LI1578-22A */

        // Synchronize to ensure consistency with the registerService method.
        // The remainder of the method is not synchronized on this in order that
        // two independant recovery processes may be driven concurrently on two
        // different threads.
        synchronized (_registeredRecoveryAgents) {
            // Ensure that further RecoveryAgent registrations are prohibited.
            _registrationAllowed = false;
        }

        if (currentFailureScope.equals(failureScope)) /* @LI1578-22C */
        {
            Tr.info(tc, "CWRLS0010_PERFORM_LOCAL_RECOVERY", failureScope.serverName());
        } else {
            Tr.info(tc, "CWRLS0011_PERFORM_PEER_RECOVERY", failureScope.serverName());
        }

        // Extract the 'values' collection from the _registeredRecoveryAgents map
        // and create an iterator from it. This iterator will return ArrayList
        // objects each containing a set of RecoveryAgent objects. Each ArrayList
        // corrisponds to a different sequence priority value.
        final Collection registeredRecoveryAgentsValues = _registeredRecoveryAgents.values();
        Iterator registeredRecoveryAgentsValuesIterator = registeredRecoveryAgentsValues.iterator();

        // First pass: let every agent prepare, and prime the bookkeeping maps
        // that the second pass will wait on.
        while (registeredRecoveryAgentsValuesIterator.hasNext()) {
            // Extract the next ArrayList and create an iterator from it. This
            // iterator will return RecoveryAgent objects that are registered at
            // the same sequence priority value.
            final ArrayList registeredRecoveryAgentsArray = (java.util.ArrayList) registeredRecoveryAgentsValuesIterator.next();
            final Iterator registeredRecoveryAgentsArrayIterator = registeredRecoveryAgentsArray.iterator();

            while (registeredRecoveryAgentsArrayIterator.hasNext()) {
                // Extract the next RecoveryAgent object
                final RecoveryAgent recoveryAgent = (RecoveryAgent) registeredRecoveryAgentsArrayIterator.next();
                recoveryAgent.prepareForRecovery(failureScope);

                // Prepare the maps for the recovery event.
                addInitializationRecord(recoveryAgent, failureScope);
                addRecoveryRecord(recoveryAgent, failureScope);
            }
        }

        // This is the opportunity to kick off any Network Parition Detection
        // logic that we deem necessary. Right now we are relying on the Hardware
        // quorum support within the HA framework itself if NP's are tro be handled.
        if (Configuration.HAEnabled()) {
            // Join the "dynamic cluster" in order that IOR references can be
            // associated with the resulting identity. Only do this in an
            // HA-enabled environment.
            Configuration.getRecoveryLogComponent().joinCluster(failureScope);
        }

        // If callbacks are registered, drive then now.
        if (_registeredCallbacks != null) {
            driveCallBacks(CALLBACK_RECOVERYSTARTED, failureScope);
        }

        // Re-set the iterator for the second (initiation) pass.
        registeredRecoveryAgentsValuesIterator = registeredRecoveryAgentsValues.iterator();

        while (registeredRecoveryAgentsValuesIterator.hasNext()) {
            // Extract the next ArrayList and create an iterator from it. This
            // iterator will return RecoveryAgent objects that are registered at
            // the same sequence priority value.
            final ArrayList registeredRecoveryAgentsArray = (java.util.ArrayList) registeredRecoveryAgentsValuesIterator.next();
            final Iterator registeredRecoveryAgentsArrayIterator = registeredRecoveryAgentsArray.iterator();

            while (registeredRecoveryAgentsArrayIterator.hasNext()) {
                // Extract the next RecoveryAgent object
                final RecoveryAgent recoveryAgent = (RecoveryAgent) registeredRecoveryAgentsArrayIterator.next();

                // Direct the RecoveryAgent instance to process this failure scope.
                try {
                    // Notify the listeners we're about to make the call
                    _eventListeners.clientRecoveryInitiated(failureScope, recoveryAgent.clientIdentifier()); /* @MD19638A */
                    recoveryAgent.initiateRecovery(failureScope);
                } catch (RecoveryFailedException exc) {
                    FFDCFilter.processException(exc, "com.ibm.ws.recoverylog.spi.RecoveryDirectorImpl.directInitialization", "410", this);
                    if (tc.isEntryEnabled())
                        Tr.exit(tc, "directInitialization", exc);
                    throw exc;
                }

                // Wait for 'serialRecoveryComplete' to be called. This callback
                // may be issued from another thread.
                synchronized (_outstandingInitializationRecords) {
                    while (initializationOutstanding(recoveryAgent, failureScope)) {
                        try {
                            _outstandingInitializationRecords.wait();
                        } catch (InterruptedException exc) {
                            // This exception is recieved if another thread
                            // interrupts this thread by calling this threads
                            // Thread.interrupt method. The RecoveryDirectorImpl
                            // class does not use this mechanism for breaking out
                            // of the wait call - it uses notifyAll to wake up
                            // all waiting threads. This exception should never
                            // be generated. If for some reason it is called then
                            // ignore it and start to wait again.
                            FFDCFilter.processException(exc, "com.ibm.ws.recoverylog.spi.RecoveryDirectorImpl.directInitialization", "432", this);
                        }
                    }
                }
            }
        }

        if (currentFailureScope.equals(failureScope)) /* @LI1578-22C */
        {
            Tr.info(tc, "CWRLS0012_DIRECT_LOCAL_RECOVERY", failureScope.serverName());
        } else {
            Tr.info(tc, "CWRLS0013_DIRECT_PEER_RECOVERY", failureScope.serverName());
        }

        if (tc.isEntryEnabled())
            Tr.exit(tc, "directInitialization");
    }
}
public class Payments { /** * 根据transactionId查询退款记录 * @ param transactionId * @ return */ public RefundQuery refundQueryByTransactionId ( String transactionId ) { } }
RefundQueryRequestWrapper refundQueryRequestWrapper = new RefundQueryRequestWrapper ( ) ; refundQueryRequestWrapper . setTransactionId ( transactionId ) ; return refundQuery ( refundQueryRequestWrapper ) ;
public class ProjectTask {

    /**
     * Adds this field in the Record's field sequence: constructs and returns
     * the field for the given sequence position, or defers to the superclass
     * for positions not handled here.
     *
     * @param iFieldSeq the position of the field in the record's sequence
     * @return the field for that position
     */
    public BaseField setupField(int iFieldSeq) {
        BaseField field = null;
        // Positions 0-2 are commented out below; presumably the superclass
        // supplies them via the fall-through at the bottom — TODO confirm.
        // if (iFieldSeq == 0)
        //     field = new CounterField(this, ID, Constants.DEFAULT_FIELD_LENGTH, null, null);
        //     field.setHidden(true);
        // if (iFieldSeq == 1)
        //     field = new RecordChangedField(this, LAST_CHANGED, Constants.DEFAULT_FIELD_LENGTH, null, null);
        //     field.setHidden(true);
        // if (iFieldSeq == 2)
        //     field = new BooleanField(this, DELETED, Constants.DEFAULT_FIELD_LENGTH, null, new Boolean(false));
        //     field.setHidden(true);
        if (iFieldSeq == 3)
            field = new StringField(this, NAME, 120, null, null);
        if (iFieldSeq == 4)
            field = new ProjectTaskField(this, PARENT_PROJECT_TASK_ID, Constants.DEFAULT_FIELD_LENGTH, null, null);
        if (iFieldSeq == 5) {
            field = new ShortField(this, SEQUENCE, Constants.DEFAULT_FIELD_LENGTH, null, new Short((short) 0));
            field.setNullable(false);
        }
        // if (iFieldSeq == 6)
        //     field = new MemoField(this, COMMENT, Constants.DEFAULT_FIELD_LENGTH, null, null);
        // if (iFieldSeq == 7)
        //     field = new StringField(this, CODE, 30, null, null);
        if (iFieldSeq == 8)
            field = new DateTimeField(this, START_DATE_TIME, Constants.DEFAULT_FIELD_LENGTH, null, null);
        if (iFieldSeq == 9) {
            field = new RealField(this, DURATION, Constants.DEFAULT_FIELD_LENGTH, null, new Double(1));
            // Duration gets an init-once handler attached.
            field.addListener(new InitOnceFieldHandler(null));
        }
        if (iFieldSeq == 10) {
            field = new DateTimeField(this, END_DATE_TIME, Constants.DEFAULT_FIELD_LENGTH, null, null);
            // End time is marked virtual (not stored).
            field.setVirtual(true);
        }
        if (iFieldSeq == 11)
            field = new PercentField(this, PROGRESS, Constants.DEFAULT_FIELD_LENGTH, null, null);
        if (iFieldSeq == 12)
            field = new ProjectFilter(this, PROJECT_ID, Constants.DEFAULT_FIELD_LENGTH, null, null);
        if (iFieldSeq == 13)
            field = new ProjectVersionField(this, PROJECT_VERSION_ID, Constants.DEFAULT_FIELD_LENGTH, null, null);
        if (iFieldSeq == 14)
            field = new IssueStatusField(this, PROJECT_TYPE_ID, Constants.DEFAULT_FIELD_LENGTH, null, null);
        if (iFieldSeq == 15)
            field = new IssueStatusField(this, PROJECT_STATUS_ID, Constants.DEFAULT_FIELD_LENGTH, null, null);
        if (iFieldSeq == 16)
            field = new UserField(this, ASSIGNED_USER_ID, Constants.DEFAULT_FIELD_LENGTH, null, null);
        if (iFieldSeq == 17)
            field = new IssuePriorityField(this, PROJECT_PRIORITY_ID, Constants.DEFAULT_FIELD_LENGTH, null, null);
        if (iFieldSeq == 18)
            field = new ProjectTask_EnteredDate(this, ENTERED_DATE, Constants.DEFAULT_FIELD_LENGTH, null, null);
        if (iFieldSeq == 19)
            field = new UserField(this, ENTERED_BY_USER_ID, Constants.DEFAULT_FIELD_LENGTH, null, null);
        if (iFieldSeq == 20)
            field = new DateTimeField(this, CHANGED_DATE, Constants.DEFAULT_FIELD_LENGTH, null, null);
        if (iFieldSeq == 21)
            field = new UserField(this, CHANGED_BY_USER_ID, Constants.DEFAULT_FIELD_LENGTH, null, null);
        if (iFieldSeq == 22)
            field = new BooleanField(this, HAS_CHILDREN, Constants.DEFAULT_FIELD_LENGTH, null, new Boolean(false));
        // Any other sequence number: defer to the superclass.
        if (field == null)
            field = super.setupField(iFieldSeq);
        return field;
    }
}
public class Context {
    /**
     * Helper method used to work around logic errors related to the recursive
     * nature of the JSONLD-API Context Processing Algorithm.
     *
     * @param localContext
     *            the local context object: a Map, a String (remote context URL),
     *            a Context, null, or a List of any of those.
     * @param remoteContexts
     *            the list of Strings denoting the remote context URLs already
     *            dereferenced on this call chain (cycle detection); may be null.
     * @param parsingARemoteContext
     *            true if localContext represents a remote context that has been
     *            parsed and sent into this method and false otherwise. This must
     *            be set to know whether to propagate the {@code @base} key from
     *            the context to the result.
     * @return the parsed and merged Context.
     * @throws JsonLdError
     *             if there is an error parsing the contexts.
     */
    private Context parse(Object localContext, List<String> remoteContexts,
            boolean parsingARemoteContext) throws JsonLdError {
        if (remoteContexts == null) {
            remoteContexts = new ArrayList<String>();
        }
        // 1. Initialize result to the result of cloning active context.
        Context result = this.clone(); // TODO: clone?
        // Normalize a single context into a one-element list so the loop below
        // handles both forms uniformly.
        if (!(localContext instanceof List)) {
            final Object temp = localContext;
            localContext = new ArrayList<Object>();
            ((List<Object>) localContext).add(temp);
        }
        for (final Object context : ((List<Object>) localContext)) {
            // 3.1) null resets to a fresh context built from the options.
            if (context == null) {
                result = new Context(this.options);
                continue;
            } else if (context instanceof Context) {
                result = ((Context) context).clone();
            }
            // 3.2) a String is a remote context URL to dereference and merge.
            else if (context instanceof String) {
                String uri = (String) result.get(JsonLdConsts.BASE);
                uri = JsonLdUrl.resolve(uri, (String) context);
                // 3.2.2 cycle detection against contexts already being parsed
                if (remoteContexts.contains(uri)) {
                    throw new JsonLdError(Error.RECURSIVE_CONTEXT_INCLUSION, uri);
                }
                remoteContexts.add(uri);
                // 3.2.3: Dereference context
                final RemoteDocument rd = this.options.getDocumentLoader().loadDocument(uri);
                final Object remoteContext = rd.getDocument();
                if (!(remoteContext instanceof Map) || !((Map<String, Object>) remoteContext)
                        .containsKey(JsonLdConsts.CONTEXT)) {
                    // If the dereferenced document has no top-level JSON object
                    // with an @context member
                    throw new JsonLdError(Error.INVALID_REMOTE_CONTEXT, context);
                }
                final Object tempContext = ((Map<String, Object>) remoteContext)
                        .get(JsonLdConsts.CONTEXT);
                // 3.2.4 recurse with parsingARemoteContext=true so @base is ignored
                result = result.parse(tempContext, remoteContexts, true);
                // 3.2.5
                continue;
            } else if (!(context instanceof Map)) {
                // 3.3
                throw new JsonLdError(Error.INVALID_LOCAL_CONTEXT, context);
            }
            checkEmptyKey((Map<String, Object>) context);
            // 3.4 @base is only honoured for the top-level (non-remote) context.
            if (!parsingARemoteContext
                    && ((Map<String, Object>) context).containsKey(JsonLdConsts.BASE)) {
                // 3.4.1
                final Object value = ((Map<String, Object>) context).get(JsonLdConsts.BASE);
                // 3.4.2 explicit null clears the base IRI
                if (value == null) {
                    result.remove(JsonLdConsts.BASE);
                } else if (value instanceof String) {
                    // 3.4.3
                    if (JsonLdUtils.isAbsoluteIri((String) value)) {
                        result.put(JsonLdConsts.BASE, value);
                    } else {
                        // 3.4.4 relative @base is resolved against the current base
                        final String baseUri = (String) result.get(JsonLdConsts.BASE);
                        if (!JsonLdUtils.isAbsoluteIri(baseUri)) {
                            throw new JsonLdError(Error.INVALID_BASE_IRI, baseUri);
                        }
                        result.put(JsonLdConsts.BASE, JsonLdUrl.resolve(baseUri, (String) value));
                    }
                } else {
                    // 3.4.5
                    throw new JsonLdError(JsonLdError.Error.INVALID_BASE_IRI,
                            "@base must be a string");
                }
            }
            // 3.5 @vocab handling
            if (((Map<String, Object>) context).containsKey(JsonLdConsts.VOCAB)) {
                final Object value = ((Map<String, Object>) context).get(JsonLdConsts.VOCAB);
                if (value == null) {
                    result.remove(JsonLdConsts.VOCAB);
                } else if (value instanceof String) {
                    if (JsonLdUtils.isAbsoluteIri((String) value)) {
                        result.put(JsonLdConsts.VOCAB, value);
                    } else {
                        // NOTE(review): message says "@value" but this branch is
                        // about @vocab — looks like a copy/paste in the message.
                        throw new JsonLdError(Error.INVALID_VOCAB_MAPPING,
                                "@value must be an absolute IRI");
                    }
                } else {
                    throw new JsonLdError(Error.INVALID_VOCAB_MAPPING,
                            "@vocab must be a string or null");
                }
            }
            // 3.6 @language handling (stored lower-cased)
            if (((Map<String, Object>) context).containsKey(JsonLdConsts.LANGUAGE)) {
                final Object value = ((Map<String, Object>) context).get(JsonLdConsts.LANGUAGE);
                if (value == null) {
                    result.remove(JsonLdConsts.LANGUAGE);
                } else if (value instanceof String) {
                    result.put(JsonLdConsts.LANGUAGE, ((String) value).toLowerCase());
                } else {
                    throw new JsonLdError(Error.INVALID_DEFAULT_LANGUAGE, value);
                }
            }
            // 3.7 create term definitions for all remaining keys
            final Map<String, Boolean> defined = new LinkedHashMap<String, Boolean>();
            for (final String key : ((Map<String, Object>) context).keySet()) {
                if (JsonLdConsts.BASE.equals(key) || JsonLdConsts.VOCAB.equals(key)
                        || JsonLdConsts.LANGUAGE.equals(key)) {
                    continue;
                }
                result.createTermDefinition((Map<String, Object>) context, key, defined);
            }
        }
        return result;
    }
}
public class ProviderManager { /** * Returns the IQ provider registered to the specified XML element name and namespace . * For example , if a provider was registered to the element name " query " and the * namespace " jabber : iq : time " , then the following stanza would trigger the provider : * < pre > * & lt ; iq type = ' result ' to = ' joe @ example . com ' from = ' mary @ example . com ' id = ' time _ 1 ' & gt ; * & lt ; query xmlns = ' jabber : iq : time ' & gt ; * & lt ; utc & gt ; 20020910T17:58:35 & lt ; / utc & gt ; * & lt ; tz & gt ; MDT & lt ; / tz & gt ; * & lt ; display & gt ; Tue Sep 10 12:58:35 2002 & lt ; / display & gt ; * & lt ; / query & gt ; * & lt ; / iq & gt ; < / pre > * < p > Note : this method is generally only called by the internal Smack classes . * @ param elementName the XML element name . * @ param namespace the XML namespace . * @ return the IQ provider . */ public static IQProvider < IQ > getIQProvider ( String elementName , String namespace ) { } }
String key = getKey ( elementName , namespace ) ; return iqProviders . get ( key ) ;
public class ScriptExecutor { /** * Execute a CQL script template located in the class path and * inject provided values into the template to produce the actual script * @ param scriptTemplateLocation the location of the script template in the class path * @ param values template values */ public void executeScriptTemplate ( String scriptTemplateLocation , Map < String , Object > values ) { } }
final List < SimpleStatement > statements = buildStatements ( loadScriptAsLines ( scriptTemplateLocation , values ) ) ; for ( SimpleStatement statement : statements ) { if ( isDMLStatement ( statement ) ) { DML_LOGGER . debug ( "\tSCRIPT : {}\n" , statement . getQueryString ( ) ) ; } else { DDL_LOGGER . debug ( "\tSCRIPT : {}\n" , statement . getQueryString ( ) ) ; } session . execute ( statement ) ; }
public class AbstractEndpointParser {
    /**
     * Subclasses can override this parsing method in order to provide proper
     * endpoint configuration bean definition properties.
     *
     * @param endpointConfigurationBuilder builder for the endpoint configuration bean
     * @param element the XML element being parsed
     * @param parserContext the surrounding parser context
     */
    protected void parseEndpointConfiguration(BeanDefinitionBuilder endpointConfigurationBuilder,
            Element element, ParserContext parserContext) {
        // Base implementation only maps the optional "timeout" attribute onto the
        // bean's "timeout" property; presumably the util skips empty attribute
        // values — NOTE(review): confirm against BeanDefinitionParserUtils.
        BeanDefinitionParserUtils.setPropertyValue(endpointConfigurationBuilder,
                element.getAttribute("timeout"), "timeout");
    }
}
public class ListLocalContext { /** * Gets an item to the left or right of the central item in this * context . Negative offsets get an item on the left ( e . g . , - 2 gets * the second item on the left ) and positive offsets get an item on * the right . If { @ code relativeOffset } refers to a word off the end * of the sequence , then { @ code endFunction } is invoked to produce the * return value . * @ param relativeOffset * @ return */ @ Override public I getItem ( int relativeOffset , Function < ? super Integer , I > endFunction ) { } }
int index = wordIndex + relativeOffset ; if ( index < 0 ) { return endFunction . apply ( index ) ; } else if ( index >= items . size ( ) ) { int endWordIndex = index - ( items . size ( ) - 1 ) ; return endFunction . apply ( endWordIndex ) ; } else { return items . get ( index ) ; }
public class Md5Utils {
    /**
     * Computes the MD5 hash of the given data and returns it as an array of bytes.
     *
     * @param input the bytes to hash
     * @return the 16-byte MD5 digest of {@code input}
     */
    public static byte[] computeMD5Hash(byte[] input) {
        final MessageDigest digest;
        try {
            digest = MessageDigest.getInstance("MD5");
        } catch (NoSuchAlgorithmException e) {
            // Every conforming JVM ships MD5, so this is effectively unreachable.
            throw new IllegalStateException(e);
        }
        return digest.digest(input);
    }
}
public class CirculantGraph { /** * Required configuration for each range of offsets in the graph . * @ param offset first offset appointing the vertices ' position * @ param length number of contiguous offsets in range * @ return this */ public CirculantGraph addRange ( long offset , long length ) { } }
Preconditions . checkArgument ( offset >= MINIMUM_OFFSET , "Range offset must be at least " + MINIMUM_OFFSET ) ; Preconditions . checkArgument ( length <= vertexCount - offset , "Range length must not be greater than the vertex count minus the range offset." ) ; offsetRanges . add ( new OffsetRange ( offset , length ) ) ; return this ;
public class Parser { /** * 11.12 Conditional Expression */ private ParseTree parseConditional ( Expression expressionIn ) { } }
SourcePosition start = getTreeStartLocation ( ) ; ParseTree condition = parseLogicalOR ( expressionIn ) ; if ( peek ( TokenType . QUESTION ) ) { eat ( TokenType . QUESTION ) ; ParseTree left = parseAssignment ( expressionIn ) ; eat ( TokenType . COLON ) ; ParseTree right = parseAssignment ( expressionIn ) ; return new ConditionalExpressionTree ( getTreeLocation ( start ) , condition , left , right ) ; } return condition ;
public class Channel { /** * If the logo is local , you can provide a drawable instead of a URL * Provide the id from R . drawable . { id } as a String * @ param id resource name of your logo * @ param yourPackageName Package name of your app ( should be a temporary thing ) * @ return Itself */ public Channel setLogoDrawable ( String id , String yourPackageName ) { } }
String endpoint = "android.resource://" + id + "/drawable/" ; this . logoUrl = endpoint + id ; return this ;
public class MediaPanel { /** * Performs the actual painting of the media panel . Derived methods can override this method if * they wish to perform pre - and / or post - paint activities or if they wish to provide their own * painting mechanism entirely . */ protected void paint ( Graphics2D gfx , Rectangle [ ] dirty ) { } }
int dcount = dirty . length ; for ( int ii = 0 ; ii < dcount ; ii ++ ) { Rectangle clip = dirty [ ii ] ; // sanity - check the dirty rectangle if ( clip == null ) { log . warning ( "Found null dirty rect painting media panel?!" , new Exception ( ) ) ; continue ; } // constrain this dirty region to the bounds of the component constrainToBounds ( clip ) ; // ignore rectangles that were reduced to nothingness if ( clip . width == 0 || clip . height == 0 ) { continue ; } // clip to this dirty region clipToDirtyRegion ( gfx , clip ) ; // paint the region paintDirtyRect ( gfx , clip ) ; }
public class TruggerGenericTypeResolver { /** * Resolves the generic parameter name of a class . * @ param parameterName the parameter name * @ param target the target class * @ return the parameter class . */ static Class < ? > resolveParameterName ( String parameterName , Class < ? > target ) { } }
Map < Type , Type > typeVariableMap = getTypeVariableMap ( target ) ; Set < Entry < Type , Type > > set = typeVariableMap . entrySet ( ) ; Type type = Object . class ; for ( Entry < Type , Type > entry : set ) { if ( entry . getKey ( ) . toString ( ) . equals ( parameterName ) ) { type = entry . getKey ( ) ; break ; } } return resolveType ( type , typeVariableMap ) ;
public class TokenSub { /** * Given a static config map , substitute occurrences of $ { HERON _ * } variables * in the provided URL * @ param config a static map config object of key value pairs * @ param pathString string representing a path including $ { HERON _ * } variables * @ return String string that represents the modified path */ private static String substituteURL ( Config config , String pathString ) { } }
Matcher m = URL_PATTERN . matcher ( pathString ) ; if ( m . matches ( ) ) { return String . format ( "%s://%s" , m . group ( 1 ) , substitute ( config , m . group ( 2 ) ) ) ; } return pathString ;
public class CSSColorHelper { /** * Get the passed values as CSS HSLA color value * @ param nHue * Hue - is scaled to 0-359 * @ param nSaturation * Saturation - is scaled to 0-100 * @ param nLightness * Lightness - is scaled to 0-100 * @ param fOpacity * Opacity - is scaled to 0-1 * @ return The CSS string to use */ @ Nonnull @ Nonempty public static String getHSLAColorValue ( final int nHue , final int nSaturation , final int nLightness , final float fOpacity ) { } }
return new StringBuilder ( 32 ) . append ( CCSSValue . PREFIX_HSLA_OPEN ) . append ( getHSLHueValue ( nHue ) ) . append ( ',' ) . append ( getHSLPercentageValue ( nSaturation ) ) . append ( "%," ) . append ( getHSLPercentageValue ( nLightness ) ) . append ( "%," ) . append ( getOpacityToUse ( fOpacity ) ) . append ( CCSSValue . SUFFIX_HSLA_CLOSE ) . toString ( ) ;
public class DefaultImageFormatChecker { /** * Checks if imageHeaderBytes starts with SOI ( start of image ) marker , followed by 0xFF . * If headerSize is lower than 3 false is returned . * Description of jpeg format can be found here : * < a href = " http : / / www . w3 . org / Graphics / JPEG / itu - t81 . pdf " > * http : / / www . w3 . org / Graphics / JPEG / itu - t81 . pdf < / a > * Annex B deals with compressed data format * @ param imageHeaderBytes * @ param headerSize * @ return true if imageHeaderBytes starts with SOI _ BYTES and headerSize > = 3 */ private static boolean isJpegHeader ( final byte [ ] imageHeaderBytes , final int headerSize ) { } }
return headerSize >= JPEG_HEADER . length && ImageFormatCheckerUtils . startsWithPattern ( imageHeaderBytes , JPEG_HEADER ) ;
public class RiakClient { /** * NB : IntelliJ will see the above @ see statement as invalid , but it ' s correct : https : / / bugs . openjdk . java . net / browse / JDK - 8031625 */ public static RiakClient newClient ( RiakNode . Builder nodeBuilder , List < String > addresses ) throws UnknownHostException { } }
final RiakCluster cluster = new RiakCluster . Builder ( nodeBuilder , addresses ) . build ( ) ; cluster . start ( ) ; return new RiakClient ( cluster ) ;
public class DB {
    /**
     * Clears a single taxonomy term associated with a post.
     *
     * @param postId The post id.
     * @param taxonomyTermId The taxonomy term id.
     * @throws SQLException on database error.
     */
    public void clearPostTerm(final long postId, final long taxonomyTermId) throws SQLException {
        Connection conn = null;
        PreparedStatement stmt = null;
        // Time the whole operation via the metrics registry.
        Timer.Context ctx = metrics.postTermsClearTimer.time();
        try {
            conn = connectionSupplier.getConnection();
            stmt = conn.prepareStatement(clearPostTermSQL);
            stmt.setLong(1, postId);
            stmt.setLong(2, taxonomyTermId);
            stmt.executeUpdate();
        } finally {
            // Stop the timer before releasing JDBC resources so close time is
            // not attributed to the query; closeQuietly tolerates nulls when
            // getConnection/prepareStatement failed part-way.
            ctx.stop();
            SQLUtil.closeQuietly(conn, stmt);
        }
    }
}
public class GroovyDataReportConnector { /** * { @ inheritDoc } */ @ Override public void runReport ( Map < String , Object > extra ) { } }
try { rows = new ArrayList < Map < String , Object > > ( ) ; Script script = groovyShell . parse ( groovyScript ) ; script . setBinding ( new Binding ( ) ) ; script . getBinding ( ) . setVariable ( "rows" , rows ) ; script . getBinding ( ) . setVariable ( "columns" , getColumns ( ) ) ; script . getBinding ( ) . setVariable ( "page" , getPage ( ) ) ; script . getBinding ( ) . setVariable ( "pageLimit" , getPageLimit ( ) ) ; script . getBinding ( ) . setVariable ( "paramConfig" , getParameterConfig ( ) ) ; script . getBinding ( ) . setVariable ( "param" , new HashMap < String , ParamConfig > ( ) { { for ( ParamConfig paramConfig : getParameterConfig ( ) ) { put ( paramConfig . getId ( ) , paramConfig ) ; } } } ) ; script . getBinding ( ) . setVariable ( "extra" , extra ) ; script . run ( ) ; } catch ( Exception ex ) { ex . printStackTrace ( ) ; getErrors ( ) . add ( ex . getMessage ( ) ) ; }
public class MultiDimensionalMap {
    /**
     * Thread safe sorted map implementation.
     *
     * @param <K> first key component
     * @param <T> second key component
     * @param <V> value type
     * @return a multi-dimensional map backed by a concurrent, sorted skip-list map
     */
    public static <K, T, V> MultiDimensionalMap<K, T, V> newThreadSafeTreeBackedMap() {
        // ConcurrentSkipListMap gives both thread safety and sorted iteration.
        // NOTE(review): this presumes Pair<K,T> is Comparable (or the map is
        // given a comparator elsewhere) — confirm, otherwise inserts throw CCE.
        return new MultiDimensionalMap<>(new ConcurrentSkipListMap<Pair<K, T>, V>());
    }
}
public class FastaFormat {
    /**
     * Method to convert all Peptides and RNAs into the natural analogue sequence
     * and generates HELM2Notation.
     *
     * @param helm2Notation {@link HELM2Notation}
     * @return analog helm2notation
     * @throws FastaFormatException if it can not be converted to analog sequence
     * @throws AnalogSequenceException if the natural analogue sequence can not be produced
     * @throws ChemistryException if chemistry engine can not be initialized
     * @throws CTKException general ChemToolKit exception passed to HELMToolKit
     */
    public static HELM2Notation convertIntoAnalogSequence(HELM2Notation helm2Notation)
            throws FastaFormatException, AnalogSequenceException, ChemistryException, CTKException {
        // Make sure the lookup tables used by the conversions are populated.
        initMapAminoAcid();
        initMapNucleotides();
        initMapNucleotidesNaturalAnalog();
        initMapTransformNucleotides();
        /*
         * transform/convert only the peptides + rnas into the analog sequence.
         * NOTE(review): 'polymers' aliases the live list inside helm2Notation, so
         * the set(...) calls below mutate the notation in place and the RNA check
         * is re-read from the (possibly already replaced) element — presumably
         * intentional, since a converted RNA polymer is still not a PeptideEntity.
         */
        List<PolymerNotation> polymers = helm2Notation.getListOfPolymers();
        for (int i = 0; i < helm2Notation.getListOfPolymers().size(); i++) {
            if (helm2Notation.getListOfPolymers().get(i).getPolymerID() instanceof RNAEntity) {
                helm2Notation.getListOfPolymers().set(i, convertRNAIntoAnalogSequence(polymers.get(i)));
            }
            if (helm2Notation.getListOfPolymers().get(i).getPolymerID() instanceof PeptideEntity) {
                helm2Notation.getListOfPolymers().set(i, convertPeptideIntoAnalogSequence(polymers.get(i)));
            }
        }
        return helm2Notation;
    }
}
public class Geometry { /** * Center the columns of a matrix ( in - place ) . */ public static FloatMatrix centerColumns ( FloatMatrix x ) { } }
FloatMatrix temp = new FloatMatrix ( x . rows ) ; for ( int c = 0 ; c < x . columns ; c ++ ) x . putColumn ( c , center ( x . getColumn ( c , temp ) ) ) ; return x ;
public class PayRefundRequest { /** * 扩展信息 */ @ Override public void checkVaild ( ) { } }
super . checkVaild ( ) ; if ( this . refundmoney < 1 ) throw new RuntimeException ( "refundmoney is illegal" ) ; if ( this . paymoney < 1 ) throw new RuntimeException ( "paymoney is illegal" ) ; if ( this . refundno == null || this . refundno . isEmpty ( ) ) throw new RuntimeException ( "refundno is illegal" ) ; if ( this . thirdpayno == null || this . thirdpayno . isEmpty ( ) ) throw new RuntimeException ( "thirdpayno is illegal" ) ; if ( this . clientAddr == null || this . clientAddr . isEmpty ( ) ) throw new RuntimeException ( "clientAddr is illegal" ) ;
public class log {
    /**
     * Sends an INFO log message.
     *
     * @param message The message you would like logged.
     * @param throwable An exception to log
     * @return the value returned by the underlying logger call
     */
    public static int i(String message, Throwable throwable) {
        // Thin convenience wrapper: delegate at INFO level.
        return logger(QuickUtils.INFO, message, throwable);
    }
}
public class Table { /** * Returns a table with the same columns as this table , but no data */ public Table emptyCopy ( ) { } }
Table copy = new Table ( name ) ; for ( Column < ? > column : columnList ) { copy . addColumns ( column . emptyCopy ( ) ) ; } return copy ;
public class InstanceResource {
    /**
     * A put request for renewing lease from a client instance.
     *
     * @param isReplication a header parameter containing information whether this
     *            is replicated from other nodes.
     * @param overriddenStatus overridden status if any.
     * @param status the {@link InstanceStatus} of the instance.
     * @param lastDirtyTimestamp last timestamp when this instance information was updated.
     * @return response indicating whether the operation was a success or failure.
     */
    @PUT
    public Response renewLease(
            @HeaderParam(PeerEurekaNode.HEADER_REPLICATION) String isReplication,
            @QueryParam("overriddenstatus") String overriddenStatus,
            @QueryParam("status") String status,
            @QueryParam("lastDirtyTimestamp") String lastDirtyTimestamp) {
        boolean isFromReplicaNode = "true".equals(isReplication);
        boolean isSuccess = registry.renew(app.getName(), id, isFromReplicaNode);
        // Not found in the registry, immediately ask for a register
        if (!isSuccess) {
            logger.warn("Not Found (Renew): {} - {}", app.getName(), id);
            return Response.status(Status.NOT_FOUND).build();
        }
        // Check if we need to sync based on dirty time stamp, the client
        // instance might have changed some value.
        // NOTE(review): the 'status' query parameter is not used in this method —
        // presumably consumed elsewhere or kept for API compatibility; confirm.
        Response response;
        if (lastDirtyTimestamp != null && serverConfig.shouldSyncWhenTimestampDiffers()) {
            response = this.validateDirtyTimestamp(Long.valueOf(lastDirtyTimestamp), isFromReplicaNode);
            // Store the overridden status since the validation found out the
            // node that replicates wins
            if (response.getStatus() == Response.Status.NOT_FOUND.getStatusCode()
                    && (overriddenStatus != null)
                    && !(InstanceStatus.UNKNOWN.name().equals(overriddenStatus))
                    && isFromReplicaNode) {
                registry.storeOverriddenStatusIfRequired(app.getAppName(), id,
                        InstanceStatus.valueOf(overriddenStatus));
            }
        } else {
            // No dirty timestamp supplied (or sync disabled): plain OK.
            response = Response.ok().build();
        }
        logger.debug("Found (Renew): {} - {}; reply status={}", app.getName(), id, response.getStatus());
        return response;
    }
}
public class PutMailboxPermissionsRequestMarshaller {
    /**
     * Marshall the given parameter object.
     *
     * @param putMailboxPermissionsRequest the request to marshal; must not be null
     * @param protocolMarshaller the protocol-level marshaller receiving each field
     */
    public void marshall(PutMailboxPermissionsRequest putMailboxPermissionsRequest,
            ProtocolMarshaller protocolMarshaller) {
        if (putMailboxPermissionsRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Emit each field with its pre-built marshalling binding.
            protocolMarshaller.marshall(putMailboxPermissionsRequest.getOrganizationId(), ORGANIZATIONID_BINDING);
            protocolMarshaller.marshall(putMailboxPermissionsRequest.getEntityId(), ENTITYID_BINDING);
            protocolMarshaller.marshall(putMailboxPermissionsRequest.getGranteeId(), GRANTEEID_BINDING);
            protocolMarshaller.marshall(putMailboxPermissionsRequest.getPermissionValues(), PERMISSIONVALUES_BINDING);
        } catch (Exception e) {
            // Wrap any failure as an SDK client error, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class OQueryModel {
    /**
     * Get the size of the data.
     * Lazily computed: runs the model's COUNT query once and caches the result
     * in the {@code size} field until it is invalidated elsewhere.
     *
     * @return results size
     */
    public long size() {
        if (size == null) {
            ODatabaseDocument db = OrientDbWebSession.get().getDatabase();
            OSQLSynchQuery<ODocument> query = new OSQLSynchQuery<ODocument>(queryManager.getCountSql());
            List<ODocument> ret = db.query(enhanceContextByVariables(query), prepareParams());
            if (ret != null && ret.size() > 0) {
                // COUNT queries yield a single document carrying a "count" field.
                Number sizeNumber = ret.get(0).field("count");
                size = sizeNumber != null ? sizeNumber.longValue() : 0;
            } else {
                // No result rows: treat as an empty result set.
                size = 0L;
            }
        }
        return size;
    }
}
public class PathResourceManager {
    /**
     * Apply security check for case insensitive file systems.
     * Only returns a resource when the requested path matches the on-disk path
     * with identical character case; otherwise the request is rejected so that
     * case-insensitive file systems cannot be used to bypass path-based rules.
     *
     * @param file the resolved file
     * @param path the request path (used for logging)
     * @param symlinkBase base under which symlinks are permitted, or null
     * @param normalizedFile the normalized requested file string
     * @return the resource, or null when the case check rejects the request
     * @throws IOException on file system access failure
     */
    protected PathResource getFileResource(final Path file, final String path,
            final Path symlinkBase, String normalizedFile) throws IOException {
        if (this.caseSensitive) {
            if (symlinkBase != null) {
                String relative = symlinkBase.relativize(file.normalize()).toString();
                // Resolve both paths to their real (canonical, correctly cased) form.
                String fileResolved = file.toRealPath().toString();
                String symlinkBaseResolved = symlinkBase.toRealPath().toString();
                if (!fileResolved.startsWith(symlinkBaseResolved)) {
                    log.tracef("Rejected path resource %s from path resource manager with base %s, as the case did not match actual case of %s", path, base, normalizedFile);
                    return null;
                }
                // Compare the canonical tail against the requested relative path,
                // stripping a leading separator from either side before comparing.
                String compare = fileResolved.substring(symlinkBaseResolved.length());
                if (compare.startsWith(fileSystem.getSeparator())) {
                    compare = compare.substring(fileSystem.getSeparator().length());
                }
                if (relative.startsWith(fileSystem.getSeparator())) {
                    relative = relative.substring(fileSystem.getSeparator().length());
                }
                if (relative.equals(compare)) {
                    log.tracef("Found path resource %s from path resource manager with base %s", path, base);
                    return new PathResource(file, this, path, eTagFunction.generate(file));
                }
                log.tracef("Rejected path resource %s from path resource manager with base %s, as the case did not match actual case of %s", path, base, normalizedFile);
                return null;
            } else if (isFileSameCase(file, normalizedFile)) {
                log.tracef("Found path resource %s from path resource manager with base %s", path, base);
                return new PathResource(file, this, path, eTagFunction.generate(file));
            } else {
                log.tracef("Rejected path resource %s from path resource manager with base %s, as the case did not match actual case of %s", path, base, normalizedFile);
                return null;
            }
        } else {
            // Manager not configured for the case check: accept as-is.
            log.tracef("Found path resource %s from path resource manager with base %s", path, base);
            return new PathResource(file, this, path, eTagFunction.generate(file));
        }
    }
}
public class CmsMenuListItem {
    /**
     * Disables the edit button with the given reason.<p>
     *
     * @param reason the disable reason (shown to the user by the button)
     * @param locked <code>true</code> if the resource is locked
     */
    public void disableEdit(String reason, boolean locked) {
        m_editButton.disable(reason);
        if (locked) {
            // Swap in the padlock icon to signal the resource is locked.
            m_editButton.setImageClass("opencms-icon-lock-20");
        }
    }
}
public class ThreadPoolTaskScheduler {
    /**
     * {@inheritDoc}
     *
     * @see org.audit4j.core.schedule.TaskExecutor#execute(java.lang.Runnable)
     */
    @Override
    public void execute(Runnable task) {
        Executor executor = getScheduledExecutor();
        try {
            // Wrap the task so failures are routed through the configured error
            // handler; 'false' marks it as a one-shot (non-repeating) task.
            executor.execute(errorHandlingTask(task, false));
        } catch (RejectedExecutionException ex) {
            // Translate the executor-specific rejection into the scheduler API's
            // exception, preserving the cause.
            throw new TaskRejectedException("Executor [" + executor + "] did not accept task: " + task, ex);
        }
    }
}
public class CmsPatternPanelWeeklyController {
    /**
     * Set the weekdays at which the event should take place.
     *
     * @param weekDays the weekdays at which the event should take place;
     *            null is treated as an empty set.
     */
    public void setWeekDays(SortedSet<WeekDay> weekDays) {
        // Normalize null to an empty set so the model never sees null.
        final SortedSet<WeekDay> newWeekDays = null == weekDays ? new TreeSet<WeekDay>() : weekDays;
        SortedSet<WeekDay> currentWeekDays = m_model.getWeekDays();
        if (!currentWeekDays.equals(newWeekDays)) {
            // The model update is deferred into a Command so the exception-removal
            // confirmation (second argument: true when days were removed, i.e. the
            // new set does not cover all currently selected days) can run first.
            conditionallyRemoveExceptionsOnChange(new Command() {

                public void execute() {
                    m_model.setWeekDays(newWeekDays);
                    onValueChange();
                }
            }, !newWeekDays.containsAll(m_model.getWeekDays()));
        }
    }
}
public class UrlStringBuilder { /** * Adds the provided elements to the path * @ param elements Path elements to add * @ return this */ public UrlStringBuilder addPath ( String ... elements ) { } }
Validate . noNullElements ( elements , "elements cannot be null" ) ; for ( final String element : elements ) { this . path . add ( element ) ; } return this ;
public class ConfluenceGreenPepper {
    /**
     * Verifies if the selectedSystemUnderTestInfo matches the specified key.
     *
     * @param selectedSystemUnderTestInfo the currently selected SUT info, may be null
     * @param key the key to compare against, may be null
     * @return true if the selectedSystemUnderTestInfo is non-null and equals the key
     */
    public boolean isSelected(String selectedSystemUnderTestInfo, String key) {
        // Null-safe equality: a null selection never matches any key.
        // Replaces the redundant "cond ? x.equals(key) : false" ternary.
        return selectedSystemUnderTestInfo != null && selectedSystemUnderTestInfo.equals(key);
    }
}
public class AWSKMSAsyncClient {
    /**
     * Simplified method form for invoking the CreateKey operation with an AsyncHandler.
     *
     * @see #createKeyAsync(CreateKeyRequest, com.amazonaws.handlers.AsyncHandler)
     */
    @Override
    public java.util.concurrent.Future<CreateKeyResult> createKeyAsync(
            com.amazonaws.handlers.AsyncHandler<CreateKeyRequest, CreateKeyResult> asyncHandler) {
        // Delegate to the full overload with a default (empty) request.
        return createKeyAsync(new CreateKeyRequest(), asyncHandler);
    }
}
public class AfplibPackageImpl {
    /**
     * <!-- begin-user-doc -->
     * Returns the SECCOLSPCE enum, lazily resolved from the registered package
     * metamodel (classifier index 68). EMF-generated accessor — do not edit.
     * <!-- end-user-doc -->
     *
     * @generated
     */
    public EEnum getSECCOLSPCE() {
        if (seccolspceEEnum == null) {
            seccolspceEEnum = (EEnum) EPackage.Registry.INSTANCE.getEPackage(AfplibPackage.eNS_URI)
                    .getEClassifiers().get(68);
        }
        return seccolspceEEnum;
    }
}
public class PrintStreamOutput {
    /**
     * Escapes args' string values according to format.
     *
     * @param format the Format used by the PrintStream
     * @param args the array of args to escape
     * @return The cloned and escaped array of args
     */
    protected Object[] escape(final Format format, Object... args) {
        // Escape each value of a defensive copy in place, leaving the caller's
        // array untouched, then hand the copy back.
        final Object[] escaped = ArrayUtils.clone(args);
        for (int i = 0; i < escaped.length; i++) {
            escaped[i] = format.escapeValue(escaped[i]);
        }
        return escaped;
    }
}
public class Ifc4PackageImpl {
    /**
     * <!-- begin-user-doc -->
     * Returns the IfcFlowStorageDevice class, lazily resolved from the registered
     * package metamodel (classifier index 288). EMF-generated accessor — do not edit.
     * <!-- end-user-doc -->
     *
     * @generated
     */
    @Override
    public EClass getIfcFlowStorageDevice() {
        if (ifcFlowStorageDeviceEClass == null) {
            ifcFlowStorageDeviceEClass = (EClass) EPackage.Registry.INSTANCE
                    .getEPackage(Ifc4Package.eNS_URI).getEClassifiers().get(288);
        }
        return ifcFlowStorageDeviceEClass;
    }
}
public class IPSettings { /** * puts all the children at the IPv4 or IPv6 nodes for fast insertion . this method does not look for * a more accurate insertion point and is useful when adding many items at once , e . g . for Country * Codes of all known IP ranges * @ param children */ public void putAll ( List < IPRangeNode < Map > > children ) { } }
for ( IPRangeNode child : children ) { this . put ( child , false ) ; // pass false for optimized insertion performance }
public class AuditLoggerFactory { /** * Creates new instance of JMS audit logger based on given connection factory and queue . * NOTE : this will build the logger but it is not registered directly on a session : once received , * it will need to be registered as an event listener * @ param transacted determines if JMS session is transacted or not * @ param connFactory connection factory instance * @ param queue JMS queue instance * @ return new instance of JMS audit logger */ public static AbstractAuditLogger newJMSInstance ( boolean transacted , ConnectionFactory connFactory , Queue queue ) { } }
AsyncAuditLogProducer logger = new AsyncAuditLogProducer ( ) ; logger . setTransacted ( transacted ) ; logger . setConnectionFactory ( connFactory ) ; logger . setQueue ( queue ) ; return logger ;
public class App { /** * Adds a user - defined data type to the types map . * @ param pluralDatatype the plural form of the type * @ param datatype a datatype , must not be null or empty */ public void addDatatype ( String pluralDatatype , String datatype ) { } }
pluralDatatype = Utils . noSpaces ( Utils . stripAndTrim ( pluralDatatype , " " ) , "-" ) ; datatype = Utils . noSpaces ( Utils . stripAndTrim ( datatype , " " ) , "-" ) ; if ( StringUtils . isBlank ( pluralDatatype ) || StringUtils . isBlank ( datatype ) ) { return ; } if ( getDatatypes ( ) . size ( ) >= Config . MAX_DATATYPES_PER_APP ) { LoggerFactory . getLogger ( App . class ) . warn ( "Maximum number of types per app reached - {}." , Config . MAX_DATATYPES_PER_APP ) ; return ; } if ( ! getDatatypes ( ) . containsKey ( pluralDatatype ) && ! getDatatypes ( ) . containsValue ( datatype ) && ! ParaObjectUtils . getCoreTypes ( ) . containsKey ( pluralDatatype ) ) { getDatatypes ( ) . put ( pluralDatatype , datatype ) ; }
public class JarArchiveRepository { /** * Translated a module id to an absolute path of the module jar */ protected Path getModuleJarPath ( ModuleId moduleId ) { } }
Path moduleJarPath = rootDir . resolve ( moduleId + ".jar" ) ; return moduleJarPath ;
public class ServerRequest { /** * Update the additional metadata provided using { @ link Branch # setRequestMetadata ( String , String ) } to the requests . */ private void updateRequestMetadata ( ) { } }
// Take event level metadata , merge with top level metadata // event level metadata takes precedence try { JSONObject metadata = new JSONObject ( ) ; Iterator < String > i = prefHelper_ . getRequestMetadata ( ) . keys ( ) ; while ( i . hasNext ( ) ) { String k = i . next ( ) ; metadata . put ( k , prefHelper_ . getRequestMetadata ( ) . get ( k ) ) ; } JSONObject originalMetadata = params_ . optJSONObject ( Defines . Jsonkey . Metadata . getKey ( ) ) ; if ( originalMetadata != null ) { Iterator < String > postIter = originalMetadata . keys ( ) ; while ( postIter . hasNext ( ) ) { String key = postIter . next ( ) ; // override keys from above metadata . put ( key , originalMetadata . get ( key ) ) ; } } // Install metadata need to be send only with Install request if ( ( this instanceof ServerRequestRegisterInstall ) && prefHelper_ . getInstallMetadata ( ) . length ( ) > 0 ) { params_ . putOpt ( Defines . Jsonkey . InstallMetadata . getKey ( ) , prefHelper_ . getInstallMetadata ( ) ) ; } params_ . put ( Defines . Jsonkey . Metadata . getKey ( ) , metadata ) ; } catch ( JSONException e ) { PrefHelper . Debug ( "Could not merge metadata, ignoring user metadata." ) ; }
public class DomainValidator { /** * Returns true if the specified < code > String < / code > matches any * widely used " local " domains ( localhost or localdomain ) . Leading dots are * ignored if present . The search is case - insensitive . * @ param lTld the parameter to check for local TLD status , not null * @ return true if the parameter is an local TLD */ public boolean isValidLocalTld ( String lTld ) { } }
final String key = chompLeadingDot ( unicodeToASCII ( lTld ) . toLowerCase ( Locale . ENGLISH ) ) ; return arrayContains ( LOCAL_TLDS , key ) ;
public class WebACRolesProvider { /** * Given a path ( e . g . / a / b / c / d ) retrieve a list of all ancestor paths . * In this case , that would be a list of " / a / b / c " , " / a / b " , " / a " and " / " . */ private static List < String > getAllPathAncestors ( final String path ) { } }
final List < String > segments = asList ( path . split ( "/" ) ) ; return range ( 1 , segments . size ( ) ) . mapToObj ( frameSize -> FEDORA_INTERNAL_PREFIX + "/" + String . join ( "/" , segments . subList ( 1 , frameSize ) ) ) . collect ( toList ( ) ) ;
public class BreadCrumbPresenter { /** * Sets up the BreadcrumbBar depending on the displayed category . */ public void setupBreadCrumbBar ( ) { } }
String [ ] stringArr = model . getDisplayedCategory ( ) . getBreadcrumb ( ) . split ( BREADCRUMB_DELIMITER ) ; Category [ ] categories = new Category [ stringArr . length ] ; // Collecting all parent categories from the displayed category using the breadcrumb . // There will always be at least one category which will be added to the breadcrumb . categories [ 0 ] = searchCategory ( stringArr [ 0 ] ) ; // If there are more than one category in the stringArr [ ] , they will be added . For this reason // the Integer in the loop starts with one , thus only the second element in the array is needed . for ( int i = 1 ; i < stringArr . length ; ++ i ) { stringArr [ i ] = stringArr [ i - 1 ] + BREADCRUMB_DELIMITER + stringArr [ i ] ; categories [ i ] = searchCategory ( stringArr [ i ] ) ; } breadCrumbView . breadcrumbsItm = BreadCrumbBar . buildTreeModel ( categories ) ; breadCrumbView . breadCrumbBar . setSelectedCrumb ( breadCrumbView . breadcrumbsItm ) ;
public class Matrix { /** * multiplies this matrix by ' b ' and returns the result * See http : / / en . wikipedia . org / wiki / Matrix _ multiplication * @ param by The matrix to multiply by * @ returnthe resulting matrix */ public Matrix multiply ( Matrix by ) { } }
Matrix rslt = new Matrix ( ) ; float [ ] a = vals ; float [ ] b = by . vals ; float [ ] c = rslt . vals ; c [ I11 ] = a [ I11 ] * b [ I11 ] + a [ I12 ] * b [ I21 ] + a [ I13 ] * b [ I31 ] ; c [ I12 ] = a [ I11 ] * b [ I12 ] + a [ I12 ] * b [ I22 ] + a [ I13 ] * b [ I32 ] ; c [ I13 ] = a [ I11 ] * b [ I13 ] + a [ I12 ] * b [ I23 ] + a [ I13 ] * b [ I33 ] ; c [ I21 ] = a [ I21 ] * b [ I11 ] + a [ I22 ] * b [ I21 ] + a [ I23 ] * b [ I31 ] ; c [ I22 ] = a [ I21 ] * b [ I12 ] + a [ I22 ] * b [ I22 ] + a [ I23 ] * b [ I32 ] ; c [ I23 ] = a [ I21 ] * b [ I13 ] + a [ I22 ] * b [ I23 ] + a [ I23 ] * b [ I33 ] ; c [ I31 ] = a [ I31 ] * b [ I11 ] + a [ I32 ] * b [ I21 ] + a [ I33 ] * b [ I31 ] ; c [ I32 ] = a [ I31 ] * b [ I12 ] + a [ I32 ] * b [ I22 ] + a [ I33 ] * b [ I32 ] ; c [ I33 ] = a [ I31 ] * b [ I13 ] + a [ I32 ] * b [ I23 ] + a [ I33 ] * b [ I33 ] ; return rslt ;
public class ControlBeanContext { /** * Resets the composite control ID for this context and all children beneath it . This * can be used to invalidate cached values when necessary ( for example , when a context * is reparented ) . */ private void resetControlID ( ) { } }
_controlID = null ; for ( Object child : this ) { if ( child instanceof ControlBeanContext ) ( ( ControlBeanContext ) child ) . resetControlID ( ) ; }
public class WizardProjectsImportPageProxy { /** * utils */ private Button getMainPageButton ( String field ) throws NoSuchFieldException , SecurityException , IllegalArgumentException , IllegalAccessException { } }
Field f = mainPage . getClass ( ) . getDeclaredField ( field ) ; f . setAccessible ( true ) ; return ( Button ) f . get ( mainPage ) ;
public class ACETernWriter { /** * initialize */ public void initialize ( ) throws ResourceInitializationException { } }
mDocNum = 0 ; convertTimex3To2 = ( Boolean ) getConfigParameterValue ( PARAM_CONVERTTIMEX3TO2 ) ; mOutputDir = new File ( ( String ) getConfigParameterValue ( PARAM_OUTPUTDIR ) ) ; if ( ! mOutputDir . exists ( ) ) { mOutputDir . mkdirs ( ) ; }
public class WCOutputStream31 { /** * @ see javax . servlet . ServletOutputStream # println ( boolean ) */ public void println ( boolean b ) throws IOException { } }
if ( this . _listener != null && ! checkIfCalledFromWLonError ( ) ) { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) Tr . debug ( tc , "non blocking println boolean , WriteListener enabled: " + this . _listener ) ; this . println_NonBlocking ( Boolean . toString ( b ) ) ; } else { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) Tr . debug ( tc , "println boolean" ) ; super . println ( b ) ; }
public class MyActivity { /** * Called when the activity is first created . */ @ Override public void onCreate ( Bundle savedInstanceState ) { } }
super . onCreate ( savedInstanceState ) ; setContentView ( R . layout . main ) ; final InfiniteViewPager viewPager = ( InfiniteViewPager ) findViewById ( R . id . infinite_viewpager ) ; viewPager . setAdapter ( new MyInfinitePagerAdapter ( 0 ) ) ; viewPager . setPageMargin ( 20 ) ; viewPager . setOnInfinitePageChangeListener ( new InfiniteViewPager . OnInfinitePageChangeListener ( ) { @ Override public void onPageScrolled ( final Object indicator , final float positionOffset , final int positionOffsetPixels ) { Log . d ( "InfiniteViewPager" , "onPageScrolled " . concat ( String . valueOf ( indicator ) ) ) ; } @ Override public void onPageSelected ( final Object indicator ) { Log . d ( "InfiniteViewPager" , "onPageSelected " + indicator . toString ( ) ) ; } @ Override public void onPageScrollStateChanged ( final int state ) { Log . d ( "InfiniteViewPager" , "state " + String . valueOf ( state ) ) ; } } ) ; final Button btn = ( Button ) findViewById ( R . id . current_item_btn ) ; btn . setOnClickListener ( new View . OnClickListener ( ) { @ Override public void onClick ( final View v ) { viewPager . setCurrentIndicator ( 6 ) ; } } ) ;
public class NotifyConfigurationTypeMarshaller {
    /**
     * Marshall the given parameter object.
     *
     * @param notifyConfigurationType the configuration object to serialize; must not be null
     * @param protocolMarshaller      receives each (value, binding) pair in order
     * @throws SdkClientException if the input is null or any field fails to marshall
     */
    public void marshall(NotifyConfigurationType notifyConfigurationType, ProtocolMarshaller protocolMarshaller) {
        if (notifyConfigurationType == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Generated code: each getter is paired with its protocol binding constant.
            protocolMarshaller.marshall(notifyConfigurationType.getFrom(), FROM_BINDING);
            protocolMarshaller.marshall(notifyConfigurationType.getReplyTo(), REPLYTO_BINDING);
            protocolMarshaller.marshall(notifyConfigurationType.getSourceArn(), SOURCEARN_BINDING);
            protocolMarshaller.marshall(notifyConfigurationType.getBlockEmail(), BLOCKEMAIL_BINDING);
            protocolMarshaller.marshall(notifyConfigurationType.getNoActionEmail(), NOACTIONEMAIL_BINDING);
            protocolMarshaller.marshall(notifyConfigurationType.getMfaEmail(), MFAEMAIL_BINDING);
        } catch (Exception e) {
            // Wrap any failure, preserving the original exception as the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class SerialVersionUIDAdder { /** * Computes and returns the value of SVUID . * @ return Returns the serial version UID * @ throws IOException * if an I / O error occurs */ protected long computeSVUID ( ) throws IOException { } }
ByteArrayOutputStream bos ; DataOutputStream dos = null ; long svuid = 0 ; try { bos = new ByteArrayOutputStream ( ) ; dos = new DataOutputStream ( bos ) ; /* * 1 . The class name written using UTF encoding . */ dos . writeUTF ( name . replace ( '/' , '.' ) ) ; /* * 2 . The class modifiers written as a 32 - bit integer . */ dos . writeInt ( access & ( Opcodes . ACC_PUBLIC | Opcodes . ACC_FINAL | Opcodes . ACC_INTERFACE | Opcodes . ACC_ABSTRACT ) ) ; /* * 3 . The name of each interface sorted by name written using UTF * encoding . */ Arrays . sort ( interfaces ) ; for ( int i = 0 ; i < interfaces . length ; i ++ ) { dos . writeUTF ( interfaces [ i ] . replace ( '/' , '.' ) ) ; } /* * 4 . For each field of the class sorted by field name ( except * private static and private transient fields ) : * 1 . The name of the field in UTF encoding . 2 . The modifiers of the * field written as a 32 - bit integer . 3 . The descriptor of the field * in UTF encoding * Note that field signatures are not dot separated . Method and * constructor signatures are dot separated . Go figure . . . */ writeItems ( svuidFields , dos , false ) ; /* * 5 . If a class initializer exists , write out the following : 1 . The * name of the method , < clinit > , in UTF encoding . 2 . The modifier of * the method , java . lang . reflect . Modifier . STATIC , written as a * 32 - bit integer . 3 . The descriptor of the method , ( ) V , in UTF * encoding . */ if ( hasStaticInitializer ) { dos . writeUTF ( "<clinit>" ) ; dos . writeInt ( Opcodes . ACC_STATIC ) ; dos . writeUTF ( "()V" ) ; } // if . . /* * 6 . For each non - private constructor sorted by method name and * signature : 1 . The name of the method , < init > , in UTF encoding . 2. * The modifiers of the method written as a 32 - bit integer . 3 . The * descriptor of the method in UTF encoding . */ writeItems ( svuidConstructors , dos , true ) ; /* * 7 . For each non - private method sorted by method name and * signature : 1 . 
The name of the method in UTF encoding . 2 . The * modifiers of the method written as a 32 - bit integer . 3 . The * descriptor of the method in UTF encoding . */ writeItems ( svuidMethods , dos , true ) ; dos . flush ( ) ; /* * 8 . The SHA - 1 algorithm is executed on the stream of bytes * produced by DataOutputStream and produces five 32 - bit values * sha [ 0 . . 4 ] . */ byte [ ] hashBytes = computeSHAdigest ( bos . toByteArray ( ) ) ; /* * 9 . The hash value is assembled from the first and second 32 - bit * values of the SHA - 1 message digest . If the result of the message * digest , the five 32 - bit words H0 H1 H2 H3 H4 , is in an array of * five int values named sha , the hash value would be computed as * follows : * long hash = ( ( sha [ 0 ] > > > 24 ) & 0xFF ) | ( ( sha [ 0 ] > > > 16 ) & 0xFF ) * < < 8 | ( ( sha [ 0 ] > > > 8 ) & 0xFF ) < < 16 | ( ( sha [ 0 ] > > > 0 ) & 0xFF ) < < * 24 | ( ( sha [ 1 ] > > > 24 ) & 0xFF ) < < 32 | ( ( sha [ 1 ] > > > 16 ) & 0xFF ) < < * 40 | ( ( sha [ 1 ] > > > 8 ) & 0xFF ) < < 48 | ( ( sha [ 1 ] > > > 0 ) & 0xFF ) < < * 56; */ for ( int i = Math . min ( hashBytes . length , 8 ) - 1 ; i >= 0 ; i -- ) { svuid = ( svuid << 8 ) | ( hashBytes [ i ] & 0xFF ) ; } } finally { // close the stream ( if open ) if ( dos != null ) { dos . close ( ) ; } } return svuid ;
public class PartialUniqueIndex {
    /**
     * Transfer all entries from src to dest tables.
     * While rehashing, entries whose (weakly referenced) key has been collected —
     * or that have outlived {@code timeToLive} — are dropped instead of moved.
     *
     * @param src  the old bucket array; emptied as a side effect
     * @param dest the new bucket array receiving the surviving entries
     */
    private void transfer(Entry[] src, Entry[] dest) {
        for (int j = 0; j < src.length; ++j) {
            Entry e = src[j];
            // Detach the whole chain from the source bucket up front.
            src[j] = null;
            while (e != null) {
                // Capture the successor before e.next is rewritten below.
                Entry next = e.next;
                Object key = e.get();
                if (key == null || (timeToLive > 0 && ((TimedEntry) e).isExpired(timeToLive))) {
                    // Key was garbage-collected, or the entry expired: discard it.
                    e.next = null; // Help GC
                    size--;
                } else {
                    // Re-bucket the live entry, prepending it to the destination chain.
                    int i = indexFor(e.hash, dest.length);
                    e.next = dest[i];
                    dest[i] = e;
                }
                e = next;
            }
        }
    }
}
public class Pareto {
    /**
     * Calculates the <em>non-domination</em> rank of the given input {@code set},
     * using the given {@code dominance} comparator.
     *
     * @apiNote
     * Calculating the rank has a time and space complexity of {@code O(n^2)},
     * where {@code n} is the {@code set} size.
     * <b>Reference:</b> <em>Kalyanmoy Deb, Amrit Pratap, Sameer Agarwal, and
     * T. Meyarivan. A Fast and Elitist Multiobjective Genetic Algorithm: NSGA-II,
     * IEEE TRANSACTIONS ON EVOLUTIONARY COMPUTATION, VOL. 6, NO. 2, APRIL 2002.</em>
     *
     * @param set the input set
     * @param dominance the dominance comparator used
     * @param <T> the element type
     * @return the <em>non-domination</em> rank of the given input {@code set}
     */
    public static <T> int[] rank(final Seq<? extends T> set, final Comparator<? super T> dominance) {
        // Pre-compute the pairwise dominance relations; d[i][j] > 0 means i dominates j.
        final int[][] d = new int[set.size()][set.size()];
        for (int i = 0; i < set.size(); ++i) {
            for (int j = i + 1; j < set.size(); ++j) {
                d[i][j] = dominance.compare(set.get(i), set.get(j));
                d[j][i] = -d[i][j];
            }
        }
        // Compute for each element p the set Sp of elements it dominates and the
        // count np of elements dominating it. Names follow the referenced paper.
        final int[] nq = new int[set.size()];
        final List<IntList> fronts = new ArrayList<>();
        IntList Fi = new IntList();
        for (int p = 0; p < set.size(); ++p) {
            final IntList Sp = new IntList();
            int np = 0;
            for (int q = 0; q < set.size(); ++q) {
                if (p != q) {
                    // If p dominates q, add q to the set of solutions dominated by p.
                    if (d[p][q] > 0) {
                        Sp.add(q);
                    // Otherwise, if q dominates p, increment p's domination counter.
                    } else if (d[q][p] > 0) {
                        np += 1;
                    }
                }
            }
            // Elements dominated by nothing belong to the first front.
            if (np == 0) {
                Fi.add(p);
            }
            fronts.add(Sp);
            nq[p] = np;
        }
        // Peel off fronts one at a time; i is the current front index (= rank).
        int i = 0;
        final int[] ranks = new int[set.size()];
        while (!Fi.isEmpty()) {
            // Q collects the members of the next front.
            final IntList Q = new IntList();
            for (int p = 0; p < Fi.size(); ++p) {
                final int fi = Fi.get(p);
                ranks[fi] = i;
                // Decrement the domination count of everything fi dominates;
                // anything reaching zero joins the next front.
                for (int k = 0, n = fronts.get(fi).size(); k < n; ++k) {
                    final int q = fronts.get(fi).get(k);
                    nq[q] -= 1;
                    if (nq[q] == 0) {
                        Q.add(q);
                    }
                }
            }
            ++i;
            Fi = Q;
        }
        return ranks;
    }
}
public class PdfReport {
    /**
     * Pre-initializes the graph images used by the report; this method is used by
     * the Swing UI. (Original comment: "cette méthode est utilisée dans l'ihm Swing".)
     *
     * @param newSmallGraphs      small graph images, keyed by graph name
     * @param newSmallOtherGraphs additional small graph images, keyed by graph name
     * @param newLargeGraphs      large graph images, keyed by graph name
     */
    public void preInitGraphs(Map<String, byte[]> newSmallGraphs, Map<String, byte[]> newSmallOtherGraphs, Map<String, byte[]> newLargeGraphs) {
        // Delegates directly to the core report, which owns the rendering state.
        pdfCoreReport.preInitGraphs(newSmallGraphs, newSmallOtherGraphs, newLargeGraphs);
    }
}
public class ClassUtils { /** * Return the user - defined class for the given instance : usually simply * the class of the given instance , but the original class in case of a * CGLIB - generated subclass . * @ param instance the instance to check * @ return the user - defined class */ public static Class < ? > getUserClass ( Object instance ) { } }
Assert . notNull ( instance , "Instance must not be null" ) ; return getUserClass ( instance . getClass ( ) ) ;
public class JapanesePersonRecognition { /** * 插入日本人名 * @ param name * @ param activeLine * @ param wordNetOptimum * @ param wordNetAll */ private static void insertName ( String name , int activeLine , WordNet wordNetOptimum , WordNet wordNetAll ) { } }
if ( isBadCase ( name ) ) return ; wordNetOptimum . insert ( activeLine , new Vertex ( Predefine . TAG_PEOPLE , name , new CoreDictionary . Attribute ( Nature . nrj ) , WORD_ID ) , wordNetAll ) ;
public class Engine { /** * Removes the output variable of the given name . * @ param name is the name of the output variable * @ return the output variable of the given name * @ throws RuntimeException if there is no variable with the given name */ public OutputVariable removeOutputVariable ( String name ) { } }
for ( Iterator < OutputVariable > it = this . outputVariables . iterator ( ) ; it . hasNext ( ) ; ) { OutputVariable outputVariable = it . next ( ) ; if ( outputVariable . getName ( ) . equals ( name ) ) { it . remove ( ) ; return outputVariable ; } } throw new RuntimeException ( String . format ( "[engine error] no output variable by name <%s>" , name ) ) ;
public class SandBoxMaker { /** * Generate command string */ public String sandboxPolicy ( String workerId , Map < String , String > replaceMap ) throws IOException { } }
if ( ! isEnable ) { return "" ; } replaceMap . putAll ( replaceBaseMap ) ; String tmpPolicy = generatePolicyFile ( replaceMap ) ; File file = new File ( tmpPolicy ) ; String policyPath = StormConfig . worker_root ( conf , workerId ) + File . separator + SANBOX_TEMPLATE_NAME ; File dest = new File ( policyPath ) ; file . renameTo ( dest ) ; StringBuilder sb = new StringBuilder ( ) ; sb . append ( " -Djava.security.manager -Djava.security.policy=" ) ; sb . append ( policyPath ) ; return sb . toString ( ) ;
public class LongTupleIterators {
    /**
     * Returns an iterator that returns the {@link MutableLongTuple}s from the
     * given delegate, wrapped at the given bounds.<br>
     * <br>
     * NOTE: The iterator will store REFERENCES to the given bounds.
     * They may NOT be modified while the iteration is in progress.
     *
     * @param bounds The bounds
     * @param delegate The delegate iterator
     * @return The iterator
     */
    static Iterator<MutableLongTuple> wrappingIteratorInternal(LongTuple bounds, Iterator<? extends MutableLongTuple> delegate) {
        // Thin view: every call is forwarded to the delegate; only next() adds
        // the wrapping behavior around the returned tuple.
        return new Iterator<MutableLongTuple>() {
            @Override
            public boolean hasNext() {
                return delegate.hasNext();
            }

            @Override
            public MutableLongTuple next() {
                // Wrap (do not copy) the delegate's tuple into the given bounds.
                return LongTupleUtils.wrap(delegate.next(), bounds);
            }

            @Override
            public void remove() {
                delegate.remove();
            }
        };
    }
}
public class NetUtils { /** * Get the socket factory corresponding to the given proxy URI . If the * given proxy URI corresponds to an absence of configuration parameter , * returns null . If the URI is malformed raises an exception . * @ param propValue the property which is the class name of the * SocketFactory to instantiate ; assumed non null and non empty . * @ return a socket factory as defined in the property value . */ public static SocketFactory getSocketFactoryFromProperty ( Configuration conf , String propValue ) { } }
try { Class < ? > theClass = conf . getClassByName ( propValue ) ; return ( SocketFactory ) ReflectionUtils . newInstance ( theClass , conf ) ; } catch ( ClassNotFoundException cnfe ) { throw new RuntimeException ( "Socket Factory class not found: " + cnfe ) ; }
public class AppServiceEnvironmentsInner { /** * Get all worker pools of an App Service Environment . * Get all worker pools of an App Service Environment . * @ param resourceGroupName Name of the resource group to which the resource belongs . * @ param name Name of the App Service Environment . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ throws CloudException thrown if the request is rejected by server * @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @ return the PagedList & lt ; WorkerPoolResourceInner & gt ; object if successful . */ public PagedList < WorkerPoolResourceInner > listWorkerPools ( final String resourceGroupName , final String name ) { } }
ServiceResponse < Page < WorkerPoolResourceInner > > response = listWorkerPoolsSinglePageAsync ( resourceGroupName , name ) . toBlocking ( ) . single ( ) ; return new PagedList < WorkerPoolResourceInner > ( response . body ( ) ) { @ Override public Page < WorkerPoolResourceInner > nextPage ( String nextPageLink ) { return listWorkerPoolsNextSinglePageAsync ( nextPageLink ) . toBlocking ( ) . single ( ) . body ( ) ; } } ;
public class FilePolicyIndex { /** * ( non - Javadoc ) * @ see * org . fcrepo . server . security . xacml . pdp . data . PolicyDataManager # deletePolicy ( java . lang . String ) */ @ Override public boolean deletePolicy ( String name ) throws PolicyIndexException { } }
writeLock . lock ( ) ; try { logger . debug ( "Deleting policy named: " + name ) ; return doDelete ( name ) ; } finally { writeLock . unlock ( ) ; }
public class AbstractBeanJsonSerializer {
    /**
     * {@inheritDoc}
     *
     * Looks up the serializer appropriate for the value (e.g. a subtype serializer)
     * and delegates the actual serialization to it, passing along this bean's
     * default identity and type info.
     */
    @Override
    public void doSerialize(JsonWriter writer, T value, JsonSerializationContext ctx, JsonSerializerParameters params) {
        getSerializer(writer, value, ctx).serializeInternally(writer, value, ctx, params, defaultIdentityInfo, defaultTypeInfo);
    }
}
public class ChatLinearLayoutManager { /** * Helper method to call appropriate recycle method depending on current layout direction * @ param recycler Current recycler that is attached to RecyclerView * @ param layoutState Current layout state . Right now , this object does not change but * we may consider moving it out of this view so passing around as a * parameter for now , rather than accessing { @ link # mLayoutState } * @ see # recycleViewsFromStart ( RecyclerView . Recycler , int ) * @ see # recycleViewsFromEnd ( RecyclerView . Recycler , int ) * @ see LayoutState # mLayoutDirection */ private void recycleByLayoutState ( RecyclerView . Recycler recycler , LayoutState layoutState ) { } }
if ( ! layoutState . mRecycle ) { return ; } if ( layoutState . mLayoutDirection == LayoutState . LAYOUT_START ) { recycleViewsFromEnd ( recycler , layoutState . mScrollingOffset ) ; } else { recycleViewsFromStart ( recycler , layoutState . mScrollingOffset ) ; }
public class JavaTypeUtil { /** * 处理泛型名称 * @ param fullName 泛型全名 * @ return 泛型的名称 */ private static String dealName ( String fullName ) { } }
Matcher matcher = SUPER_PATTERN . matcher ( fullName ) ; String name ; if ( matcher . find ( ) ) { name = matcher . group ( 1 ) ; } else { matcher = EXTENDS_PATTERN . matcher ( fullName ) ; if ( matcher . find ( ) ) { name = matcher . group ( 1 ) ; } else { name = fullName ; } } return name ;
public class PrimitiveCases { /** * Matches a float . */ public static DecomposableMatchBuilder0 < Float > caseFloat ( float f ) { } }
List < Matcher < Object > > matchers = new ArrayList < > ( ) ; matchers . add ( eq ( f ) ) ; return new DecomposableMatchBuilder0 < > ( matchers , new PrimitiveFieldExtractor < > ( Float . class ) ) ;
public class ListTagsForCertificateRequestMarshaller {
    /**
     * Marshall the given parameter object.
     *
     * @param listTagsForCertificateRequest the request to serialize; must not be null
     * @param protocolMarshaller            receives the certificate ARN with its binding
     * @throws SdkClientException if the input is null or the field fails to marshall
     */
    public void marshall(ListTagsForCertificateRequest listTagsForCertificateRequest, ProtocolMarshaller protocolMarshaller) {
        if (listTagsForCertificateRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Generated code: only field is the certificate ARN.
            protocolMarshaller.marshall(listTagsForCertificateRequest.getCertificateArn(), CERTIFICATEARN_BINDING);
        } catch (Exception e) {
            // Wrap any failure, preserving the original exception as the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class CommerceAccountUtil {
    /**
     * Returns an ordered range of all the commerce accounts that the user has
     * permission to view where userId = &#63; and type = &#63;.
     *
     * <p>Useful when paginating results. Returns a maximum of <code>end - start</code>
     * instances. <code>start</code> and <code>end</code> are not primary keys, they are
     * indexes in the result set; <code>0</code> refers to the first result. Setting both
     * to {@link QueryUtil#ALL_POS} returns the full result set. If
     * <code>orderByComparator</code> is specified the query includes the given ORDER BY
     * logic; if it is absent and pagination is required, the default ORDER BY logic from
     * {@link CommerceAccountModelImpl} is used. If both are absent, for performance
     * reasons the result set is sorted by primary key ascending.
     *
     * @param userId the user ID
     * @param type the type
     * @param start the lower bound of the range of commerce accounts
     * @param end the upper bound of the range of commerce accounts (not inclusive)
     * @param orderByComparator the comparator to order the results by (optionally <code>null</code>)
     * @return the ordered range of matching commerce accounts that the user has permission to view
     */
    public static List<CommerceAccount> filterFindByU_T(long userId, int type, int start, int end, OrderByComparator<CommerceAccount> orderByComparator) {
        // Static utility facade: delegates to the persistence implementation.
        return getPersistence().filterFindByU_T(userId, type, start, end, orderByComparator);
    }
}
public class OpenAddressingHashMap {
    /**
     * If finish() has already been called on this map, this method returns the
     * value associated with the specified key. If the specified key is not in
     * this map, returns null.
     * If finish() has not been called on this map, this method always returns null.
     */
    @Override
    @SuppressWarnings("unchecked")
    public Object getUndefined(Object key) {
        // Math.abs(x % n) is the same as (x & n - 1) when n is a power of 2,
        // so the mask doubles as the modulus for the table length.
        int hashModMask = OpenAddressing.hashTableLength(hashTable) - 1;
        int hash = hashCode(key);
        int bucket = hash & hashModMask;
        // Hash entries index pairs in keysAndValues: *2 converts a pair index
        // into the key's slot; a negative entry marks an empty bucket.
        int hashEntry = OpenAddressing.getHashEntry(hashTable, bucket) * 2;
        // We found an entry at this hash position
        while (hashEntry >= 0) {
            if (Utils.equal(keysAndValues[hashEntry], key)) {
                // The value sits immediately after its key.
                return (V) keysAndValues[hashEntry + 1];
            }
            // linear probing resolves collisions: step to the next bucket, wrapping.
            bucket = (bucket + 1) & hashModMask;
            hashEntry = OpenAddressing.getHashEntry(hashTable, bucket) * 2;
        }
        // Sentinel meaning "not present" (distinct from a stored null value).
        return AbstractArrayMap.undefined;
    }
}
public class TransactionLocalMap { /** * Version of getEntry method for use when key is not found in * its direct hash slot . * @ param key the thread local object * @ param i the table index for key ' s hash code * @ param e the entry at table [ i ] * @ return the entry associated with key , or null if no such */ private Object getEntryAfterMiss ( TransactionLocal key , int i , Entry e ) { } }
Entry [ ] tab = table ; int len = tab . length ; while ( e != null ) { if ( e . key == key ) return e . value ; i = nextIndex ( i , len ) ; e = tab [ i ] ; } return null ;
public class Mappings {
    /**
     * (convert to Double) mapping.
     * Builds a single-input field mapping whose converter parses the string into a
     * Double (empty input becomes 0.0), guarded by a "parses as double" check that
     * reports "error.double" on failure, plus any caller-supplied constraints.
     *
     * @param constraints additional constraints applied after the built-in double check
     * @return new created mapping
     */
    public static Mapping<Double> doublev(Constraint... constraints) {
        return new FieldMapping(
                InputMode.SINGLE,
                // Empty string maps to 0.0; anything else must parse as a double.
                mkSimpleConverter(s -> isEmptyStr(s) ? 0.0d : Double.parseDouble(s)),
                new MappingMeta(MAPPING_DOUBLE, Double.class))
            // Validation constraint: value must parse, otherwise "error.double".
            .constraint(checking(Double::parseDouble, "error.double", true))
            .constraint(constraints);
    }
}
public class ThriftServerConfig { /** * Sets a maximum frame size * @ param maxFrameSize * @ return */ @ Config ( "thrift.max-frame-size" ) public ThriftServerConfig setMaxFrameSize ( DataSize maxFrameSize ) { } }
checkArgument ( maxFrameSize . toBytes ( ) <= 0x3FFFFFFF ) ; this . maxFrameSize = maxFrameSize ; return this ;
public class ClassCompiler {
    /**
     * Compile JavaScript source into one or more Java class files.
     * The first compiled class will have name mainClassName.
     * If the results of {@link #getTargetExtends()} or {@link #getTargetImplements()}
     * are not null, then the first compiled class will extend the specified super
     * class and implement the specified interfaces.
     *
     * @return array where elements with even indexes specify a class name and the
     *         following odd index gives the class file body as a byte[] array. The
     *         initial element of the array always holds mainClassName and array[1]
     *         holds its byte code.
     */
    public Object[] compileToClassFiles(String source, String sourceLocation, int lineno, String mainClassName) {
        // Parse the JavaScript source and lower it to Rhino's IR tree.
        Parser p = new Parser(compilerEnv);
        AstRoot ast = p.parse(source, sourceLocation, lineno);
        IRFactory irf = new IRFactory(compilerEnv);
        ScriptNode tree = irf.transformTree(ast);
        // release reference to original parse tree & parser
        irf = null;
        ast = null;
        p = null;
        Class<?> superClass = getTargetExtends();
        Class<?>[] interfaces = getTargetImplements();
        String scriptClassName;
        // "Primary" = no adapter needed: the script class itself is the main class.
        boolean isPrimary = (interfaces == null && superClass == null);
        if (isPrimary) {
            scriptClassName = mainClassName;
        } else {
            // The script becomes an auxiliary class; an adapter takes the main name.
            scriptClassName = makeAuxiliaryClassName(mainClassName, "1");
        }
        Codegen codegen = new Codegen();
        codegen.setMainMethodClass(mainMethodClassName);
        byte[] scriptClassBytes = codegen.compileToClassFile(compilerEnv, scriptClassName, tree, tree.getEncodedSource(), false);
        if (isPrimary) {
            return new Object[] { scriptClassName, scriptClassBytes };
        }
        // Collect the named top-level functions (name -> parameter count) so the
        // adapter can expose them as Java methods.
        int functionCount = tree.getFunctionCount();
        ObjToIntMap functionNames = new ObjToIntMap(functionCount);
        for (int i = 0; i != functionCount; ++i) {
            FunctionNode ofn = tree.getFunctionNode(i);
            String name = ofn.getName();
            if (name != null && name.length() != 0) {
                functionNames.put(name, ofn.getParamCount());
            }
        }
        if (superClass == null) {
            superClass = ScriptRuntime.ObjectClass;
        }
        // Generate the adapter class that extends/implements the requested types
        // and delegates to the compiled script class.
        byte[] mainClassBytes = JavaAdapter.createAdapterCode(functionNames, mainClassName, superClass, interfaces, scriptClassName);
        return new Object[] { mainClassName, mainClassBytes, scriptClassName, scriptClassBytes };
    }
}