| signature | implementation |
|---|---|
public class TelURLImpl { /** * ( non - Javadoc )
* @ see javax . servlet . sip . TelURL # setPhoneNumber ( java . lang . String ) */
public void setPhoneNumber ( String number ) { } } | String phoneNumber = number ; if ( number . startsWith ( "+" ) ) { phoneNumber = phoneNumber . substring ( 1 ) ; } try { basePhoneNumber ( phoneNumber ) ; telUrl . setPhoneNumber ( phoneNumber ) ; } catch ( ParseException ex ) { logger . error ( "Error setting phone number " + number ) ; throw new java . lang . IllegalArgumentException ( "phone number " + number + " is invalid" , ex ) ; } |
public class JDBC4PreparedStatement { /** * Sets the designated parameter to the given input stream . */
@ Override public void setAsciiStream ( int parameterIndex , InputStream x ) throws SQLException { } } | checkParameterBounds ( parameterIndex ) ; throw SQLError . noSupport ( ) ; |
public class XMLDocumentCache { /** * Cache a parse failure for this document . */
public static void cacheParseFailure ( XmlFileModel key ) { } } | map . put ( getKey ( key ) , new CacheDocument ( true , null ) ) ; |
public class AWSMigrationHubClient { /** * Lists progress update streams associated with the user account making this call .
* @ param listProgressUpdateStreamsRequest
* @ return Result of the ListProgressUpdateStreams operation returned by the service .
* @ throws AccessDeniedException
* You do not have sufficient access to perform this action .
* @ throws InternalServerErrorException
* Exception raised when there is an internal , configuration , or dependency error encountered .
* @ throws ServiceUnavailableException
* Exception raised when there is an internal , configuration , or dependency error encountered .
* @ throws InvalidInputException
* Exception raised when the provided input violates a policy constraint or is entered in the wrong format
* or data type .
* @ sample AWSMigrationHub . ListProgressUpdateStreams
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / AWSMigrationHub - 2017-05-31 / ListProgressUpdateStreams "
* target = " _ top " > AWS API Documentation < / a > */
@ Override public ListProgressUpdateStreamsResult listProgressUpdateStreams ( ListProgressUpdateStreamsRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeListProgressUpdateStreams ( request ) ; |
public class AlluxioRemoteLogFilter { /** * @ deprecated Sets option value use key = value format . The log4j . properties file uses this
* to set options . See the log4j . properties for more details .
* @ param key key ( name ) of the option
* @ param value value of the option */
@ Deprecated public void setOption ( String key , String value ) { } } | if ( key . equalsIgnoreCase ( PROCESS_TYPE_OPTION ) ) { mProcessType = value ; } |
public class InternalXbaseWithAnnotationsParser { /** * $ ANTLR start synpred41 _ InternalXbaseWithAnnotations */
public final void synpred41_InternalXbaseWithAnnotations_fragment ( ) throws RecognitionException { } } | // InternalXbaseWithAnnotations . g : 5032:6 : ( ( ( ) ( ( ( ruleJvmFormalParameter ) ) ( ' , ' ( ( ruleJvmFormalParameter ) ) ) * ) ? ( ( ' | ' ) ) ) )
// InternalXbaseWithAnnotations . g : 5032:7 : ( ( ) ( ( ( ruleJvmFormalParameter ) ) ( ' , ' ( ( ruleJvmFormalParameter ) ) ) * ) ? ( ( ' | ' ) ) )
{ // InternalXbaseWithAnnotations . g : 5032:7 : ( ( ) ( ( ( ruleJvmFormalParameter ) ) ( ' , ' ( ( ruleJvmFormalParameter ) ) ) * ) ? ( ( ' | ' ) ) )
// InternalXbaseWithAnnotations . g : 5033:7 : ( ) ( ( ( ruleJvmFormalParameter ) ) ( ' , ' ( ( ruleJvmFormalParameter ) ) ) * ) ? ( ( ' | ' ) )
{ // InternalXbaseWithAnnotations . g : 5033:7 : ( )
// InternalXbaseWithAnnotations . g : 5034:7:
{ } // InternalXbaseWithAnnotations . g : 5035:7 : ( ( ( ruleJvmFormalParameter ) ) ( ' , ' ( ( ruleJvmFormalParameter ) ) ) * ) ?
int alt134 = 2 ; int LA134_0 = input . LA ( 1 ) ; if ( ( LA134_0 == RULE_ID || LA134_0 == 14 || LA134_0 == 39 ) ) { alt134 = 1 ; } switch ( alt134 ) { case 1 : // InternalXbaseWithAnnotations . g : 5036:8 : ( ( ruleJvmFormalParameter ) ) ( ' , ' ( ( ruleJvmFormalParameter ) ) ) *
{ // InternalXbaseWithAnnotations . g : 5036:8 : ( ( ruleJvmFormalParameter ) )
// InternalXbaseWithAnnotations . g : 5037:9 : ( ruleJvmFormalParameter )
{ // InternalXbaseWithAnnotations . g : 5037:9 : ( ruleJvmFormalParameter )
// InternalXbaseWithAnnotations . g : 5038:10 : ruleJvmFormalParameter
{ pushFollow ( FOLLOW_43 ) ; ruleJvmFormalParameter ( ) ; state . _fsp -- ; if ( state . failed ) return ; } } // InternalXbaseWithAnnotations . g : 5041:8 : ( ' , ' ( ( ruleJvmFormalParameter ) ) ) *
loop133 : do { int alt133 = 2 ; int LA133_0 = input . LA ( 1 ) ; if ( ( LA133_0 == 15 ) ) { alt133 = 1 ; } switch ( alt133 ) { case 1 : // InternalXbaseWithAnnotations . g : 5042:9 : ' , ' ( ( ruleJvmFormalParameter ) )
{ match ( input , 15 , FOLLOW_22 ) ; if ( state . failed ) return ; // InternalXbaseWithAnnotations . g : 5043:9 : ( ( ruleJvmFormalParameter ) )
// InternalXbaseWithAnnotations . g : 5044:10 : ( ruleJvmFormalParameter )
{ // InternalXbaseWithAnnotations . g : 5044:10 : ( ruleJvmFormalParameter )
// InternalXbaseWithAnnotations . g : 5045:11 : ruleJvmFormalParameter
{ pushFollow ( FOLLOW_43 ) ; ruleJvmFormalParameter ( ) ; state . _fsp -- ; if ( state . failed ) return ; } } } break ; default : break loop133 ; } } while ( true ) ; } break ; } // InternalXbaseWithAnnotations . g : 5050:7 : ( ( ' | ' ) )
// InternalXbaseWithAnnotations . g : 5051:8 : ( ' | ' )
{ // InternalXbaseWithAnnotations . g : 5051:8 : ( ' | ' )
// InternalXbaseWithAnnotations . g : 5052:9 : ' | '
{ match ( input , 57 , FOLLOW_2 ) ; if ( state . failed ) return ; } } } } |
public class LinearClassifierFactory { /** * Train a classifier with a sigma tuned on a validation set .
* @ return The constructed classifier */
public LinearClassifier < L , F > trainClassifierV ( GeneralDataset < L , F > train , GeneralDataset < L , F > validation , double min , double max , boolean accuracy ) { } } | labelIndex = train . labelIndex ( ) ; featureIndex = train . featureIndex ( ) ; this . min = min ; this . max = max ; heldOutSetSigma ( train , validation ) ; double [ ] [ ] weights = trainWeights ( train ) ; return new LinearClassifier < L , F > ( weights , train . featureIndex ( ) , train . labelIndex ( ) ) ; |
public class ObjectFactory { /** * Create an instance of { @ link JAXBElement } { @ code < } { @ link Object } { @ code > }
* @ param value
* Java instance representing xml element ' s value .
* @ return
* the new instance of { @ link JAXBElement } { @ code < } { @ link Object } { @ code > } */
@ XmlElementDecl ( namespace = "http://www.opengis.net/citygml/building/1.0" , name = "_GenericApplicationPropertyOfAbstractBuilding" ) public JAXBElement < Object > create_GenericApplicationPropertyOfAbstractBuilding ( Object value ) { } } | return new JAXBElement < Object > ( __GenericApplicationPropertyOfAbstractBuilding_QNAME , Object . class , null , value ) ; |
public class SubWriterHolderWriter { /** * Add the inherited summary header .
* @ param mw the writer for the member being documented
* @ param cd the classdoc to be documented
* @ param inheritedTree the content tree to which the inherited summary header will be added */
public void addInheritedSummaryHeader ( AbstractMemberWriter mw , ClassDoc cd , Content inheritedTree ) { } } | mw . addInheritedSummaryAnchor ( cd , inheritedTree ) ; mw . addInheritedSummaryLabel ( cd , inheritedTree ) ; |
public class BlockHouseHolder_DDRB { /** * Sets W to its initial value using the first column of ' y ' and the value of ' b ' :
* < br >
* W = - & beta ; v < br >
* < br >
* where v = Y ( : , 0 ) .
* @ param blockLength size of the inner block
* @ param W Submatrix being initialized .
* @ param Y Contains householder vector
* @ param widthB How wide the W block matrix is .
* @ param b beta */
public static void initializeW ( final int blockLength , final DSubmatrixD1 W , final DSubmatrixD1 Y , final int widthB , final double b ) { } } | final double dataW [ ] = W . original . data ; final double dataY [ ] = Y . original . data ; for ( int i = W . row0 ; i < W . row1 ; i += blockLength ) { int heightW = Math . min ( blockLength , W . row1 - i ) ; int indexW = i * W . original . numCols + heightW * W . col0 ; int indexY = i * Y . original . numCols + heightW * Y . col0 ; // take in account the first element in V being 1
if ( i == W . row0 ) { dataW [ indexW ] = - b ; indexW += widthB ; indexY += widthB ; for ( int k = 1 ; k < heightW ; k ++ , indexW += widthB , indexY += widthB ) { dataW [ indexW ] = - b * dataY [ indexY ] ; } } else { for ( int k = 0 ; k < heightW ; k ++ , indexW += widthB , indexY += widthB ) { dataW [ indexW ] = - b * dataY [ indexY ] ; } } } |
public class DatabaseSpec { /** * Connect to ElasticSearch using custom parameters
* @ param host ES host
* @ param foo regex needed to match method
* @ param nativePort ES port
* @ param bar regex needed to match method
* @ param clusterName ES clustername
* @ throws DBException exception
* @ throws UnknownHostException exception
* @ throws NumberFormatException exception */
@ Given ( "^I connect to Elasticsearch cluster at host '(.+?)'( using native port '(.+?)')?( using cluster name '(.+?)')?$" ) public void connectToElasticSearch ( String host , String foo , String nativePort , String bar , String clusterName ) throws DBException , UnknownHostException , NumberFormatException { } } | LinkedHashMap < String , Object > settings_map = new LinkedHashMap < String , Object > ( ) ; if ( clusterName != null ) { settings_map . put ( "cluster.name" , clusterName ) ; } else { settings_map . put ( "cluster.name" , ES_DEFAULT_CLUSTER_NAME ) ; } commonspec . getElasticSearchClient ( ) . setSettings ( settings_map ) ; if ( nativePort != null ) { commonspec . getElasticSearchClient ( ) . setNativePort ( Integer . valueOf ( nativePort ) ) ; } else { commonspec . getElasticSearchClient ( ) . setNativePort ( ES_DEFAULT_NATIVE_PORT ) ; } commonspec . getElasticSearchClient ( ) . setHost ( host ) ; commonspec . getElasticSearchClient ( ) . connect ( ) ; |
public class ExtraLanguageTypeConverter { /** * Indicates if the given name has a mapping to the extra language .
* @ param type the type to convert .
* @ return { @ code true } if the mapping exists . */
public boolean hasConversion ( String type ) { } } | if ( ( isImplicitSarlTypes ( ) && type . startsWith ( IMPLICIT_PACKAGE ) ) || isImplicitJvmTypes ( ) ) { return true ; } if ( this . mapping == null ) { this . mapping = initMapping ( ) ; } return this . mapping . containsKey ( type ) ; |
public class TemplateDelegateNode { /** * Calculate a DeltemplateKey for the variant .
* < p > This is done lazily so that global references can be resolved . This is not ideal since
* nothing guarantees that resolution happens before access .
* < p > Note we don ' t do validation of the variant values since that is handled by the
* TemplateDelegateNodeBuilder during construction */
private DelTemplateKey resolveVariantExpression ( ) { } } | if ( delTemplateVariantExpr == null ) { delTemplateKey = DelTemplateKey . create ( delTemplateName , "" ) ; return delTemplateKey ; } ExprNode exprNode = delTemplateVariantExpr . getRoot ( ) ; if ( exprNode instanceof GlobalNode ) { GlobalNode globalNode = ( GlobalNode ) exprNode ; if ( globalNode . isResolved ( ) ) { exprNode = globalNode . getValue ( ) ; } else { // This global was not substituted . This happens when TemplateRegistries are built for
// message extraction and parseinfo generation . To make this ' work ' we just use the Global
// name for the variant value . This is fine and will help catch some errors .
// Because these nodes won ' t be used for code generation this should be safe .
// For this reason we also don ' t store the key , instead we just return it .
return DelTemplateKey . create ( delTemplateName , globalNode . getName ( ) ) ; } } if ( exprNode instanceof IntegerNode ) { // Globals were already substituted : We may now create the definitive variant and key fields
// on this node .
long variantValue = ( ( IntegerNode ) exprNode ) . getValue ( ) ; delTemplateKey = DelTemplateKey . create ( delTemplateName , String . valueOf ( variantValue ) ) ; } else if ( exprNode instanceof StringNode ) { // Globals were already substituted : We may now create the definitive variant and key fields
// on this node .
delTemplateKey = DelTemplateKey . create ( delTemplateName , ( ( StringNode ) exprNode ) . getValue ( ) ) ; } else { // We must have already reported an error , just create an arbitrary variant expr .
delTemplateKey = DelTemplateKey . create ( delTemplateName , exprNode . toSourceString ( ) ) ; } return delTemplateKey ; |
public class OptionUtil { /** * If split size option is true , returns thread number . If false , returns split size .
* @ return If true , local mode . */
public long getSplitSize ( ) { } } | if ( cli . hasOption ( SPLIT_SIZE ) ) { return Long . valueOf ( cli . getOptionValue ( SPLIT_SIZE ) ) ; } return DEFAULT_SPLIT_SIZE ; |
public class GitHubCollectorTask { /** * Clean up unused deployment collector items
* @ param collector the { @ link Collector } */
@ SuppressWarnings ( "PMD.AvoidDeeplyNestedIfStmts" ) // agreed , fixme
private void clean ( Collector collector ) { } } | Set < ObjectId > uniqueIDs = new HashSet < > ( ) ; /* Logic : For each component , retrieve the collector item list of the type SCM .
Store their IDs in a unique set ONLY if their collector IDs match with GitHub collectors ID . */
for ( com . capitalone . dashboard . model . Component comp : dbComponentRepository . findAll ( ) ) { if ( ! MapUtils . isEmpty ( comp . getCollectorItems ( ) ) ) { List < CollectorItem > itemList = comp . getCollectorItems ( ) . get ( CollectorType . SCM ) ; if ( itemList != null ) { itemList . stream ( ) . filter ( ci -> ci != null && Objects . equals ( ci . getCollectorId ( ) , collector . getId ( ) ) ) . map ( BaseModel :: getId ) . forEach ( uniqueIDs :: add ) ; } } } /* Logic : Get all the collector items from the collector _ item collection for this collector .
If their id is in the unique set ( above ) , keep them enabled ; else , disable them . */
List < GitHubRepo > repoList = new ArrayList < > ( ) ; Set < ObjectId > gitID = new HashSet < > ( ) ; gitID . add ( collector . getId ( ) ) ; gitHubRepoRepository . findByCollectorIdIn ( gitID ) . stream ( ) . filter ( Objects :: nonNull ) . forEach ( repo -> { if ( repo . isPushed ( ) ) { return ; } repo . setEnabled ( uniqueIDs . contains ( repo . getId ( ) ) ) ; repoList . add ( repo ) ; } ) ; gitHubRepoRepository . save ( repoList ) ; |
public class CallbackSerializer { /** * { @ inheritDoc } */
@ Override public void serialize ( Callback value , JsonGenerator jgen , SerializerProvider provider ) throws IOException { } } | if ( StringUtils . isBlank ( value . getRef ( ) ) ) { jgen . writeStartObject ( ) ; for ( Entry < String , PathItem > entry : value . entrySet ( ) ) { jgen . writeObjectField ( entry . getKey ( ) , entry . getValue ( ) ) ; } jgen . writeEndObject ( ) ; } else { jgen . writeStartObject ( ) ; jgen . writeStringField ( "$ref" , value . getRef ( ) ) ; jgen . writeEndObject ( ) ; } |
public class OpCompareEqual { /** * { @ inheritDoc } */
@ Override public void resolve ( final ValueStack values ) throws Exception { } } | if ( values . size ( ) < 2 ) throw new Exception ( "missing operands for " + toString ( ) ) ; try { final Object [ ] ops = values . ensureSameTypes ( ) ; values . push ( new Boolean ( ops [ 1 ] . equals ( ops [ 0 ] ) ) ) ; } catch ( final ParseException e ) { e . fillInStackTrace ( ) ; throw new Exception ( toString ( ) + "; " + e . getMessage ( ) , e ) ; } return ; |
public class DataBindingUtils { /** * Binds the given source object to the given target object performing type conversion if necessary
* @ param entity The PersistentEntity instance
* @ param object The object to bind to
* @ param source The source object
* @ param include The list of properties to include
* @ param exclude The list of properties to exclude
* @ param filter The prefix to filter by
* @ see org . grails . datastore . mapping . model . PersistentEntity
* @ return A BindingResult if there were errors or null if it was successful */
@ SuppressWarnings ( "unchecked" ) public static BindingResult bindObjectToDomainInstance ( PersistentEntity entity , Object object , Object source , List include , List exclude , String filter ) { } } | BindingResult bindingResult = null ; GrailsApplication grailsApplication = Holders . findApplication ( ) ; try { final DataBindingSource bindingSource = createDataBindingSource ( grailsApplication , object . getClass ( ) , source ) ; final DataBinder grailsWebDataBinder = getGrailsWebDataBinder ( grailsApplication ) ; grailsWebDataBinder . bind ( object , bindingSource , filter , include , exclude ) ; } catch ( InvalidRequestBodyException e ) { String messageCode = "invalidRequestBody" ; Class objectType = object . getClass ( ) ; String defaultMessage = "An error occurred parsing the body of the request" ; String [ ] codes = getMessageCodes ( messageCode , objectType ) ; bindingResult = new BeanPropertyBindingResult ( object , objectType . getName ( ) ) ; bindingResult . addError ( new ObjectError ( bindingResult . getObjectName ( ) , codes , null , defaultMessage ) ) ; } catch ( Exception e ) { bindingResult = new BeanPropertyBindingResult ( object , object . getClass ( ) . getName ( ) ) ; bindingResult . addError ( new ObjectError ( bindingResult . getObjectName ( ) , e . getMessage ( ) ) ) ; } if ( entity != null && bindingResult != null ) { BindingResult newResult = new ValidationErrors ( object ) ; for ( Object error : bindingResult . getAllErrors ( ) ) { if ( error instanceof FieldError ) { FieldError fieldError = ( FieldError ) error ; final boolean isBlank = BLANK . equals ( fieldError . getRejectedValue ( ) ) ; if ( ! isBlank ) { newResult . addError ( fieldError ) ; } else { PersistentProperty property = entity . getPropertyByName ( fieldError . getField ( ) ) ; if ( property != null ) { final boolean isOptional = property . isNullable ( ) ; if ( ! isOptional ) { newResult . addError ( fieldError ) ; } } else { newResult . 
addError ( fieldError ) ; } } } else { newResult . addError ( ( ObjectError ) error ) ; } } bindingResult = newResult ; } MetaClass mc = GroovySystem . getMetaClassRegistry ( ) . getMetaClass ( object . getClass ( ) ) ; if ( mc . hasProperty ( object , "errors" ) != null && bindingResult != null ) { ValidationErrors errors = new ValidationErrors ( object ) ; errors . addAllErrors ( bindingResult ) ; mc . setProperty ( object , "errors" , errors ) ; } return bindingResult ; |
public class Models { /** * Summarize fields which are specific to hex . deeplearning . DeepLearningModel . */
private static void summarizeDeepLearningModel ( ModelSummary summary , hex . deeplearning . DeepLearningModel model ) { } } | // add generic fields such as column names
summarizeModelCommonFields ( summary , model ) ; summary . model_algorithm = "DeepLearning" ; JsonObject all_params = ( model . get_params ( ) ) . toJSON ( ) ; summary . critical_parameters = whitelistJsonObject ( all_params , DL_critical_params ) ; summary . secondary_parameters = whitelistJsonObject ( all_params , DL_secondary_params ) ; summary . expert_parameters = whitelistJsonObject ( all_params , DL_expert_params ) ; |
public class AbstractBridgeService { /** * wtb closures */
protected T newSession ( IoSessionInitializer < ? extends IoFuture > initializer , IoFuture future , Callable < T > sessionCreator ) throws Exception { } } | T session ; IoProcessorEx < T > processor = getProcessor ( ) ; synchronized ( processor ) { session = sessionCreator . call ( ) ; processor . add ( session ) ; } initSession ( session , future , initializer ) ; IoFilterChain filterChain = session . getFilterChain ( ) ; try { this . getFilterChainBuilder ( ) . buildFilterChain ( filterChain ) ; } catch ( Throwable t ) { ExceptionMonitor . getInstance ( ) . exceptionCaught ( t , session ) ; } addLoggerFilter ( session , getLogger ( ) ) ; getListeners ( ) . fireSessionCreated ( session ) ; return session ; |
public class ResourceProcessor { /** * Creates a Reference for a binding from a registered ObjectFactory .
* @ param extensionFactory the object factory info
* @ param resourceBinding the resource binding
* @ return the reference */
private Reference createExtensionFactoryReference ( ObjectFactoryInfo extensionFactory , ResourceInjectionBinding resourceBinding ) // F48603.9
{ } } | String className = extensionFactory . getObjectFactoryClass ( ) . getName ( ) ; Reference ref = new Reference ( resourceBinding . getInjectionClassTypeName ( ) , className , null ) ; if ( extensionFactory . isRefAddrNeeded ( ) ) // F48603
{ ref . add ( new ResourceInfoRefAddr ( createResourceInfo ( resourceBinding ) ) ) ; } return ref ; |
public class AbstractNodeMonitorDescriptor { /** * Performs monitoring across the board .
* @ return
* For all the computers , report the monitored values . */
protected Map < Computer , T > monitor ( ) throws InterruptedException { } } | Map < Computer , T > data = new HashMap < > ( ) ; for ( Computer c : Jenkins . getInstance ( ) . getComputers ( ) ) { try { Thread . currentThread ( ) . setName ( "Monitoring " + c . getDisplayName ( ) + " for " + getDisplayName ( ) ) ; if ( c . getChannel ( ) == null ) data . put ( c , null ) ; else data . put ( c , monitor ( c ) ) ; } catch ( RuntimeException | IOException e ) { LOGGER . log ( Level . WARNING , "Failed to monitor " + c . getDisplayName ( ) + " for " + getDisplayName ( ) , e ) ; } catch ( InterruptedException e ) { throw ( InterruptedException ) new InterruptedException ( "Node monitoring " + c . getDisplayName ( ) + " for " + getDisplayName ( ) + " aborted." ) . initCause ( e ) ; } } return data ; |
public class StAXEncoder { /** * Write text to the output
* ( non - Javadoc )
* @ see javax . xml . stream . XMLStreamWriter # writeCharacters ( char [ ] , int , int ) */
public void writeCharacters ( char [ ] text , int start , int len ) throws XMLStreamException { } } | this . writeCharacters ( new String ( text , start , len ) ) ; |
public class Promises { /** * Gets { @ code Promise } from provided { @ code AsyncSupplier } ,
* waits until it completes and than returns a { @ code Promise < Void > } */
@ NotNull public static Promise < Void > sequence ( @ NotNull AsyncSupplier < Void > promise ) { } } | return promise . get ( ) . toVoid ( ) ; |
public class XMLComparator { /** * Check that all the rows and elements defined in the doc1 are included in doc2
* @ param doc1 The source document
* @ param doc2 The checked document
* @ param primaryKeyFields A String array containing all the field names composing the primary key
* @ return True if all the elements defined in the doc1 exists in the doc2 */
public boolean compare ( Document doc1 , Document doc2 , String tableName , String [ ] primaryKeyFields ) { } } | // Remove errors of previous calls
errorsList . clear ( ) ; // Iterator all the Row
List < HashMap < String , String > > doc1Hash = createRowsHashMap ( doc1 , tableName ) ; List < HashMap < String , String > > doc2Hash = createRowsHashMap ( doc2 , tableName ) ; for ( HashMap < String , String > hash1 : doc1Hash ) { // System . out . println ( hash1 . toString ( ) ) ;
boolean found = false ; for ( HashMap < String , String > hash2 : doc2Hash ) { int i = 0 ; while ( i < primaryKeyFields . length ) { String key = primaryKeyFields [ i ] ; if ( ! hash2 . containsKey ( key ) ) { // Error the primary key field is not present !
errorsList . add ( "Error the primary key field is not present!" ) ; } String value1 = hash1 . get ( key ) ; String value2 = hash2 . get ( key ) ; if ( value1 . equals ( value2 ) ) { // System . out . println ( " One part is found ! " ) ;
} else { // System . out . println ( " not the row " ) ;
break ; } i ++ ; } found = ( i == primaryKeyFields . length ) ; if ( found ) { // System . out . println ( " The primary key is found ! " ) ;
// The matching row is found , check the content of all fields
for ( String key : hash1 . keySet ( ) ) { String value1 = hash1 . get ( key ) ; String value2 = hash2 . get ( key ) ; if ( ! value1 . equals ( value2 ) ) { errorsList . add ( "Expected to get " + value1 + " for the field " + key + " but got " + value2 + " for row" + getPrimaryKeysExtraDetails ( hash1 , primaryKeyFields ) ) ; } } break ; } } // The row with the primary key is not found
if ( ! found ) { errorsList . add ( "Cannot find row " + getPrimaryKeysExtraDetails ( hash1 , primaryKeyFields ) ) ; } } return ( errorsList . size ( ) == 0 ) ; |
public class NumberFormat { /** * Formats a number and appends the resulting text to the given string buffer .
* < strong > [ icu ] Note : < / strong > recognizes < code > BigInteger < / code >
* and < code > BigDecimal < / code > objects .
* @ see java . text . Format # format ( Object , StringBuffer , FieldPosition ) */
@ Override public StringBuffer format ( Object number , StringBuffer toAppendTo , FieldPosition pos ) { } } | if ( number instanceof Long ) { return format ( ( ( Long ) number ) . longValue ( ) , toAppendTo , pos ) ; } else if ( number instanceof BigInteger ) { return format ( ( BigInteger ) number , toAppendTo , pos ) ; } else if ( number instanceof java . math . BigDecimal ) { return format ( ( java . math . BigDecimal ) number , toAppendTo , pos ) ; } else if ( number instanceof android . icu . math . BigDecimal ) { return format ( ( android . icu . math . BigDecimal ) number , toAppendTo , pos ) ; } else if ( number instanceof CurrencyAmount ) { return format ( ( CurrencyAmount ) number , toAppendTo , pos ) ; } else if ( number instanceof Number ) { return format ( ( ( Number ) number ) . doubleValue ( ) , toAppendTo , pos ) ; } else { throw new IllegalArgumentException ( "Cannot format given Object as a Number" ) ; } |
public class SimplePathEnumerator { /** * Enumerate the simple paths .
* @ return this object */
public SimplePathEnumerator enumerate ( ) { } } | Iterator < Edge > entryOut = cfg . outgoingEdgeIterator ( cfg . getEntry ( ) ) ; if ( ! entryOut . hasNext ( ) ) { throw new IllegalStateException ( ) ; } Edge entryEdge = entryOut . next ( ) ; LinkedList < Edge > init = new LinkedList < > ( ) ; init . add ( entryEdge ) ; work ( init ) ; if ( DEBUG && work == maxWork ) { System . out . println ( "**** Reached max work! ****" ) ; } return this ; |
public class RuleBasedNumberFormat { /** * Reads this object in from a stream .
* @ param in The stream to read from . */
private void readObject ( java . io . ObjectInputStream in ) throws java . io . IOException { } } | // read the description in from the stream
String description = in . readUTF ( ) ; ULocale loc ; try { loc = ( ULocale ) in . readObject ( ) ; } catch ( Exception e ) { loc = ULocale . getDefault ( Category . FORMAT ) ; } try { roundingMode = in . readInt ( ) ; } catch ( Exception ignored ) { } // build a brand - new RuleBasedNumberFormat from the description ,
// then steal its substructure . This object ' s substructure and
// the temporary RuleBasedNumberFormat drop on the floor and
// get swept up by the garbage collector
RuleBasedNumberFormat temp = new RuleBasedNumberFormat ( description , loc ) ; ruleSets = temp . ruleSets ; ruleSetsMap = temp . ruleSetsMap ; defaultRuleSet = temp . defaultRuleSet ; publicRuleSetNames = temp . publicRuleSetNames ; decimalFormatSymbols = temp . decimalFormatSymbols ; decimalFormat = temp . decimalFormat ; locale = temp . locale ; defaultInfinityRule = temp . defaultInfinityRule ; defaultNaNRule = temp . defaultNaNRule ; |
public class RestletUtilSesameRealm { /** * Builds a SPARQL query to retrieve details of a RestletUtilUser . This method could be
* overridden to search for other information regarding a user .
* @ param userIdentifier
* The unique identifier of the User to search for .
* @ param findAllUsers True to find all users , and false to only find the specified user .
* @ return A String representation of the SPARQL Select query */
protected String buildSparqlQueryToFindUser ( final String userIdentifier , boolean findAllUsers ) { } } | if ( ! findAllUsers && userIdentifier == null ) { throw new NullPointerException ( "User identifier was null" ) ; } final StringBuilder query = new StringBuilder ( ) ; query . append ( " SELECT ?userIdentifier ?userUri ?userSecret ?userFirstName ?userLastName ?userEmail " ) ; query . append ( " WHERE " ) ; query . append ( " { " ) ; query . append ( " ?userUri a " ) ; query . append ( RenderUtils . getSPARQLQueryString ( SesameRealmConstants . OAS_USER ) ) ; query . append ( " . " ) ; query . append ( " ?userUri " ) ; query . append ( RenderUtils . getSPARQLQueryString ( SesameRealmConstants . OAS_USERIDENTIFIER ) ) ; query . append ( " ?userIdentifier . " ) ; query . append ( " OPTIONAL{ ?userUri " ) ; query . append ( RenderUtils . getSPARQLQueryString ( SesameRealmConstants . OAS_USERSECRET ) ) ; query . append ( " ?userSecret . } " ) ; query . append ( " OPTIONAL{ ?userUri " ) ; query . append ( RenderUtils . getSPARQLQueryString ( SesameRealmConstants . OAS_USERFIRSTNAME ) ) ; query . append ( " ?userFirstName . } " ) ; query . append ( " OPTIONAL{ ?userUri " ) ; query . append ( RenderUtils . getSPARQLQueryString ( SesameRealmConstants . OAS_USERLASTNAME ) ) ; query . append ( " ?userLastName . } " ) ; query . append ( " OPTIONAL{ ?userUri " ) ; query . append ( RenderUtils . getSPARQLQueryString ( SesameRealmConstants . OAS_USEREMAIL ) ) ; query . append ( " ?userEmail . } " ) ; if ( ! findAllUsers ) { query . append ( " FILTER(str(?userIdentifier) = \"" + RenderUtils . escape ( userIdentifier ) + "\") " ) ; } query . append ( " } " ) ; return query . toString ( ) ; |
public class RebalanceUtils { /** * Print log to the following logger ( Info level )
* @ param batchId Task id
* @ param logger Logger class
* @ param message The message to print */
public static void printBatchLog ( int batchId , Logger logger , String message ) { } } | logger . info ( "[Rebalance batch id " + batchId + "] " + message ) ; |
public class EventPublisherApplicationAdapter { /** * { @ inheritDoc } */
@ Override public void fromAdmin ( Message message , SessionID sessionId ) { } } | publishEvent ( FromAdmin . of ( message , sessionId ) ) ; |
public class VaultsInner { /** * Update access policies in a key vault in the specified subscription .
* @ param resourceGroupName The name of the Resource Group to which the vault belongs .
* @ param vaultName Name of the vault
* @ param operationKind Name of the operation . Possible values include : ' add ' , ' replace ' , ' remove '
* @ param properties Properties of the access policy
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ throws CloudException thrown if the request is rejected by server
* @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
* @ return the VaultAccessPolicyParametersInner object if successful . */
public VaultAccessPolicyParametersInner updateAccessPolicy ( String resourceGroupName , String vaultName , AccessPolicyUpdateKind operationKind , VaultAccessPolicyProperties properties ) { } } | return updateAccessPolicyWithServiceResponseAsync ( resourceGroupName , vaultName , operationKind , properties ) . toBlocking ( ) . single ( ) . body ( ) ; |
public class CmsCategoryService {

    /**
     * Creates a new category.<p>
     *
     * Will use the same category repository as the parent if specified,
     * or the closest category repository to the reference path if specified,
     * or the centralized category repository in all other cases.<p>
     *
     * @param cms the current cms context
     * @param parent the parent category or <code>null</code> for a new top level category
     * @param name the name of the new category
     * @param title the title
     * @param description the description
     * @param referencePath the reference path for the category repository
     * @return the new created category
     * @throws CmsException if something goes wrong
     */
    public CmsCategory createCategory(CmsObject cms, CmsCategory parent, String name, String title, String description, String referencePath) throws CmsException {
        // Non-blank title / description become resource properties on the category folder.
        List<CmsProperty> properties = new ArrayList<CmsProperty>();
        if (CmsStringUtil.isNotEmptyOrWhitespaceOnly(title)) {
            properties.add(new CmsProperty(CmsPropertyDefinition.PROPERTY_TITLE, title, null));
        }
        if (CmsStringUtil.isNotEmptyOrWhitespaceOnly(description)) {
            properties.add(new CmsProperty(CmsPropertyDefinition.PROPERTY_DESCRIPTION, description, null));
        }
        // Resolve the repository folder: the parent's path, else the repository for
        // the reference path, else the centralized repository.
        String folderPath = "";
        if (parent != null) {
            folderPath += parent.getRootPath();
        } else {
            if (referencePath == null) {
                folderPath += CmsCategoryService.CENTRALIZED_REPOSITORY;
            } else {
                List<String> repositories = getCategoryRepositories(cms, referencePath);
                // take the last one (presumably the closest repository to the
                // reference path — depends on getCategoryRepositories ordering)
                folderPath = repositories.get(repositories.size() - 1);
            }
        }
        folderPath = cms.getRequestContext().removeSiteRoot(internalCategoryRootPath(folderPath, name));
        CmsResource resource;
        try {
            resource = cms.createResource(folderPath, CmsResourceTypeFolder.RESOURCE_TYPE_ID, null, properties);
        } catch (CmsVfsResourceNotFoundException e) {
            // maybe the centralized repository is missing, try to create it
            cms.createResource(CmsCategoryService.CENTRALIZED_REPOSITORY, CmsResourceTypeFolder.RESOURCE_TYPE_ID);
            // now try again
            resource = cms.createResource(folderPath, CmsResourceTypeFolder.RESOURCE_TYPE_ID, null, properties);
        }
        return getCategory(cms, resource);
    }
}
public class GetAdUnitHierarchy {

    /**
     * Runs the example: fetches the network's effective root ad unit and all
     * ad units, then displays them as a tree.
     *
     * @param adManagerServices the services factory.
     * @param session the session.
     * @throws ApiException if the API request failed with one or more service errors.
     * @throws RemoteException if the API request failed due to other errors.
     */
    public static void runExample(AdManagerServices adManagerServices, AdManagerSession session) throws RemoteException {
        // Get the InventoryService.
        InventoryServiceInterface inventoryService = adManagerServices.get(session, InventoryServiceInterface.class);
        // Get the NetworkService.
        NetworkServiceInterface networkService = adManagerServices.get(session, NetworkServiceInterface.class);
        // Get the effective root ad unit.
        String rootAdUnitId = networkService.getCurrentNetwork().getEffectiveRootAdUnitId();
        // Create a statement to select only the root ad unit by ID.
        StatementBuilder statementBuilder = new StatementBuilder().where("id = :id").orderBy("id ASC").limit(1).withBindVariableValue("id", rootAdUnitId);
        AdUnitPage page = inventoryService.getAdUnitsByStatement(statementBuilder.toStatement());
        // Exactly one result is expected for the root ad unit id.
        AdUnit effectiveRootAdUnit = Iterables.getOnlyElement(Arrays.asList(page.getResults()));
        // Get all ad units and display them under the effective root.
        List<AdUnit> adUnits = getAllAdUnits(adManagerServices, session);
        buildAndDisplayAdUnitTree(effectiveRootAdUnit, adUnits);
    }
}
public class BehaviorHistoryInfo { /** * 对应insertCount的数据统计平均值 */
public Long getInsertCountAvg ( ) { } } | Long insertCountAvg = 0L ; if ( items . size ( ) != 0 ) { for ( TableStat item : items ) { if ( item . getEndTime ( ) . equals ( item . getStartTime ( ) ) ) { insertCountAvg += item . getInsertCount ( ) ; } else { insertCountAvg += item . getInsertCount ( ) * 1000 / ( item . getEndTime ( ) . getTime ( ) - item . getStartTime ( ) . getTime ( ) ) ; } } insertCountAvg = insertCountAvg / items . size ( ) ; } return insertCountAvg ; |
public class ArtifactHelpers { /** * Exclusive can be specified by wildcard :
* - - groupId : artifactId : * : *
* - - groupId : * : * : *
* And in general , to require a strict match up to the version and the classifier is not necessary
* TODO : the correct fix would be to rewrite { @ link Exclusion # equals ( Object ) } , but what about the boundary case :
* If contains ( id1 : * : * : * , id1 : id2 : * : * ) = = true , then that ' s equals ? ?
* TODO : there must be useful code in Aether or maven on this topic
* @ param exclusions Collection of exclusions
* @ param exclusion Specific exclusion to search through collection for
* @ return boolean */
public static boolean containsExclusion ( Collection < Exclusion > exclusions , Exclusion exclusion ) { } } | return Optional . ofNullable ( exclusions ) . orElse ( Collections . emptyList ( ) ) . stream ( ) . anyMatch ( selectedExclusion -> null != exclusion && selectedExclusion . getGroupId ( ) . equals ( exclusion . getGroupId ( ) ) && ( selectedExclusion . getArtifactId ( ) . equals ( exclusion . getArtifactId ( ) ) || ( selectedExclusion . getArtifactId ( ) . equals ( ARTIFACT_STAR ) ) ) ) ; |
public class CPDefinitionLinkLocalServiceBaseImpl {

    /**
     * Returns the cp definition link matching the UUID and group.
     *
     * @param uuid the cp definition link's UUID
     * @param groupId the primary key of the group
     * @return the matching cp definition link
     * @throws PortalException if a matching cp definition link could not be found
     */
    @Override
    public CPDefinitionLink getCPDefinitionLinkByUuidAndGroupId(String uuid, long groupId) throws PortalException {
        // Delegates to the persistence layer; findByUUID_G throws when no match exists.
        return cpDefinitionLinkPersistence.findByUUID_G(uuid, groupId);
    }
}
public class JobReceiver {

    /**
     * Adds a Repeat job: registers it in the repeat-job queue and, when it
     * depends on the previous cycle, schedules its first run in the
     * executable queue.
     *
     * @param jobPo the job to add
     * @throws DupEntryException if the same task already exists as a cron job,
     *         or if the executable-queue insert detects a duplicate
     */
    private void addRepeatJob(JobPo jobPo) throws DupEntryException {
        if (appContext.getCronJobQueue().getJob(jobPo.getTaskTrackerNodeGroup(), jobPo.getTaskId()) != null) {
            // The task used to be a cron job and is now submitted as a repeat job.
            throw new DupEntryException();
        }
        // 1. add to repeat job queue
        appContext.getRepeatJobQueue().add(jobPo);
        if (JobUtils.isRelyOnPrevCycle(jobPo)) {
            // Only schedule the first run if no instance is currently executing.
            if (appContext.getExecutingJobQueue().getJob(jobPo.getTaskTrackerNodeGroup(), jobPo.getTaskId()) == null) {
                // 2. add to executable queue
                try {
                    jobPo.setRepeatedCount(1); // first run, so repeatedCount starts at 1
                    jobPo.setInternalExtParam(Constants.EXE_SEQ_ID, JobUtils.generateExeSeqId(jobPo));
                    appContext.getExecutableJobQueue().add(jobPo);
                } catch (DupEntryException e) {
                    // Roll back the repeat-queue insert so the queues stay consistent.
                    appContext.getRepeatJobQueue().remove(jobPo.getJobId());
                    throw e;
                }
            }
        } else {
            // Runs that do not depend on the previous cycle are generated in batches.
            appContext.getNonRelyOnPrevCycleJobScheduler().addScheduleJobForOneHour(jobPo);
        }
    }
}
public class AnalyticsServiceElasticsearch {

    /**
     * This method adds node information to the communication summary nodes
     * constructed based on the communication details.
     *
     * @param stats The map of endpoint (uri[op]) to communication summary stat nodes
     * @param index The index
     * @param criteria The query criteria
     * @param addMetrics Whether to add metrics or just discover any missing nodes
     * @param clients Whether node information should be located for clients (i.e. fragments with
     *        top level Producer node)
     */
    protected void addNodeInformation(Map<String, CommunicationSummaryStatistics> stats, String index, Criteria criteria, boolean addMetrics, boolean clients) {
        BoolQueryBuilder query = buildQuery(criteria, ElasticsearchUtil.TRANSACTION_FIELD, null);
        // Obtain information about the fragments: elapsed-time stats plus the
        // "service" property value, bucketed by uri and operation (including
        // "missing" buckets for documents that lack a uri or operation field).
        StatsBuilder durationBuilder = AggregationBuilders.stats("elapsed").field(ElasticsearchUtil.ELAPSED_FIELD);
        TermsBuilder serviceTerm = AggregationBuilders.terms("serviceTerm").field(ElasticsearchUtil.PROPERTIES_VALUE_FIELD);
        FilterAggregationBuilder propertiesServiceFilter = AggregationBuilders.filter("propertiesServiceFilter").filter(FilterBuilders.queryFilter(QueryBuilders.boolQuery().must(QueryBuilders.matchQuery(ElasticsearchUtil.PROPERTIES_NAME_FIELD, Constants.PROP_SERVICE_NAME)))).subAggregation(serviceTerm);
        NestedBuilder nestedProperties = AggregationBuilders.nested("nestedProperties").path(ElasticsearchUtil.PROPERTIES_FIELD).subAggregation(propertiesServiceFilter);
        TermsBuilder operationsBuilder2 = AggregationBuilders.terms("operations").field(ElasticsearchUtil.OPERATION_FIELD).size(criteria.getMaxResponseSize()).subAggregation(durationBuilder).subAggregation(nestedProperties);
        MissingBuilder missingOperationBuilder2 = AggregationBuilders.missing("missingOperation").field(ElasticsearchUtil.OPERATION_FIELD).subAggregation(durationBuilder).subAggregation(nestedProperties);
        TermsBuilder urisBuilder2 = AggregationBuilders.terms("uris").field(ElasticsearchUtil.URI_FIELD).size(criteria.getMaxResponseSize()).subAggregation(operationsBuilder2).subAggregation(missingOperationBuilder2);
        MissingBuilder missingUriBuilder2 = AggregationBuilders.missing("missingUri").field(ElasticsearchUtil.URI_FIELD).subAggregation(operationsBuilder2).subAggregation(missingOperationBuilder2);
        // Only initial fragments are of interest.
        query = query.must(QueryBuilders.matchQuery("initial", "true"));
        // If interested in clients, then need to identify node details for Producers.
        if (clients) {
            query = query.must(QueryBuilders.matchQuery("type", "Producer"));
        } else {
            query = query.mustNot(QueryBuilders.matchQuery("type", "Producer"));
        }
        SearchRequestBuilder request2 = getBaseSearchRequestBuilder(NODE_DETAILS_TYPE, index, criteria, query, 0);
        request2.addAggregation(urisBuilder2).addAggregation(missingUriBuilder2);
        SearchResponse response2 = getSearchResponse(request2);
        Terms completions = response2.getAggregations().get("uris");
        for (Terms.Bucket urisBucket : completions.getBuckets()) {
            String uri = urisBucket.getKey();
            if (clients) {
                // Producer fragments are keyed by the client-encoded form of the uri.
                uri = EndpointUtil.encodeClientURI(uri);
            }
            // Buckets with an explicit operation value.
            for (Terms.Bucket operationBucket : urisBucket.getAggregations().<Terms>get("operations").getBuckets()) {
                Stats elapsed = operationBucket.getAggregations().get("elapsed");
                String id = EndpointUtil.encodeEndpoint(uri, operationBucket.getKey());
                CommunicationSummaryStatistics css = stats.get(id);
                if (css == null) {
                    // Node discovered here but missing from the communication details.
                    css = new CommunicationSummaryStatistics();
                    css.setId(id);
                    css.setUri(uri);
                    css.setOperation(operationBucket.getKey());
                    stats.put(id, css);
                }
                if (addMetrics) {
                    doAddMetrics(css, elapsed, operationBucket.getDocCount());
                }
                String serviceName = serviceName(operationBucket.getAggregations().<Nested>get("nestedProperties").getAggregations().<Filter>get("propertiesServiceFilter").getAggregations().get("serviceTerm"));
                if (serviceName != null) {
                    css.setServiceName(serviceName);
                }
            }
            // Bucket for documents of this uri that have no operation field.
            Missing missingOp = urisBucket.getAggregations().get("missingOperation");
            if (missingOp.getDocCount() > 0) {
                Stats elapsed = missingOp.getAggregations().get("elapsed");
                String id = EndpointUtil.encodeEndpoint(uri, null);
                CommunicationSummaryStatistics css = stats.get(id);
                if (css == null) {
                    css = new CommunicationSummaryStatistics();
                    css.setId(id);
                    css.setUri(uri);
                    stats.put(id, css);
                }
                if (addMetrics) {
                    doAddMetrics(css, elapsed, missingOp.getDocCount());
                }
                String serviceName = serviceName(missingOp.getAggregations().<Nested>get("nestedProperties").getAggregations().<Filter>get("propertiesServiceFilter").getAggregations().get("serviceTerm"));
                if (serviceName != null) {
                    css.setServiceName(serviceName);
                }
            }
        }
        // Documents that have no uri field at all, bucketed by operation only.
        Missing missingUri = response2.getAggregations().get("missingUri");
        if (missingUri.getDocCount() > 0) {
            Terms operations = missingUri.getAggregations().get("operations");
            for (Terms.Bucket operationBucket : operations.getBuckets()) {
                Stats elapsed = operationBucket.getAggregations().get("elapsed");
                String id = EndpointUtil.encodeEndpoint(null, operationBucket.getKey());
                CommunicationSummaryStatistics css = stats.get(id);
                if (css == null) {
                    css = new CommunicationSummaryStatistics();
                    css.setId(id);
                    css.setOperation(operationBucket.getKey());
                    stats.put(id, css);
                }
                String serviceName = serviceName(operationBucket.getAggregations().<Nested>get("nestedProperties").getAggregations().<Filter>get("propertiesServiceFilter").getAggregations().get("serviceTerm"));
                if (serviceName != null) {
                    css.setServiceName(serviceName);
                }
                if (addMetrics) {
                    doAddMetrics(css, elapsed, operationBucket.getDocCount());
                }
            }
        }
    }
}
public class Person { /** * Returns for given parameter < i > _ name < / i > the instance of class
* { @ link Person } .
* @ param _ name name to search in the cache
* @ throws EFapsException on error
* @ return instance of class { @ link Person }
* @ see # CACHE
* @ see # getFromDB */
public static Person get ( final String _name ) throws EFapsException { } } | final Cache < String , Person > cache = InfinispanCache . get ( ) . < String , Person > getCache ( Person . NAMECACHE ) ; if ( ! cache . containsKey ( _name ) ) { Person . getPersonFromDB ( Person . SQL_NAME , _name ) ; } return cache . get ( _name ) ; |
public class RelationalOperations {

    /**
     * Returns true if polyline_a is disjoint from polyline_b.
     */
    private static boolean polylineDisjointPolyline_(Polyline polyline_a, Polyline polyline_b, double tolerance, ProgressTracker progress_tracker) {
        // Quick rasterize test to see whether the the geometries are disjoint.
        if (tryRasterizedContainsOrDisjoint_(polyline_a, polyline_b, tolerance, false) == Relation.disjoint)
            return true;
        MultiPathImpl multi_path_impl_a = (MultiPathImpl) polyline_a._getImpl();
        MultiPathImpl multi_path_impl_b = (MultiPathImpl) polyline_b._getImpl();
        PairwiseIntersectorImpl intersector_paths = new PairwiseIntersectorImpl(multi_path_impl_a, multi_path_impl_b, tolerance, true);
        // NOTE(review): returning false ("not disjoint") when the pairwise
        // intersector yields no candidate pair looks counter-intuitive —
        // confirm against PairwiseIntersectorImpl.next() semantics.
        if (!intersector_paths.next())
            return false;
        // Disjoint iff no actual segment-level intersection is found.
        return !linearPathIntersectsLinearPath_(polyline_a, polyline_b, tolerance);
    }
}
public class UpdateClientCertificateRequestMarshaller {

    /**
     * Marshall the given parameter object.
     *
     * @param updateClientCertificateRequest request to marshall; must not be {@code null}
     * @param protocolMarshaller target protocol marshaller
     * @throws SdkClientException if the request is {@code null} or marshalling fails
     */
    public void marshall(UpdateClientCertificateRequest updateClientCertificateRequest, ProtocolMarshaller protocolMarshaller) {
        if (updateClientCertificateRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(updateClientCertificateRequest.getClientCertificateId(), CLIENTCERTIFICATEID_BINDING);
            protocolMarshaller.marshall(updateClientCertificateRequest.getPatchOperations(), PATCHOPERATIONS_BINDING);
        } catch (Exception e) {
            // Wrap any failure in the SDK's client exception, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class AwsSecurityFindingFilters { /** * The text of a note .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setNoteText ( java . util . Collection ) } or { @ link # withNoteText ( java . util . Collection ) } if you want to override
* the existing values .
* @ param noteText
* The text of a note .
* @ return Returns a reference to this object so that method calls can be chained together . */
public AwsSecurityFindingFilters withNoteText ( StringFilter ... noteText ) { } } | if ( this . noteText == null ) { setNoteText ( new java . util . ArrayList < StringFilter > ( noteText . length ) ) ; } for ( StringFilter ele : noteText ) { this . noteText . add ( ele ) ; } return this ; |
public class ST_RemoveRepeatedPoints { /** * Removes duplicated coordinates within a GeometryCollection
* @ param geometryCollection
* @ param tolerance to delete the coordinates
* @ return
* @ throws java . sql . SQLException */
public static GeometryCollection removeDuplicateCoordinates ( GeometryCollection geometryCollection , double tolerance ) throws SQLException { } } | ArrayList < Geometry > geoms = new ArrayList < > ( ) ; for ( int i = 0 ; i < geometryCollection . getNumGeometries ( ) ; i ++ ) { Geometry geom = geometryCollection . getGeometryN ( i ) ; geoms . add ( removeDuplicateCoordinates ( geom , tolerance ) ) ; } return FACTORY . createGeometryCollection ( GeometryFactory . toGeometryArray ( geoms ) ) ; |
public class GitlabAPI { /** * Delete a group .
* @ param groupId the group id
* @ throws IOException on gitlab api call error */
public void deleteGroup ( Integer groupId ) throws IOException { } } | String tailUrl = GitlabGroup . URL + "/" + groupId ; retrieve ( ) . method ( DELETE ) . to ( tailUrl , Void . class ) ; |
public class Session { /** * Run some code in session
* @ param id session id
* @ return */
public static String start ( String id ) { } } | if ( idLocal . get ( ) != null ) { return null ; } else { MDC . put ( "sessionId" , id ) ; if ( LOG . isDebugEnabled ( ) ) LOG . debug ( "Initialize session id = " + id ) ; idLocal . set ( id ) ; return id ; } |
public class CPDAvailabilityEstimatePersistenceImpl { /** * Returns the last cpd availability estimate in the ordered set where uuid = & # 63 ; .
* @ param uuid the uuid
* @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > )
* @ return the last matching cpd availability estimate
* @ throws NoSuchCPDAvailabilityEstimateException if a matching cpd availability estimate could not be found */
@ Override public CPDAvailabilityEstimate findByUuid_Last ( String uuid , OrderByComparator < CPDAvailabilityEstimate > orderByComparator ) throws NoSuchCPDAvailabilityEstimateException { } } | CPDAvailabilityEstimate cpdAvailabilityEstimate = fetchByUuid_Last ( uuid , orderByComparator ) ; if ( cpdAvailabilityEstimate != null ) { return cpdAvailabilityEstimate ; } StringBundler msg = new StringBundler ( 4 ) ; msg . append ( _NO_SUCH_ENTITY_WITH_KEY ) ; msg . append ( "uuid=" ) ; msg . append ( uuid ) ; msg . append ( "}" ) ; throw new NoSuchCPDAvailabilityEstimateException ( msg . toString ( ) ) ; |
public class SequenceMixin { /** * Performs a linear search of the given Sequence for the given compound .
* Once we find the compound we return the position . */
public static < C extends Compound > int indexOf ( Sequence < C > sequence , C compound ) { } } | int index = 1 ; for ( C currentCompound : sequence ) { if ( currentCompound . equals ( compound ) ) { return index ; } index ++ ; } return 0 ; |
public class PdfGraphics2D {

    /**
     * Sets the current font.
     *
     * @param f the new font; a {@code null} value is ignored
     */
    public void setFont(Font f) {
        if (f == null)
            return;
        // Shapes-only mode: just remember the font, no PDF base font is needed.
        if (onlyShapes) {
            font = f;
            return;
        }
        // Same instance as the current font: nothing to update.
        if (f == font)
            return;
        font = f;
        fontSize = f.getSize2D();
        baseFont = getCachedBaseFont(f);
    }
}
public class PlanNodeTree {

    /**
     * Load plan nodes from the "PLAN_NODE" array. All the nodes are from
     * a substatement with the id = stmtId.
     *
     * @param stmtId substatement id the loaded nodes are registered under
     * @param jArray - PLAN_NODES
     * @param db catalog database used when loading node contents
     * @throws JSONException
     */
    private void loadPlanNodesFromJSONArrays(int stmtId, JSONArray jArray, Database db) {
        List<AbstractPlanNode> planNodes = new ArrayList<>();
        int size = jArray.length();
        try {
            // First pass: instantiate every node from its JSON description.
            for (int i = 0; i < size; i++) {
                JSONObject jobj = jArray.getJSONObject(i);
                String nodeTypeStr = jobj.getString("PLAN_NODE_TYPE");
                PlanNodeType nodeType = PlanNodeType.get(nodeTypeStr);
                AbstractPlanNode apn = nodeType.getPlanNodeClass().newInstance();
                apn.loadFromJSONObject(jobj, db);
                planNodes.add(apn);
            }
            // link children and parents
            for (int i = 0; i < size; i++) {
                JSONObject jobj = jArray.getJSONObject(i);
                if (jobj.has("CHILDREN_IDS")) {
                    AbstractPlanNode parent = planNodes.get(i);
                    JSONArray children = jobj.getJSONArray("CHILDREN_IDS");
                    for (int j = 0; j < children.length(); j++) {
                        AbstractPlanNode child = getNodeofId(children.getInt(j), planNodes);
                        parent.addAndLinkChild(child);
                    }
                }
            }
            m_planNodesListMap.put(stmtId, planNodes);
        } catch (JSONException | InstantiationException | IllegalAccessException e) {
            // On failure the partially built list is discarded (never registered).
            System.err.println(e);
            e.printStackTrace();
        }
    }
}
public class LineNumberPanel {

    /**
     * Walks up the Swing containment hierarchy from the given editor pane and
     * returns the first enclosing {@link JScrollPane}, if any.
     *
     * @param editorPane an editor pane
     * @return the nearest ancestor {@code JScrollPane} of the editor pane, or
     *         {@code null} when the pane is not inside a scroll pane
     */
    public JScrollPane getScrollPane(JTextComponent editorPane) {
        for (Container ancestor = editorPane.getParent(); ancestor != null; ancestor = ancestor.getParent()) {
            if (ancestor instanceof JScrollPane) {
                return (JScrollPane) ancestor;
            }
        }
        return null;
    }
}
public class GBSNode { /** * Find the insert point for a new key .
* < p > Find the insert point for a new key . This method finds the point
* AFTER which the new key should be inserted . The key does not
* need to be bounded by the node value and duplicates are allowed .
* If the new key is less than the lowest value already in the node ,
* - 1 is returned as the insert point . < / p >
* < p > If the node is full , the PRE - insert point returned may be the
* right - most slot in the node . In that case , the new key REPLACES
* the maximum value in the node . < / p >
* @ param lower Lower bound for search
* @ param upper Upper bound for search
* @ param new1 New Object to be inserted
* @ param point Found insertion point */
private void findIndex ( int lower , int upper , Object new1 , NodeInsertPoint point ) { } } | int nkeys = numKeys ( lower , upper ) ; if ( nkeys < 4 ) sequentialFindIndex ( lower , upper , new1 , point ) ; else binaryFindIndex ( lower , upper , new1 , point ) ; |
public class PersistentMessageStoreImpl {

    /**
     * Stops the Persistent Message Store.
     *
     * @param mode specifies the type of stop operation which is to
     *        be performed.
     */
    public void stop(int mode) {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(this, tc, "stop", "Mode=" + mode);
        // Defect 363755
        // Stop any new work coming in as soon as we start stopping.
        // This way we make sure nothing is attempted during the
        // (possibly long) time it takes for the OM to shutdown.
        _available = false;
        // Defect 496893
        // Set the shutdown flag so we can tell if the callback from
        // the object manager was requested or not.
        _shutdownRequested = true;
        // Defect 549131
        // Check to see if we have an active startup thread
        if (_starting) {
            // Synchronize with the startup thread
            synchronized (this) {
                // We have an active startup thread so we need
                // to inform it that it should give up.
                _starting = false;
                // We may need to wake up the startup thread for
                // it to check the flag so notify.
                notify();
            }
        }
        // Defect 530772
        // Stop spill dispatcher first so that it
        // can flush it's buffers before we stop the
        // object manager.
        if (_spillDispatcher != null) {
            _spillDispatcher.stop(mode);
            _spillDispatcher = null;
        }
        _batchingContextFactory = null;
        if (_objectManager != null) {
            try {
                _objectManager.shutdown();
            } catch (ObjectManagerException ome) {
                // Record the failure via FFDC and trace; shutdown continues regardless.
                com.ibm.ws.ffdc.FFDCFilter.processException(ome, "com.ibm.ws.sib.msgstore.persistence.objectManager.PersistableMessageStoreImpl.stop", "1:764:1.81.1.6", this);
                if (TraceComponent.isAnyTracingEnabled() && tc.isEventEnabled())
                    SibTr.event(this, tc, "Unexpected exception caught stopping persistent message store!", ome);
            }
            // Release the store references once the object manager is down.
            _objectManager = null;
            _permanentStore = null;
            _temporaryStore = null;
        }
        if (_uniqueKeyManager != null) {
            _uniqueKeyManager.stop();
            _uniqueKeyManager = null;
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(this, tc, "stop");
    }
}
public class BaseProcessRecords {

    /**
     * Converts a simple file-glob pattern (e.g. {@code *.log}) to a regular
     * expression by escaping {@code .} and expanding {@code *} to {@code .*}.
     *
     * <p>Inputs that already look like regular expressions — containing
     * {@code [}, <code>{</code>, or an escaped dot — are returned unchanged,
     * as is {@code null}.
     *
     * @param string glob pattern, regex, or {@code null}
     * @return the equivalent regex, or the input untouched when it already
     *         appears to be a regex (or is {@code null})
     */
    public String patternToRegex(String string) {
        final boolean leaveUntouched = string == null
                || string.contains("[")
                || string.contains("{")
                || string.contains("\\.");
        if (leaveUntouched) {
            return string;
        }
        return string.replace(".", "\\.").replace("*", ".*");
    }
}
public class Plane {

    /**
     * Sets the parameters of the plane: (a, b, c) becomes the normal vector
     * and d the plane constant.
     *
     * @return a reference to this plane (for chaining).
     */
    public Plane set(float a, float b, float c, float d) {
        // Reuses the existing normal vector instance rather than allocating.
        _normal.set(a, b, c);
        constant = d;
        return this;
    }
}
public class Interval {

    /**
     * Utility method to get the local start date time. This method combines
     * the start date and the start time to create a date time object.
     *
     * <p>The combined value is computed lazily and cached on first access.
     * NOTE(review): the lazy initialisation is not synchronized; a racing
     * recomputation would yield the same value since LocalDateTime.of is
     * deterministic — confirm callers are single-threaded if identity matters.
     *
     * @return the start local date time
     * @see #getStartDate()
     * @see #getStartTime()
     */
    public LocalDateTime getStartDateTime() {
        if (startDateTime == null) {
            startDateTime = LocalDateTime.of(getStartDate(), getStartTime());
        }
        return startDateTime;
    }
}
public class TreeCoreset { /** * computes the cost of point p with the centre of treenode node */
double treeNodeCostOfPoint ( treeNode node , Point p ) { } } | if ( p . weight == 0.0 ) { return 0.0 ; } // stores the distance between centre and p
double distance = 0.0 ; // loop counter variable
int l ; for ( l = 0 ; l < p . dimension ; l ++ ) { // centroid coordinate of the point
double centroidCoordinatePoint ; if ( p . weight != 0.0 ) { centroidCoordinatePoint = p . coordinates [ l ] / p . weight ; } else { centroidCoordinatePoint = p . coordinates [ l ] ; } // centroid coordinate of the centre
double centroidCoordinateCentre ; if ( node . centre . weight != 0.0 ) { centroidCoordinateCentre = node . centre . coordinates [ l ] / node . centre . weight ; } else { centroidCoordinateCentre = node . centre . coordinates [ l ] ; } distance += ( centroidCoordinatePoint - centroidCoordinateCentre ) * ( centroidCoordinatePoint - centroidCoordinateCentre ) ; } return distance * p . weight ; |
public class ReceiveListenerDispatcher { /** * Allocates an error occurred object from an object pool . */
private ConversationReceiveListenerErrorOccurredInvocation allocateErrorOccurredInvocation ( Connection connection , ConversationReceiveListener listener , SIConnectionLostException exception , int segmentType , int requestNumber , int priority , Conversation conversation ) { } } | if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( this , tc , "allocateErrorOccurredInvocation" , new Object [ ] { connection , listener , exception , "" + segmentType , "" + requestNumber , "" + priority , conversation } ) ; ConversationReceiveListenerErrorOccurredInvocation retInvocation = ( ConversationReceiveListenerErrorOccurredInvocation ) conversationReceiveListenerErrorOccurredInvocationPool . remove ( ) ; if ( retInvocation == null ) { retInvocation = new ConversationReceiveListenerErrorOccurredInvocation ( connection , listener , exception , segmentType , requestNumber , priority , conversation , conversationReceiveListenerErrorOccurredInvocationPool ) ; } else { retInvocation . reset ( connection , listener , exception , segmentType , requestNumber , priority , conversation ) ; } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( this , tc , "allocateDataReceivedInvocation" , retInvocation ) ; return retInvocation ; |
public class VAlarm {

    /**
     * {@inheritDoc}
     */
    public final void validate(final boolean recurse) throws ValidationException {
        /*
         * ; 'action' and 'trigger' are both REQUIRED, ; but MUST NOT occur more than once action / trigger /
         */
        PropertyValidator.getInstance().assertOne(Property.ACTION, getProperties());
        PropertyValidator.getInstance().assertOne(Property.TRIGGER, getProperties());
        /*
         * ; 'duration' and 'repeat' are both optional, ; and MUST NOT occur more than once each, ; but if one occurs,
         * so MUST the other duration / repeat /
         */
        PropertyValidator.getInstance().assertOneOrLess(Property.DURATION, getProperties());
        PropertyValidator.getInstance().assertOneOrLess(Property.REPEAT, getProperties());
        try {
            // Either both DURATION and REPEAT are absent...
            PropertyValidator.getInstance().assertNone(Property.DURATION, getProperties());
            PropertyValidator.getInstance().assertNone(Property.REPEAT, getProperties());
        } catch (ValidationException ve) {
            // ...or at least one is present, in which case both must occur exactly once.
            PropertyValidator.getInstance().assertOne(Property.DURATION, getProperties());
            PropertyValidator.getInstance().assertOne(Property.REPEAT, getProperties());
        }
        /*
         * ; the following is optional, ; and MAY occur more than once x-prop
         */
        // Apply the validator specific to this alarm's ACTION, when one is registered.
        final Validator actionValidator = actionValidators.get(getAction());
        if (actionValidator != null) {
            actionValidator.validate(this);
        }
        if (recurse) {
            validateProperties();
        }
    }
}
public class RaftServiceContext {

    /**
     * Unregister the given session.
     *
     * @param index The index of the unregister.
     * @param timestamp The timestamp of the unregister.
     * @param session The session to unregister.
     * @param expired Whether the session was expired by the leader.
     */
    public void closeSession(long index, long timestamp, RaftSession session, boolean expired) {
        log.debug("Closing session {}", session.sessionId());
        // Update the session's timestamp to prevent it from being expired.
        session.setLastUpdated(timestamp);
        // Update the state machine index/timestamp.
        tick(index, timestamp);
        // Expire sessions that have timed out.
        expireSessions(currentTimestamp);
        // Remove the session from the sessions list. The parameter is
        // reassigned to the instance actually removed; null means the session
        // was already gone (e.g. removed by expireSessions above).
        if (expired) {
            session = sessions.removeSession(session.sessionId());
            if (session != null) {
                session.expire();
                service.expire(session.sessionId());
            }
        } else {
            session = sessions.removeSession(session.sessionId());
            if (session != null) {
                session.close();
                service.close(session.sessionId());
            }
        }
        // Commit the index, causing events to be sent to clients if necessary.
        commit();
    }
}
public class DataProviderContext { /** * Adds more triggers to the validator .
* @ param triggers Triggers to be added .
* @ return Same data provider context . */
public DataProviderContext on ( final Trigger ... triggers ) { } } | if ( triggers != null ) { Collections . addAll ( registeredTriggers , triggers ) ; } return this ; |
public class FileSystemDeploymentService {

    /**
     * Perform a post-boot scan to remove any deployments added during boot
     * that failed to deploy properly. This method isn't private solely to
     * allow a unit test in the same package to call it.
     */
    void forcedUndeployScan() {
        if (acquireScanLock()) {
            try {
                ROOT_LOGGER.tracef("Performing a post-boot forced undeploy scan for scan directory %s", deploymentDir.getAbsolutePath());
                ScanContext scanContext = new ScanContext(deploymentOperations);
                // Add remove actions to the plan for anything we count as
                // deployed that we didn't find on the scan
                for (Map.Entry<String, DeploymentMarker> missing : scanContext.toRemove.entrySet()) {
                    // remove successful deployment and left will be removed
                    if (scanContext.registeredDeployments.containsKey(missing.getKey())) {
                        scanContext.registeredDeployments.remove(missing.getKey());
                    }
                }
                // Scanner-owned deployments = registered minus the persistent ones.
                Set<String> scannedDeployments = new HashSet<String>(scanContext.registeredDeployments.keySet());
                scannedDeployments.removeAll(scanContext.persistentDeployments);
                List<ScannerTask> scannerTasks = scanContext.scannerTasks;
                for (String toUndeploy : scannedDeployments) {
                    scannerTasks.add(new UndeployTask(toUndeploy, deploymentDir, scanContext.scanStartTime, true));
                }
                try {
                    executeScannerTasks(scannerTasks, deploymentOperations, true);
                } catch (InterruptedException e) {
                    // Preserve the interrupt status for callers.
                    Thread.currentThread().interrupt();
                }
                ROOT_LOGGER.tracef("Forced undeploy scan complete");
            } catch (Exception e) {
                ROOT_LOGGER.scanException(e, deploymentDir.getAbsolutePath());
            } finally {
                releaseScanLock();
            }
        }
    }
}
public class BeanDefinitionWriter {
    /**
     * Visits a pre-destroy method injection point.
     *
     * @param declaringType the declaring type of the method; either a Class or a String naming the type
     * @param returnType    the return type of the method
     * @param methodName    the method name
     */
    public void visitPreDestroyMethod(Object declaringType, Object returnType, String methodName) {
        // Emit the pre-destroy method scaffolding before recording this injection point.
        visitPreDestroyMethodDefinition();
        // The three empty maps cover argument types, qualifiers and generic types — this
        // overload is for a no-argument method with no annotation metadata.
        // NOTE(review): the boolean false presumably means "requiresReflection" — confirm
        // against the MethodVisitData constructor.
        final MethodVisitData methodVisitData = new MethodVisitData(declaringType, false, returnType, methodName,
                Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap(), AnnotationMetadata.EMPTY_METADATA);
        preDestroyMethodVisits.add(methodVisitData);
        visitMethodInjectionPointInternal(methodVisitData, constructorVisitor, preDestroyMethodVisitor,
                preDestroyInstanceIndex, ADD_PRE_DESTROY_METHOD);
    }
}
public class ProtocolDataUnit {
    /**
     * Deserializes an array (starting from the given offset) and stores the information
     * in {@code AdditionalHeaderSegment} objects appended to {@code additionalHeaderSegments}.
     *
     * @param pdu    the {@code ByteBuffer} to read from
     * @param offset the offset to start from
     * @return the number of bytes consumed
     * @throws InternetSCSIException if any violation of the iSCSI standard emerges
     */
    private final int deserializeAdditionalHeaderSegments(final ByteBuffer pdu, final int offset) throws InternetSCSIException {
        // parsing Additional Header Segment
        int off = offset;
        int ahsLength = basicHeaderSegment.getTotalAHSLength();
        // Consume AHS entries until the declared total length is exhausted.
        // NOTE(review): an AHS reporting length 0 would loop forever — presumably
        // AdditionalHeaderSegment.deserialize rejects that; confirm.
        while (ahsLength != 0) {
            final AdditionalHeaderSegment tmpAHS = new AdditionalHeaderSegment();
            tmpAHS.deserialize(pdu, off);
            additionalHeaderSegments.add(tmpAHS);
            ahsLength -= tmpAHS.getLength();
            // Advance by the number of bytes the segment actually read (buffer position of its field).
            off += tmpAHS.getSpecificField().position();
        }
        return off - offset;
    }
}
public class CmsScheduledJobInfo { /** * Updates the request time in the internal context information of the user with the current system time . < p >
* This is required before executing the job , otherwise the context information request time would be the time
* the context object was initially created . < p > */
protected void updateContextRequestTime ( ) { } } | CmsContextInfo context = ( CmsContextInfo ) m_context . clone ( ) ; context . setRequestTime ( System . currentTimeMillis ( ) ) ; context . freeze ( ) ; m_context = context ; |
public class FinalizeMigrationOperation {
    /**
     * Notifies all {@link MigrationAwareService}s that the migration finished, letting each
     * execute its commit or rollback logic. If this node was the source and backup replica
     * for the partition, the services are first told the migration is starting.
     */
    private void notifyServices() {
        PartitionMigrationEvent event = getPartitionMigrationEvent();
        Collection<MigrationAwareService> migrationAwareServices = getMigrationAwareServices();
        // Old backup owner is not notified about migration until migration
        // is committed on destination. This is the only place the backup owner
        // learns the replica has moved away from itself.
        if (isOldBackupReplicaOwner()) {
            // Execute beforeMigration on the old backup before commit/rollback.
            for (MigrationAwareService service : migrationAwareServices) {
                beforeMigration(event, service);
            }
        }
        // Commit or rollback on every service.
        for (MigrationAwareService service : migrationAwareServices) {
            finishMigration(event, service);
        }
    }
}
public class LinearClassifier { /** * Given a datum ' s features , returns a counter mapping from each
* class name to the log probability of that class .
* Looking at the the sum of e ^ v for each count v , should be 1. */
public Counter < L > logProbabilityOf ( int [ ] features ) { } } | Counter < L > scores = scoresOf ( features ) ; Counters . logNormalizeInPlace ( scores ) ; return scores ; |
public class IamCredentialsClient { /** * Exchange a JWT signed by third party identity provider to an OAuth 2.0 access token
* < p > Sample code :
* < pre > < code >
* try ( IamCredentialsClient iamCredentialsClient = IamCredentialsClient . create ( ) ) {
* ServiceAccountName name = ServiceAccountName . of ( " [ PROJECT ] " , " [ SERVICE _ ACCOUNT ] " ) ;
* List & lt ; String & gt ; scope = new ArrayList & lt ; & gt ; ( ) ;
* String jwt = " " ;
* GenerateIdentityBindingAccessTokenResponse response = iamCredentialsClient . generateIdentityBindingAccessToken ( name , scope , jwt ) ;
* < / code > < / pre >
* @ param name The resource name of the service account for which the credentials are requested ,
* in the following format : ` projects / - / serviceAccounts / { ACCOUNT _ EMAIL _ OR _ UNIQUEID } ` .
* @ param scope Code to identify the scopes to be included in the OAuth 2.0 access token . See
* https : / / developers . google . com / identity / protocols / googlescopes for more information . At
* least one value required .
* @ param jwt Required . Input token . Must be in JWT format according to RFC7523
* ( https : / / tools . ietf . org / html / rfc7523 ) and must have ' kid ' field in the header . Supported
* signing algorithms : RS256 ( RS512 , ES256 , ES512 coming soon ) . Mandatory payload fields
* ( along the lines of RFC 7523 , section 3 ) : - iss : issuer of the token . Must provide a
* discovery document at $ iss / . well - known / openid - configuration . The document needs to be
* formatted according to section 4.2 of the OpenID Connect Discovery 1.0 specification . -
* iat : Issue time in seconds since epoch . Must be in the past . - exp : Expiration time in
* seconds since epoch . Must be less than 48 hours after iat . We recommend to create tokens
* that last shorter than 6 hours to improve security unless business reasons mandate longer
* expiration times . Shorter token lifetimes are generally more secure since tokens that have
* been exfiltrated by attackers can be used for a shorter time . you can configure the maximum
* lifetime of the incoming token in the configuration of the mapper . The resulting Google
* token will expire within an hour or at " exp " , whichever is earlier . - sub : JWT subject ,
* identity asserted in the JWT . - aud : Configured in the mapper policy . By default the
* service account email .
* < p > Claims from the incoming token can be transferred into the output token accoding to the
* mapper configuration . The outgoing claim size is limited . Outgoing claims size must be less
* than 4kB serialized as JSON without whitespace .
* < p > Example header : { " alg " : " RS256 " , " kid " : " 92a4265e14ab04d4d228a48d10d4ca31610936f8 " }
* Example payload : { " iss " : " https : / / accounts . google . com " , " iat " : 1517963104 , " exp " :
* 1517966704 , " aud " : " https : / / iamcredentials . googleapis . com / " , " sub " :
* " 113475438248934895348 " , " my _ claims " : { " additional _ claim " : " value " } }
* @ throws com . google . api . gax . rpc . ApiException if the remote call fails */
public final GenerateIdentityBindingAccessTokenResponse generateIdentityBindingAccessToken ( ServiceAccountName name , List < String > scope , String jwt ) { } } | GenerateIdentityBindingAccessTokenRequest request = GenerateIdentityBindingAccessTokenRequest . newBuilder ( ) . setName ( name == null ? null : name . toString ( ) ) . addAllScope ( scope ) . setJwt ( jwt ) . build ( ) ; return generateIdentityBindingAccessToken ( request ) ; |
public class RequestUtils { /** * Reconstructs the current location ( URL ) from the request and
* servlet configuration ( which is the only way to know which server
* we ' re on because that ' s not provided in the request ) and returns
* it .
* @ return The URL that represents our current location . */
public static String getLocation ( HttpServletRequest req ) { } } | StringBuffer rurl = req . getRequestURL ( ) ; String qs = req . getQueryString ( ) ; if ( qs != null ) { rurl . append ( "?" ) . append ( qs ) ; } return rurl . toString ( ) ; |
public class AbstractTreebankParserParams {
    /**
     * Takes a Tree and a collinizer and returns a Collection of labeled
     * {@link Constituent}s for PARSEVAL.
     *
     * @param t          the tree to extract constituents from
     * @param collinizer the TreeTransformer used to normalize the tree for evaluation
     * @return the bag of Constituents for PARSEVAL
     */
    public static Collection<Constituent> parsevalObjectify(Tree t, TreeTransformer collinizer) {
        // Delegates to the three-argument overload; the meaning of the boolean flag is not
        // visible here — presumably it enables the default labeling behavior; confirm
        // against the overload's javadoc.
        return parsevalObjectify(t, collinizer, true);
    }
}
public class ProteinPocketFinder {
    /**
     * Performs the clustering; called by findPockets().
     * Recursive flood fill: starting from {@code root}, every unvisited neighbouring grid
     * point within {@code linkageRadius} whose PSP count is at least {@code minPSCluster}
     * is added to {@code subPocket} and expanded in turn.
     *
     * @param root      the grid point to expand from
     * @param subPocket the accumulator list of pocket points (mutated and returned)
     * @param dim       the grid dimensions, used to clip the search box
     * @return the same {@code subPocket} list, with {@code root} appended last
     */
    public List<Point3d> clusterPSPPocket(Point3d root, List<Point3d> subPocket, int[] dim) {
        // Mark the root as visited using a "x.y.z" string key (integer-truncated coordinates).
        visited.put((int) root.x + "." + (int) root.y + "." + (int) root.z, 1);
        // Bounding box of the neighbourhood: [x±r, y±r, z±r], clipped to the grid.
        int[] minMax = {0, 0, 0, 0, 0, 0};
        minMax[0] = (int) (root.x - linkageRadius);
        minMax[1] = (int) (root.x + linkageRadius);
        minMax[2] = (int) (root.y - linkageRadius);
        minMax[3] = (int) (root.y + linkageRadius);
        minMax[4] = (int) (root.z - linkageRadius);
        minMax[5] = (int) (root.z + linkageRadius);
        minMax = checkBoundaries(minMax, dim);
        for (int k = minMax[0]; k <= minMax[1]; k++) {
            for (int m = minMax[2]; m <= minMax[3]; m++) {
                for (int l = minMax[4]; l <= minMax[5]; l++) {
                    Point3d node = new Point3d(k, m, l);
                    // Recurse into neighbours that pass the cluster threshold and are unvisited.
                    if (this.grid[k][m][l] >= minPSCluster && !visited.containsKey(k + "." + m + "." + l)) {
                        subPocket.add(node);
                        this.clusterPSPPocket(node, subPocket, dim);
                    }
                }
            }
        }
        // The root itself is appended after its neighbourhood has been explored.
        subPocket.add(root);
        return subPocket;
    }
}
public class MessageStreams {
    /**
     * Reads a file containing entries of a given type. Tries to detect the entry format of
     * the file based on file magic; if not detected, falls back to the default binary
     * serializer format.
     *
     * @param file       the file to read
     * @param serializer the serializer to use
     * @param descriptor the descriptor of the entry type of the file
     * @param <Message>  the message type
     * @param <Field>    the message field type
     * @return the stream that reads the file
     * @throws IOException when unable to open the stream
     */
    @Nonnull
    public static <Message extends PMessage<Message, Field>, Field extends PField> Stream<Message> path(
            Path file, Serializer serializer, PMessageDescriptor<Message, Field> descriptor) throws IOException {
        // Delegates to the File-based overload; all detection logic lives there.
        return file(file.toFile(), serializer, descriptor);
    }
}
public class Source { /** * Sets the maximum distance for attenuation . */
public void setMaxDistance ( float distance ) { } } | if ( _maxDistance != distance ) { AL10 . alSourcef ( _id , AL10 . AL_MAX_DISTANCE , _maxDistance = distance ) ; } |
public class Description {
    /**
     * Emits the line that starts at {@code start} and ends at a newline or the {@code end}
     * position, with up to {@code indentation} leading characters skipped and trailing
     * whitespace trimmed.
     *
     * @return the start position of the next line (the {@code end} position)
     */
    private static int emitLine(StringBuilder target, char[] buffer, int start, int end, int indentation) {
        // 'last' tracks one past the final non-whitespace code point seen so far,
        // so trailing whitespace is trimmed from the appended text.
        int last = start;
        for (int pos = start, cp; pos < end; pos += charCount(cp)) {
            cp = codePointAt(buffer, pos);
            if (cp == '\n') {
                // Shrink the scan window so the loop stops right after this newline;
                // the returned value then points at the start of the next line.
                end = pos + 1;
            }
            if (indentation > 0) {
                // Still consuming the indentation prefix: once the counter hits zero,
                // the emitted text starts just past the last indentation character.
                if (--indentation == 0) {
                    start = pos + 1;
                }
                last = pos;
            } else if (!isWhitespace(cp)) {
                last = pos + 1;
            }
        }
        target.append(buffer, start, last - start);
        return end;
    }
}
public class MPJwtBadMPConfigAsSystemProperties {
    /**
     * The server is started with all mp-config properties incorrectly configured in the
     * jvm.options file; server.xml has NO mp_jwt config specified, so the config settings
     * come from the system properties (defined in jvm.options).
     * The test should fail authentication because the issuer is bad.
     *
     * @throws Exception on framework failure
     */
    @Mode(TestMode.LITE)
    @Test
    public void MPJwtBadMPConfigAsSystemProperties_MpJwtConfigNotSpecifiedInServerXml() throws Exception {
        // Run the standard flow against an app that has no mp-config of its own,
        // expecting the "bad issuer" failure expectations.
        standardTestFlow(resourceServer, MpJwtFatConstants.NO_MP_CONFIG_IN_APP_ROOT_CONTEXT,
                MpJwtFatConstants.NO_MP_CONFIG_IN_APP_APP,
                MpJwtFatConstants.MPJWT_APP_CLASS_NO_MP_CONFIG_IN_APP,
                setBadIssuerExpectations(resourceServer));
    }
}
public class AccessUrlIfFunction { /** * 获取到当前登录用户凭证 , 如果不存在 , 返回一个匿名凭证
* @ param securityContext
* 当前安全上下文
* @ return 当前登录用户凭证 , 如果不存在 , 返回一个匿名凭证 */
private Authentication getAuthentication ( SecurityContext securityContext ) { } } | // 用户未登录
Authentication authentication = securityContext . getAuthentication ( ) ; if ( authentication == null ) { authentication = new AnonymousAuthenticationToken ( UUID . randomUUID ( ) . toString ( ) , "anonymous" , Collections . < GrantedAuthority > singletonList ( new SimpleGrantedAuthority ( "ROLE_ANONYMOUS" ) ) ) ; } return authentication ; |
public class UDPConnLink {
    /**
     * Common connect logic between sync and async connect requests: reads optional buffer
     * sizes from the virtual connection's state map into the channel config, then creates
     * and wires the UDP network layer, binding to the requested local address/port (or a
     * wildcard when no request context is supplied).
     *
     * @param _udpRequestContextObject the UDPRequestContext carrying the local address,
     *                                 or {@code null} to let the network layer pick the port
     * @throws IOException if the datagram socket cannot be initialized
     */
    private void connectCommon(Object _udpRequestContextObject) throws IOException {
        // Defaults: wildcard address, ephemeral port.
        String localAddress = "*";
        int localPort = 0;
        Map<Object, Object> vcStateMap = getVirtualConnection().getStateMap();
        if (vcStateMap != null) {
            // Size of the buffer the channel should use to read.
            String value = (String) vcStateMap.get(UDPConfigConstants.CHANNEL_RCV_BUFF_SIZE);
            if (value != null) {
                if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                    Tr.debug(tc, UDPConfigConstants.CHANNEL_RCV_BUFF_SIZE + " " + value);
                }
                cfg.setChannelReceiveBufferSize(Integer.parseInt(value));
            }
            // Receive buffer size.
            value = (String) vcStateMap.get(UDPConfigConstants.RCV_BUFF_SIZE);
            if (value != null) {
                if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                    Tr.debug(tc, UDPConfigConstants.RCV_BUFF_SIZE + " " + value);
                }
                cfg.setReceiveBufferSize(Integer.parseInt(value));
            }
            // Send buffer size.
            value = (String) vcStateMap.get(UDPConfigConstants.SEND_BUFF_SIZE);
            if (value != null) {
                if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                    Tr.debug(tc, UDPConfigConstants.SEND_BUFF_SIZE + " " + value);
                }
                cfg.setSendBufferSize(Integer.parseInt(value));
            }
        }
        // Allow for this to be null. If the requestContext is null, then just
        // allow the NetworkLayer to find the port to listen on.
        if (_udpRequestContextObject != null) {
            final UDPRequestContext udpRequestContext = (UDPRequestContext) _udpRequestContextObject;
            final InetSocketAddress addr = udpRequestContext.getLocalAddress();
            localAddress = addr.getAddress().getHostAddress();
            localPort = addr.getPort();
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                Tr.debug(tc, "connect with local address: " + localAddress + " local port: " + localPort);
            }
        }
        // Create the network layer, bind the socket, and hook it back to this link.
        udpNetworkLayer = new UDPNetworkLayer(udpChannel, workQueueMgr, localAddress, localPort);
        udpNetworkLayer.initDatagramSocket(getVirtualConnection());
        udpNetworkLayer.setConnLink(this);
    }
}
public class MediaModuleGenerator {
    /**
     * Generates the embed tag from the metadata's embed information, if present.
     * The element is only attached when it has at least one attribute or child.
     *
     * @param m source metadata
     * @param e element to attach the new element to
     */
    private void generateEmbed(final Metadata m, final Element e) {
        if (m.getEmbed() == null) {
            return;
        }
        final Element embedElement = new Element("embed", NS);
        // Only non-null values become attributes.
        addNotNullAttribute(embedElement, "url", m.getEmbed().getUrl());
        addNotNullAttribute(embedElement, "width", m.getEmbed().getWidth());
        addNotNullAttribute(embedElement, "height", m.getEmbed().getHeight());
        // Each embed param becomes a <param name="...">value</param> child.
        for (final Param param : m.getEmbed().getParams()) {
            final Element paramElement = addNotNullElement(embedElement, "param", param.getValue());
            if (paramElement != null) {
                addNotNullAttribute(paramElement, "name", param.getName());
            }
        }
        // Skip entirely empty <embed/> elements.
        if (embedElement.hasAttributes() || !embedElement.getChildren().isEmpty()) {
            e.addContent(embedElement);
        }
    }
}
public class ToXMLStream { /** * From XSLTC
* Declare a prefix to point to a namespace URI . Inform SAX handler
* if this is a new prefix mapping . */
protected boolean pushNamespace ( String prefix , String uri ) { } } | try { if ( m_prefixMap . pushNamespace ( prefix , uri , m_elemContext . m_currentElemDepth ) ) { startPrefixMapping ( prefix , uri ) ; return true ; } } catch ( SAXException e ) { // falls through
} return false ; |
public class Context { /** * 判断全局变量是否存在
* @ param i
* @ return */
protected boolean exist ( int i ) { } } | if ( i >= this . tempVarStartIndex ) { return true ; } else { Object object = vars [ i ] ; return object != NOT_EXIST_OBJECT ; } |
public class Client { /** * Returns a list of bindings where provided queue is the destination .
* @ param vhost vhost of the exchange
* @ param queue destination queue name
* @ return list of bindings */
public List < BindingInfo > getQueueBindings ( String vhost , String queue ) { } } | final URI uri = uriWithPath ( "./queues/" + encodePathSegment ( vhost ) + "/" + encodePathSegment ( queue ) + "/bindings" ) ; final BindingInfo [ ] result = this . rt . getForObject ( uri , BindingInfo [ ] . class ) ; return asListOrNull ( result ) ; |
public class BarData { /** * Gets the size of the data .
* @ return the data */
public int size ( ) { } } | int ret = 0 ; for ( Collection < T > series : data ) { if ( series . size ( ) > 0 ) ++ ret ; } return ret ; |
public class CFSA2Serializer { /** * Write a v - int to a byte array . */
static int writeVInt ( byte [ ] array , int offset , int value ) { } } | assert value >= 0 : "Can't v-code negative ints." ; while ( value > 0x7F ) { array [ offset ++ ] = ( byte ) ( 0x80 | ( value & 0x7F ) ) ; value >>= 7 ; } array [ offset ++ ] = ( byte ) value ; return offset ; |
public class AbstractHeaderDialogBuilder { /** * Obtains , whether the dialog ' s header should be shown , or not , from a specific theme .
* @ param themeResourceId
* The resource id of the theme , the visibility should be obtained from , as an { @ link
* Integer } value */
private void obtainShowHeader ( @ StyleRes final int themeResourceId ) { } } | TypedArray typedArray = getContext ( ) . getTheme ( ) . obtainStyledAttributes ( themeResourceId , new int [ ] { R . attr . materialDialogShowHeader } ) ; showHeader ( typedArray . getBoolean ( 0 , false ) ) ; |
public class TouchState { /** * Copies the contents of this state object to another .
* @ param target The TouchState to which to copy this state ' s data . */
void copyTo ( TouchState target ) { } } | target . clear ( ) ; for ( int i = 0 ; i < pointCount ; i ++ ) { target . addPoint ( points [ i ] ) ; } target . primaryID = primaryID ; target . window = window ; |
public class SystemFunctionSet { /** * Map keys
* @ param m
* @ return */
@ MapMethod public List < String > keys ( Map < String , Object > m ) { } } | List < String > l = Objects . newArrayList ( ) ; for ( String k : m . keySet ( ) ) { l . add ( k ) ; } return l ; |
public class CmsWorkplace {
    /**
     * Sets site and project in the workplace settings from the request parameters
     * {@link CmsWorkplace#PARAM_WP_SITE} and {@link CmsWorkplace#PARAM_WP_PROJECT}.
     *
     * @param settings the workplace settings
     * @param request  the current request
     * @return true if a reload of the main body frame is required
     */
    public boolean initSettings(CmsWorkplaceSettings settings, HttpServletRequest request) {
        // Check if the user requested a project change.
        String project = request.getParameter(PARAM_WP_PROJECT);
        boolean reloadRequired = false;
        if (project != null) {
            reloadRequired = true;
            try {
                getCms().readProject(new CmsUUID(project));
            } catch (Exception e) {
                // Project not found: fall back to the online project.
                project = String.valueOf(CmsProject.ONLINE_PROJECT_ID);
            }
            try {
                m_cms.getRequestContext().setCurrentProject(getCms().readProject(new CmsUUID(project)));
            } catch (Exception e) {
                // Even the fallback failed; log at info and keep the previous project.
                if (LOG.isInfoEnabled()) {
                    LOG.info(e);
                }
            }
            settings.setProject(new CmsUUID(project));
        }
        // Check if the user requested a site change.
        String site = request.getParameter(PARAM_WP_SITE);
        if (site != null) {
            reloadRequired = true;
            m_cms.getRequestContext().setSiteRoot(site);
            settings.setSite(site);
        }
        // Check which resource was requested.
        String explorerResource = request.getParameter(PARAM_WP_EXPLORER_RESOURCE);
        if (explorerResource != null) {
            reloadRequired = true;
            settings.setExplorerResource(explorerResource, getCms());
        }
        return reloadRequired;
    }
}
public class CollectionUtility {
    /**
     * Splits an array into two arrays.
     * <p>
     * Fix: the argument check previously used {@code assert}, which is a no-op unless the
     * JVM runs with {@code -ea}; an out-of-range rate would then fail later with a
     * confusing {@code NegativeArraySizeException}. The precondition is now enforced with
     * an {@link IllegalArgumentException}.
     *
     * @param src  the source array
     * @param rate the fraction of elements that go into the first array, in [0, 1]
     * @return a two-element array: {@code [0]} holds the first {@code floor(src.length*rate)}
     *         elements, {@code [1]} the remainder
     * @throws IllegalArgumentException when {@code rate} is outside [0, 1]
     */
    public static String[][] spiltArray(String[] src, double rate) {
        if (rate < 0 || rate > 1) {
            throw new IllegalArgumentException("rate must be in [0, 1]: " + rate);
        }
        String[][] output = new String[2][];
        int firstLength = (int) (src.length * rate);
        output[0] = new String[firstLength];
        output[1] = new String[src.length - firstLength];
        System.arraycopy(src, 0, output[0], 0, output[0].length);
        System.arraycopy(src, output[0].length, output[1], 0, output[1].length);
        return output;
    }
}
public class Identifiers {
    /**
     * Creates an extended {@link Identity} identifier from its XML string form.
     *
     * @param extendedIdentifier the extended identifier XML
     * @param c                  charset used for decoding the string
     * @return the new extended identity instance
     * @throws MarshalException if the XML cannot be parsed into a document
     */
    public static Identity createExtendedIdentity(String extendedIdentifier, Charset c) throws MarshalException {
        // Parse the string into a DOM document and delegate to the Document-based overload.
        return createExtendedIdentity(DomHelpers.toDocument(extendedIdentifier, c));
    }
}
public class Clicker {
    /**
     * Clicks on a given coordinate on the screen by injecting a DOWN/UP motion event pair.
     * On a {@link SecurityException} (typically the target window lost focus) the click is
     * retried up to 20 times, hiding the soft keyboard and re-locating the view between
     * attempts. Fails the test if no attempt succeeds.
     *
     * @param x    the x coordinate
     * @param y    the y coordinate
     * @param view the view being clicked, used to re-resolve coordinates on retry; may be null
     */
    public void clickOnScreen(float x, float y, View view) {
        boolean successfull = false;
        int retry = 0;
        SecurityException ex = null;
        while (!successfull && retry < 20) {
            long downTime = SystemClock.uptimeMillis();
            long eventTime = SystemClock.uptimeMillis();
            MotionEvent event = MotionEvent.obtain(downTime, eventTime, MotionEvent.ACTION_DOWN, x, y, 0);
            MotionEvent event2 = MotionEvent.obtain(downTime, eventTime, MotionEvent.ACTION_UP, x, y, 0);
            try {
                inst.sendPointerSync(event);
                inst.sendPointerSync(event2);
                successfull = true;
            } catch (SecurityException e) {
                // Remember the last failure for the final error message, then retry:
                // hide the keyboard, wait briefly, and recompute the click target from
                // an identical view (the original may have been recreated).
                ex = e;
                dialogUtils.hideSoftKeyboard(null, false, true);
                sleeper.sleep(MINI_WAIT);
                retry++;
                View identicalView = viewFetcher.getIdenticalView(view);
                if (identicalView != null) {
                    float[] xyToClick = getClickCoordinates(identicalView);
                    x = xyToClick[0];
                    y = xyToClick[1];
                }
            }
        }
        if (!successfull) {
            Assert.fail("Click at (" + x + ", " + y + ") can not be completed! ("
                    + (ex != null ? ex.getClass().getName() + ": " + ex.getMessage() : "null") + ")");
        }
    }
}
public class PropertyFilterList { /** * Searches list for given PropertyFilter . */
public boolean contains ( PropertyFilter < S > propFilter ) { } } | if ( mPropFilter == propFilter ) { return true ; } if ( mNext == null ) { return false ; } // Tail recursion .
return mNext . contains ( propFilter ) ; |
public class ApproximateHistogram {
    /**
     * Combines two sets of histogram bins using merge-sort and computes the delta between
     * consecutive bin positions. Bins at identical positions are merged together.
     *
     * @param leftBinCount    number of bins in the left histogram
     * @param leftPositions   left bin positions (sorted ascending)
     * @param leftBins        left bin counts
     * @param rightBinCount   number of bins in the right histogram
     * @param rightPositions  right bin positions (sorted ascending)
     * @param rightBins       right bin counts
     * @param mergedPositions array to store the combined bin positions (size must be at least leftBinCount + rightBinCount)
     * @param mergedBins      array to store the combined bin counts (size must be at least leftBinCount + rightBinCount)
     * @param deltas          deltas between consecutive merged bin positions (size must be at least leftBinCount + rightBinCount); may be null to skip
     * @return the number of combined bins
     */
    private static int combineBins(int leftBinCount, float[] leftPositions, long[] leftBins,
            int rightBinCount, float[] rightPositions, long[] rightBins,
            float[] mergedPositions, long[] mergedBins, float[] deltas) {
        // Classic two-pointer merge: j walks the left bins, k the right, i the output.
        int i = 0;
        int j = 0;
        int k = 0;
        while (j < leftBinCount || k < rightBinCount) {
            if (j < leftBinCount && (k == rightBinCount || leftPositions[j] < rightPositions[k])) {
                // Left position is strictly smaller (or right is exhausted): take left.
                mergedPositions[i] = leftPositions[j];
                mergedBins[i] = leftBins[j];
                ++j;
            } else if (k < rightBinCount && (j == leftBinCount || leftPositions[j] > rightPositions[k])) {
                // Right position is strictly smaller (or left is exhausted): take right.
                mergedPositions[i] = rightPositions[k];
                mergedBins[i] = rightBins[k];
                ++k;
            } else {
                // Equal positions: combine the overlapping bins by summing their counts.
                mergedPositions[i] = leftPositions[j];
                mergedBins[i] = leftBins[j] + rightBins[k];
                ++j;
                ++k;
            }
            // Deltas are between consecutive merged positions, so the first bin has none.
            if (deltas != null && i > 0) {
                deltas[i - 1] = mergedPositions[i] - mergedPositions[i - 1];
            }
            ++i;
        }
        return i;
    }
}
public class MSPDIReader {
    /**
     * Extracts calendar data from an MSPDI file: reads every calendar, resolves base
     * calendar references, then tries to set the project's default calendar.
     *
     * @param project root node of the MSPDI file
     * @param map     map of calendar UIDs to calendars, populated by this method
     */
    private void readCalendars(Project project, HashMap<BigInteger, ProjectCalendar> map) {
        Project.Calendars calendars = project.getCalendars();
        if (calendars != null) {
            // Base-calendar links are resolved in a second pass, after every calendar exists.
            LinkedList<Pair<ProjectCalendar, BigInteger>> baseCalendars = new LinkedList<Pair<ProjectCalendar, BigInteger>>();
            for (Project.Calendars.Calendar cal : calendars.getCalendar()) {
                readCalendar(cal, map, baseCalendars);
            }
            updateBaseCalendarNames(baseCalendars, map);
        }
        try {
            // Despite the name, DefaultCalendarName holds a numeric calendar UID here.
            ProjectProperties properties = m_projectFile.getProjectProperties();
            BigInteger calendarID = new BigInteger(properties.getDefaultCalendarName());
            ProjectCalendar calendar = map.get(calendarID);
            m_projectFile.setDefaultCalendar(calendar);
        } catch (Exception ex) {
            // Deliberately ignored: a missing or non-numeric default calendar simply
            // leaves the project without an explicit default.
        }
    }
}
public class CompareFileExtensions {
    /**
     * Completes the compare for the files encapsulated in the FileCompareResultBean.
     *
     * @param fileCompareResultBean the FileCompareResultBean
     */
    public static void completeCompare(final IFileCompareResultBean fileCompareResultBean) {
        // Delegates with all five boolean options disabled; their individual meanings are
        // not visible here — presumably they toggle which attributes are ignored during
        // the compare; confirm against the compare(...) overload.
        compare(fileCompareResultBean, false, false, false, false, false);
    }
}
public class AjaxWRadioButtonSelectExample { /** * Set the content of the text field depending on the selected option in the radio button select .
* @ param request the request being processed */
@ Override protected void preparePaintComponent ( final Request request ) { } } | if ( OPTION_CONTENT1 . equals ( rbSelect . getSelected ( ) ) ) { content . setText ( "This is content 1" ) ; } else if ( OPTION_CONTENT2 . equals ( rbSelect . getSelected ( ) ) ) { content . setText ( "This is content 2" ) ; } else if ( OPTION_CONTENT3 . equals ( rbSelect . getSelected ( ) ) ) { content . setText ( "This is content 3" ) ; } else { content . setText ( null ) ; } |
public class ExpiryPolicyBuilder { /** * Set TTI since last access .
* Note : Calling this method on a builder with an existing TTI since last access will override the previous value or function .
* @ param access TTI since last access
* @ return a new builder with the TTI since last access */
public ExpiryPolicyBuilder < K , V > access ( Duration access ) { } } | if ( access != null && access . isNegative ( ) ) { throw new IllegalArgumentException ( "Access duration must be positive" ) ; } return access ( ( a , b ) -> access ) ; |
public class RouteHandler {
    /**
     * Routes the request specified by the given request URI and HTTP method.
     * Lookup is bucketed by the URI's segment count (1, 2, 3, 4+): first an exact
     * "METHOD.URI" match is tried against the concrete-route map for that bucket, then the
     * variable-route maps for the specific HTTP method (or the catch-all map).
     *
     * @param requestURI the given request URI
     * @param httpMethod the given HTTP method
     * @return MatchResult, returns {@code null} if not found
     */
    public static MatchResult doMatch(final String requestURI, final String httpMethod) {
        MatchResult ret;
        // Number of '/' characters determines which per-segment-count maps to consult.
        final int segs = StringUtils.countMatches(requestURI, "/");
        ContextHandlerMeta contextHandlerMeta;
        // Concrete routes are keyed by "METHOD.URI".
        String concreteKey = httpMethod + "." + requestURI;
        switch (segs) {
            case 1:
                contextHandlerMeta = ONE_SEG_CONCRETE_CTX_HANDLER_METAS.get(concreteKey);
                if (null != contextHandlerMeta) {
                    return new MatchResult(contextHandlerMeta, requestURI, httpMethod, requestURI);
                }
                switch (httpMethod) {
                    case "GET":
                        return route(requestURI, httpMethod, ONE_SEG_GET_VAR_CTX_HANDLER_METAS);
                    case "POST":
                        return route(requestURI, httpMethod, ONE_SEG_POST_VAR_CTX_HANDLER_METAS);
                    case "PUT":
                        return route(requestURI, httpMethod, ONE_SEG_PUT_VAR_CTX_HANDLER_METAS);
                    case "DELETE":
                        return route(requestURI, httpMethod, ONE_SEG_DELETE_VAR_CTX_HANDLER_METAS);
                    default:
                        return route(requestURI, httpMethod, ONE_SEG_OTHER_METHOD_VAR_CTX_HANDLER_METAS);
                }
            case 2:
                contextHandlerMeta = TWO_SEG_CONCRETE_CTX_HANDLER_METAS.get(concreteKey);
                if (null != contextHandlerMeta) {
                    return new MatchResult(contextHandlerMeta, requestURI, httpMethod, requestURI);
                }
                switch (httpMethod) {
                    case "GET":
                        return route(requestURI, httpMethod, TWO_SEG_GET_VAR_CTX_HANDLER_METAS);
                    case "POST":
                        return route(requestURI, httpMethod, TWO_SEG_POST_VAR_CTX_HANDLER_METAS);
                    case "PUT":
                        return route(requestURI, httpMethod, TWO_SEG_PUT_VAR_CTX_HANDLER_METAS);
                    case "DELETE":
                        return route(requestURI, httpMethod, TWO_SEG_DELETE_VAR_CTX_HANDLER_METAS);
                    default:
                        return route(requestURI, httpMethod, TWO_SEG_OTHER_METHOD_VAR_CTX_HANDLER_METAS);
                }
            case 3:
                contextHandlerMeta = THREE_SEG_CONCRETE_CTX_HANDLER_METAS.get(concreteKey);
                if (null != contextHandlerMeta) {
                    return new MatchResult(contextHandlerMeta, requestURI, httpMethod, requestURI);
                }
                switch (httpMethod) {
                    case "GET":
                        return route(requestURI, httpMethod, THREE_SEG_GET_VAR_CTX_HANDLER_METAS);
                    case "POST":
                        return route(requestURI, httpMethod, THREE_SEG_POST_VAR_CTX_HANDLER_METAS);
                    case "PUT":
                        return route(requestURI, httpMethod, THREE_SEG_PUT_VAR_CTX_HANDLER_METAS);
                    case "DELETE":
                        return route(requestURI, httpMethod, THREE_SEG_DELETE_VAR_CTX_HANDLER_METAS);
                    default:
                        return route(requestURI, httpMethod, THREE_SEG_OTHER_METHOD_VAR_CTX_HANDLER_METAS);
                }
            default:
                // Four or more segments share one bucket.
                contextHandlerMeta = FOUR_MORE_SEG_CONCRETE_CTX_HANDLER_METAS.get(concreteKey);
                if (null != contextHandlerMeta) {
                    return new MatchResult(contextHandlerMeta, requestURI, httpMethod, requestURI);
                }
                switch (httpMethod) {
                    case "GET":
                        return route(requestURI, httpMethod, FOUR_MORE_SEG_GET_VAR_CTX_HANDLER_METAS);
                    case "POST":
                        return route(requestURI, httpMethod, FOUR_MORE_SEG_POST_VAR_CTX_HANDLER_METAS);
                    case "PUT":
                        return route(requestURI, httpMethod, FOUR_MORE_SEG_PUT_VAR_CTX_HANDLER_METAS);
                    case "DELETE":
                        return route(requestURI, httpMethod, FOUR_MORE_SEG_DELETE_VAR_CTX_HANDLER_METAS);
                    default:
                        return route(requestURI, httpMethod, FOUR_MORE_SEG_OTHER_METHOD_VAR_CTX_HANDLER_METAS);
                }
        }
    }
}
public class WebContext {
    /**
     * Gets application environment information, delegating to the blade instance.
     *
     * @param key          environment key
     * @param defaultValue default value returned when the key has no value
     * @return environment value, or the default when absent
     */
    public String env(String key, String defaultValue) {
        return blade().env(key, defaultValue);
    }
}
public class ObjectFactory {
    /**
     * Creates an instance of {@link JAXBElement}{@code <}{@link ElementaryFunctionsType}{@code >}
     * for the MathML {@code arcsech} element. (Generated JAXB factory method.)
     *
     * @param value the payload to wrap
     * @return the wrapped JAXB element
     */
    @XmlElementDecl(namespace = "http://www.w3.org/1998/Math/MathML", name = "arcsech")
    public JAXBElement<ElementaryFunctionsType> createArcsech(ElementaryFunctionsType value) {
        return new JAXBElement<ElementaryFunctionsType>(_Arcsech_QNAME, ElementaryFunctionsType.class, null, value);
    }
}
public class AbstractAdminObject { /** * Adds a new event to this AdminObject .
* @ param _ eventtype Eventtype class name to add
* @ param _ eventdef EventDefinition to add
* @ see # events
* @ throws CacheReloadException on error */
public void addEvent ( final EventType _eventtype , final EventDefinition _eventdef ) throws CacheReloadException { } } | List < EventDefinition > evenList = this . events . get ( _eventtype ) ; if ( evenList == null ) { evenList = new ArrayList < > ( ) ; this . events . put ( _eventtype , evenList ) ; } if ( ! evenList . contains ( _eventdef ) ) { evenList . add ( _eventdef ) ; } // if there are more than one event they must be sorted by their index
// position
if ( evenList . size ( ) > 1 ) { Collections . sort ( evenList , new Comparator < EventDefinition > ( ) { @ Override public int compare ( final EventDefinition _eventDef0 , final EventDefinition _eventDef1 ) { return Long . compare ( _eventDef0 . getIndexPos ( ) , _eventDef1 . getIndexPos ( ) ) ; } } ) ; } setDirty ( ) ; |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.