signature
stringlengths 43
39.1k
| implementation
stringlengths 0
450k
|
|---|---|
public class ReservedDBInstance { /** * The recurring price charged to run this reserved DB instance .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setRecurringCharges ( java . util . Collection ) } or { @ link # withRecurringCharges ( java . util . Collection ) } if you
* want to override the existing values .
* @ param recurringCharges
* The recurring price charged to run this reserved DB instance .
* @ return Returns a reference to this object so that method calls can be chained together . */
public ReservedDBInstance withRecurringCharges ( RecurringCharge ... recurringCharges ) { } }
|
if ( this . recurringCharges == null ) { setRecurringCharges ( new com . amazonaws . internal . SdkInternalList < RecurringCharge > ( recurringCharges . length ) ) ; } for ( RecurringCharge ele : recurringCharges ) { this . recurringCharges . add ( ele ) ; } return this ;
|
public class JavaScriptPostAggregator { /** * { @ link # compute } can be called by multiple threads , so this function should be thread - safe to avoid extra
* script compilation . */
@ EnsuresNonNull ( "fn" ) private Function getCompiledScript ( ) { } }
|
// JavaScript configuration should be checked when it ' s actually used because someone might still want Druid
// nodes to be able to deserialize JavaScript - based objects even though JavaScript is disabled .
Preconditions . checkState ( config . isEnabled ( ) , "JavaScript is disabled" ) ; Function syncedFn = fn ; if ( syncedFn == null ) { synchronized ( config ) { syncedFn = fn ; if ( syncedFn == null ) { syncedFn = compile ( function ) ; fn = syncedFn ; } } } return syncedFn ;
|
public class TabbedPaneTabCloseButtonPainter { /** * Draw a border around the graphic .
* @ param g the Graphic context .
* @ param width the width of the border .
* @ param height the height of the border .
* @ param color the color of the border .
* @ param size the spread of the border from outside in , expressed as a
* percentage of the height . */
private void drawBorder ( Graphics2D g , int width , int height , Color color , float size ) { } }
|
int max = ( int ) ( Math . min ( ( height - 2 ) * size , height / 2.0f ) + 0.5 ) ; int alphaDelta = color . getAlpha ( ) / max ; for ( int i = 0 ; i < max ; i ++ ) { Shape s = shapeGenerator . createRoundRectangle ( i , i , width - 2 * i - 1 , height - 2 * i - 1 , CornerSize . CHECKBOX_INTERIOR ) ; Color newColor = new Color ( color . getRed ( ) , color . getGreen ( ) , color . getBlue ( ) , color . getAlpha ( ) - i * alphaDelta ) ; g . setPaint ( newColor ) ; g . draw ( s ) ; }
|
public class AmazonLexModelBuildingClient { /** * Starts a job to import a resource to Amazon Lex .
* @ param startImportRequest
* @ return Result of the StartImport operation returned by the service .
* @ throws LimitExceededException
* The request exceeded a limit . Try your request again .
* @ throws InternalFailureException
* An internal Amazon Lex error occurred . Try your request again .
* @ throws BadRequestException
* The request is not well formed . For example , a value is invalid or a required field is missing . Check the
* field values , and try again .
* @ sample AmazonLexModelBuilding . StartImport
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / lex - models - 2017-04-19 / StartImport " target = " _ top " > AWS API
* Documentation < / a > */
@ Override public StartImportResult startImport ( StartImportRequest request ) { } }
|
request = beforeClientExecution ( request ) ; return executeStartImport ( request ) ;
|
public class ConstantAverager { /** * / * ( non - Javadoc )
* @ see Averager # addElement ( java . util . Map , java . util . Map ) */
@ Override public void addElement ( Map < String , Object > e , Map < String , AggregatorFactory > a ) { } }
|
// since we return a constant , no need to read from the event
|
public class SipServletMessageImpl { /** * ( non - Javadoc )
* @ see javax . servlet . sip . SipServletMessage # setExpires ( int ) */
public void setExpires ( int seconds ) { } }
|
try { ExpiresHeader expiresHeader = SipFactoryImpl . headerFactory . createExpiresHeader ( seconds ) ; expiresHeader . setExpires ( seconds ) ; this . message . setExpires ( expiresHeader ) ; } catch ( Exception e ) { throw new IllegalArgumentException ( "Error setting expiration header!" , e ) ; }
|
public class MPSImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
public EList < MPSRG > getFixedLengthRG ( ) { } }
|
if ( fixedLengthRG == null ) { fixedLengthRG = new EObjectContainmentEList . Resolving < MPSRG > ( MPSRG . class , this , AfplibPackage . MPS__FIXED_LENGTH_RG ) ; } return fixedLengthRG ;
|
public class RBBITableBuilder { void build ( ) { } }
|
// If there were no rules , just return . This situation can easily arise
// for the reverse rules .
if ( fRB . fTreeRoots [ fRootIx ] == null ) { return ; } // Walk through the tree , replacing any references to $ variables with a copy of the
// parse tree for the substition expression .
fRB . fTreeRoots [ fRootIx ] = fRB . fTreeRoots [ fRootIx ] . flattenVariables ( ) ; if ( fRB . fDebugEnv != null && fRB . fDebugEnv . indexOf ( "ftree" ) >= 0 ) { System . out . println ( "Parse tree after flattening variable references." ) ; fRB . fTreeRoots [ fRootIx ] . printTree ( true ) ; } // If the rules contained any references to { bof }
// add a { bof } < cat > < former root of tree > to the
// tree . Means that all matches must start out with the
// { bof } fake character .
if ( fRB . fSetBuilder . sawBOF ( ) ) { RBBINode bofTop = new RBBINode ( RBBINode . opCat ) ; RBBINode bofLeaf = new RBBINode ( RBBINode . leafChar ) ; bofTop . fLeftChild = bofLeaf ; bofTop . fRightChild = fRB . fTreeRoots [ fRootIx ] ; bofLeaf . fParent = bofTop ; bofLeaf . fVal = 2 ; // Reserved value for { bof } .
fRB . fTreeRoots [ fRootIx ] = bofTop ; } // Add a unique right - end marker to the expression .
// Appears as a cat - node , left child being the original tree ,
// right child being the end marker .
RBBINode cn = new RBBINode ( RBBINode . opCat ) ; cn . fLeftChild = fRB . fTreeRoots [ fRootIx ] ; fRB . fTreeRoots [ fRootIx ] . fParent = cn ; cn . fRightChild = new RBBINode ( RBBINode . endMark ) ; cn . fRightChild . fParent = cn ; fRB . fTreeRoots [ fRootIx ] = cn ; // Replace all references to UnicodeSets with the tree for the equivalent
// expression .
fRB . fTreeRoots [ fRootIx ] . flattenSets ( ) ; if ( fRB . fDebugEnv != null && fRB . fDebugEnv . indexOf ( "stree" ) >= 0 ) { System . out . println ( "Parse tree after flattening Unicode Set references." ) ; fRB . fTreeRoots [ fRootIx ] . printTree ( true ) ; } // calculate the functions nullable , firstpos , lastpos and followpos on
// nodes in the parse tree .
// See the alogrithm description in Aho .
// Understanding how this works by looking at the code alone will be
// nearly impossible .
calcNullable ( fRB . fTreeRoots [ fRootIx ] ) ; calcFirstPos ( fRB . fTreeRoots [ fRootIx ] ) ; calcLastPos ( fRB . fTreeRoots [ fRootIx ] ) ; calcFollowPos ( fRB . fTreeRoots [ fRootIx ] ) ; if ( fRB . fDebugEnv != null && fRB . fDebugEnv . indexOf ( "pos" ) >= 0 ) { System . out . print ( "\n" ) ; printPosSets ( fRB . fTreeRoots [ fRootIx ] ) ; } // For " chained " rules , modify the followPos sets
if ( fRB . fChainRules ) { calcChainedFollowPos ( fRB . fTreeRoots [ fRootIx ] ) ; } // BOF ( start of input ) test fixup .
if ( fRB . fSetBuilder . sawBOF ( ) ) { bofFixup ( ) ; } // Build the DFA state transition tables .
buildStateTable ( ) ; flagAcceptingStates ( ) ; flagLookAheadStates ( ) ; flagTaggedStates ( ) ; // Update the global table of rule status { tag } values
// The rule builder has a global vector of status values that are common
// for all tables . Merge the ones from this table into the global set .
mergeRuleStatusVals ( ) ; if ( fRB . fDebugEnv != null && fRB . fDebugEnv . indexOf ( "states" ) >= 0 ) { printStates ( ) ; }
|
public class ConvertUtil { /** * Returns true if the value is any numeric type and has a value of 1 , or
* if string type has a value of ' y ' , ' t ' , ' true ' or ' yes ' . Otherwise , return false .
* @ param value value to convert
* @ return true if the value is any numeric type and has a value of 1 , or
* if string type has a value of ' y ' , ' t ' , ' true ' or ' yes ' . Otherwise , return false . */
public static Boolean toBoolean ( Object value ) { } }
|
if ( value == null ) { return false ; } else if ( value instanceof Boolean ) { return ( Boolean ) value ; } else if ( value instanceof BigDecimal ) { return value . equals ( BigDecimal . ONE ) ; } else if ( value instanceof Long ) { return value . equals ( 1L ) ; } else if ( value instanceof Integer ) { return value . equals ( 1 ) ; } else if ( value instanceof Character ) { return value . equals ( 'y' ) || value . equals ( 'Y' ) || value . equals ( 't' ) || value . equals ( 'T' ) ; } else return value . toString ( ) . equalsIgnoreCase ( "yes" ) || value . toString ( ) . equalsIgnoreCase ( "true" ) || value . toString ( ) . equalsIgnoreCase ( "y" ) || value . toString ( ) . equalsIgnoreCase ( "t" ) || Boolean . parseBoolean ( value . toString ( ) ) ;
|
public class Manager { @ InterfaceAudience . Private private void replaceDatabase ( String databaseName , InputStream databaseStream , Iterator < Map . Entry < String , InputStream > > attachmentStreams ) throws CouchbaseLiteException { } }
|
try { Database db = getDatabase ( databaseName , false ) ; String dstDbPath = FileDirUtils . getPathWithoutExt ( db . getPath ( ) ) + kV1DBExtension ; String dstAttsPath = FileDirUtils . getPathWithoutExt ( dstDbPath ) + " attachments" ; OutputStream destStream = new FileOutputStream ( new File ( dstDbPath ) ) ; StreamUtils . copyStream ( databaseStream , destStream ) ; File attachmentsFile = new File ( dstAttsPath ) ; FileDirUtils . deleteRecursive ( attachmentsFile ) ; if ( ! attachmentsFile . exists ( ) ) { attachmentsFile . mkdirs ( ) ; } if ( attachmentStreams != null ) { StreamUtils . copyStreamsToFolder ( attachmentStreams , attachmentsFile ) ; } if ( ! upgradeV1Database ( databaseName , dstDbPath ) ) { throw new CouchbaseLiteException ( Status . INTERNAL_SERVER_ERROR ) ; } db . open ( ) ; db . replaceUUIDs ( ) ; } catch ( FileNotFoundException e ) { Log . e ( Database . TAG , "Error replacing the database: %s" , e , databaseName ) ; throw new CouchbaseLiteException ( Status . INTERNAL_SERVER_ERROR ) ; } catch ( IOException e ) { Log . e ( Database . TAG , "Error replacing the database: %s" , e , databaseName ) ; throw new CouchbaseLiteException ( Status . INTERNAL_SERVER_ERROR ) ; }
|
public class GitHubHookRegisterProblemMonitor { /** * Save the settings to a file . Called on each change of { @ code ignored } list */
@ Override public synchronized void save ( ) { } }
|
if ( BulkChange . contains ( this ) ) { return ; } try { getConfigFile ( ) . write ( this ) ; SaveableListener . fireOnChange ( this , getConfigFile ( ) ) ; } catch ( IOException e ) { LOGGER . error ( "{}" , e ) ; }
|
public class AtomicGrowingSparseMatrix { /** * { @ inheritDoc } */
public void setRow ( int row , double [ ] columns ) { } }
|
checkIndices ( row , 0 ) ; AtomicSparseVector rowEntry = getRow ( row , columns . length - 1 , true ) ; denseArrayReadLock . lock ( ) ; for ( int i = 0 ; i < columns . length ; ++ i ) rowEntry . set ( i , columns [ i ] ) ; denseArrayReadLock . unlock ( ) ;
|
public class PaginationHandler { /** * 新建count查询的MappedStatement
* @ param ms
* @ return */
public MappedStatement getCountMappedStatement ( MappedStatement ms ) { } }
|
String newMsId = ms . getId ( ) + PAGE_COUNT_SUFFIX ; MappedStatement statement = null ; Configuration configuration = ms . getConfiguration ( ) ; try { statement = configuration . getMappedStatement ( newMsId ) ; if ( statement != null ) return statement ; } catch ( Exception e ) { } synchronized ( configuration ) { if ( configuration . hasStatement ( newMsId ) ) return configuration . getMappedStatement ( newMsId ) ; MappedStatement . Builder builder = new MappedStatement . Builder ( ms . getConfiguration ( ) , newMsId , ms . getSqlSource ( ) , ms . getSqlCommandType ( ) ) ; builder . resource ( ms . getResource ( ) ) ; builder . fetchSize ( ms . getFetchSize ( ) ) ; builder . statementType ( ms . getStatementType ( ) ) ; builder . keyGenerator ( ms . getKeyGenerator ( ) ) ; if ( ms . getKeyProperties ( ) != null && ms . getKeyProperties ( ) . length != 0 ) { StringBuilder keyProperties = new StringBuilder ( ) ; for ( String keyProperty : ms . getKeyProperties ( ) ) { keyProperties . append ( keyProperty ) . append ( "," ) ; } keyProperties . delete ( keyProperties . length ( ) - 1 , keyProperties . length ( ) ) ; builder . keyProperty ( keyProperties . toString ( ) ) ; } builder . timeout ( ms . getTimeout ( ) ) ; builder . parameterMap ( ms . getParameterMap ( ) ) ; // count查询返回值int
List < ResultMap > resultMaps = new ArrayList < ResultMap > ( ) ; String id = newMsId + "-Inline" ; ResultMap resultMap = new ResultMap . Builder ( configuration , id , Long . class , new ArrayList < ResultMapping > ( 0 ) ) . build ( ) ; resultMaps . add ( resultMap ) ; builder . resultMaps ( resultMaps ) ; builder . resultSetType ( ms . getResultSetType ( ) ) ; builder . cache ( ms . getCache ( ) ) ; builder . flushCacheRequired ( ms . isFlushCacheRequired ( ) ) ; builder . useCache ( ms . isUseCache ( ) ) ; statement = builder . build ( ) ; configuration . addMappedStatement ( statement ) ; return statement ; }
|
public class GenericCriteria { /** * Evalutes AND and OR operators .
* @ param container data context
* @ param promptValues responses to prompts
* @ return operator result */
private boolean evaluateLogicalOperator ( FieldContainer container , Map < GenericCriteriaPrompt , Object > promptValues ) { } }
|
boolean result = false ; if ( m_criteriaList . size ( ) == 0 ) { result = true ; } else { for ( GenericCriteria criteria : m_criteriaList ) { result = criteria . evaluate ( container , promptValues ) ; if ( ( m_operator == TestOperator . AND && ! result ) || ( m_operator == TestOperator . OR && result ) ) { break ; } } } return result ;
|
public class DFAs { /** * Calculates the implication ( " = & gt ; " ) of two DFA , and stores the result in a given mutable DFA .
* @ param dfa1
* the first DFA
* @ param dfa2
* the second DFA
* @ param inputs
* the input symbols to consider
* @ param out
* a mutable DFA for storing the result
* @ return { @ code out } , for convenience */
public static < I , S , A extends MutableDFA < S , I > > A impl ( DFA < ? , I > dfa1 , DFA < ? , I > dfa2 , Collection < ? extends I > inputs , A out ) { } }
|
return combine ( dfa1 , dfa2 , inputs , out , AcceptanceCombiner . IMPL ) ;
|
public class HttpServerHandlerBinder { /** * Bind a new handler into the jetty service . These handlers are bound before the servlet handler and can handle parts
* of the URI space before a servlet sees it .
* Do not use this method to bind logging handlers as they would be called before any functionality has been executed and
* logging information would be incomplete . Use { @ link HttpServerHandlerBinder # bindLoggingHandler ( Binder ) instead . */
public static LinkedBindingBuilder < Handler > bindHandler ( final Binder binder ) { } }
|
final Multibinder < Handler > handlers = Multibinder . newSetBinder ( binder , Handler . class , HANDLER_NAMED ) ; return handlers . addBinding ( ) ;
|
public class PutInventoryRequest { /** * The inventory items that you want to add or update on instances .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setItems ( java . util . Collection ) } or { @ link # withItems ( java . util . Collection ) } if you want to override the
* existing values .
* @ param items
* The inventory items that you want to add or update on instances .
* @ return Returns a reference to this object so that method calls can be chained together . */
public PutInventoryRequest withItems ( InventoryItem ... items ) { } }
|
if ( this . items == null ) { setItems ( new com . amazonaws . internal . SdkInternalList < InventoryItem > ( items . length ) ) ; } for ( InventoryItem ele : items ) { this . items . add ( ele ) ; } return this ;
|
public class SDPWrapper { /** * Convenience method for creating a new { @ link RTPInfo } object .
* @ param connection
* the connection ( the c - field ) information from the SDP or null
* if there were none .
* @ param md
* the media description from the SDP
* @ return a new { @ link RTPInfo } object if the { @ link MediaDescription } was
* of type " RTP / AVP " , otherwise null .
* @ throws SdpParseException */
private RTPInfo processMediaDescription ( final Connection connection , final MediaDescription md ) throws SdpParseException { } }
|
final Media m = md . getMedia ( ) ; if ( "RTP/AVP" . equalsIgnoreCase ( m . getProtocol ( ) ) ) { final Connection c = md . getConnection ( ) != null ? null : connection ; return new RTPInfoImpl ( connection , md ) ; } return null ;
|
public class OneForOneBlockFetcher { /** * Invokes the " onBlockFetchFailure " callback for every listed block id . */
private void failRemainingBlocks ( String [ ] failedBlockIds , Throwable e ) { } }
|
for ( String blockId : failedBlockIds ) { try { listener . onBlockFetchFailure ( blockId , e ) ; } catch ( Exception e2 ) { logger . error ( "Error in block fetch failure callback" , e2 ) ; } }
|
public class AbstractFileStorageEngine { /** * Removes recursively all empty parent directories up to and excluding the storage directory .
* @ param path
* @ throws IOException */
private void cleanEmptyParentDirectory ( Path path ) throws IOException { } }
|
Path normPath = path . normalize ( ) ; if ( normPath . equals ( Paths . get ( getDirectory ( ) ) . normalize ( ) ) || normPath . equals ( Paths . get ( System . getProperty ( "java.io.tmpdir" ) ) . normalize ( ) ) ) { // stop if we reach the output or temporary directory
return ; } try { Files . deleteIfExists ( path ) ; // delete the directory if empty
cleanEmptyParentDirectory ( path . getParent ( ) ) ; // do the same with parent directory
} catch ( DirectoryNotEmptyException ex ) { // if directory non - empty ignore exception
}
|
public class Matrix4f { /** * Apply an arbitrary perspective projection frustum transformation for a right - handed coordinate system
* using the given NDC z range to this matrix and store the result in < code > dest < / code > .
* If < code > M < / code > is < code > this < / code > matrix and < code > F < / code > the frustum matrix ,
* then the new matrix will be < code > M * F < / code > . So when transforming a
* vector < code > v < / code > with the new matrix by using < code > M * F * v < / code > ,
* the frustum transformation will be applied first !
* In order to set the matrix to a perspective frustum transformation without post - multiplying ,
* use { @ link # setFrustum ( float , float , float , float , float , float , boolean ) setFrustum ( ) } .
* Reference : < a href = " http : / / www . songho . ca / opengl / gl _ projectionmatrix . html # perspective " > http : / / www . songho . ca < / a >
* @ see # setFrustum ( float , float , float , float , float , float , boolean )
* @ param left
* the distance along the x - axis to the left frustum edge
* @ param right
* the distance along the x - axis to the right frustum edge
* @ param bottom
* the distance along the y - axis to the bottom frustum edge
* @ param top
* the distance along the y - axis to the top frustum edge
* @ param zNear
* near clipping plane distance . If the special value { @ link Float # POSITIVE _ INFINITY } is used , the near clipping plane will be at positive infinity .
* In that case , < code > zFar < / code > may not also be { @ link Float # POSITIVE _ INFINITY } .
* @ param zFar
* far clipping plane distance . If the special value { @ link Float # POSITIVE _ INFINITY } is used , the far clipping plane will be at positive infinity .
* In that case , < code > zNear < / code > may not also be { @ link Float # POSITIVE _ INFINITY } .
* @ param zZeroToOne
* whether to use Vulkan ' s and Direct3D ' s NDC z range of < code > [ 0 . . + 1 ] < / code > when < code > true < / code >
* or whether to use OpenGL ' s NDC z range of < code > [ - 1 . . + 1 ] < / code > when < code > false < / code >
* @ param dest
* will hold the result
* @ return dest */
public Matrix4f frustum ( float left , float right , float bottom , float top , float zNear , float zFar , boolean zZeroToOne , Matrix4f dest ) { } }
|
if ( ( properties & PROPERTY_IDENTITY ) != 0 ) return dest . setFrustum ( left , right , bottom , top , zNear , zFar , zZeroToOne ) ; return frustumGeneric ( left , right , bottom , top , zNear , zFar , zZeroToOne , dest ) ;
|
public class Term { /** * Determines if this term is of the form " F & lt ; OP & gt ; E " where F is the
* specified field , & lt ; OP & gt ; is an operator , and E is an expression . If
* so , the method returns & lt ; OP & gt ; . If not , the method returns null .
* @ param fldName
* the name of the field
* @ return either the operator or null */
public Operator operator ( String fldName ) { } }
|
if ( lhs . isFieldName ( ) && lhs . asFieldName ( ) . equals ( fldName ) ) return op ; if ( rhs . isFieldName ( ) && rhs . asFieldName ( ) . equals ( fldName ) ) return op . complement ( ) ; return null ;
|
public class LambdaConfigTypeMarshaller { /** * Marshall the given parameter object . */
public void marshall ( LambdaConfigType lambdaConfigType , ProtocolMarshaller protocolMarshaller ) { } }
|
if ( lambdaConfigType == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( lambdaConfigType . getPreSignUp ( ) , PRESIGNUP_BINDING ) ; protocolMarshaller . marshall ( lambdaConfigType . getCustomMessage ( ) , CUSTOMMESSAGE_BINDING ) ; protocolMarshaller . marshall ( lambdaConfigType . getPostConfirmation ( ) , POSTCONFIRMATION_BINDING ) ; protocolMarshaller . marshall ( lambdaConfigType . getPreAuthentication ( ) , PREAUTHENTICATION_BINDING ) ; protocolMarshaller . marshall ( lambdaConfigType . getPostAuthentication ( ) , POSTAUTHENTICATION_BINDING ) ; protocolMarshaller . marshall ( lambdaConfigType . getDefineAuthChallenge ( ) , DEFINEAUTHCHALLENGE_BINDING ) ; protocolMarshaller . marshall ( lambdaConfigType . getCreateAuthChallenge ( ) , CREATEAUTHCHALLENGE_BINDING ) ; protocolMarshaller . marshall ( lambdaConfigType . getVerifyAuthChallengeResponse ( ) , VERIFYAUTHCHALLENGERESPONSE_BINDING ) ; protocolMarshaller . marshall ( lambdaConfigType . getPreTokenGeneration ( ) , PRETOKENGENERATION_BINDING ) ; protocolMarshaller . marshall ( lambdaConfigType . getUserMigration ( ) , USERMIGRATION_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
|
public class SimpleWeightedDirectedTypedEdge { /** * { @ inheritDoc } */
@ SuppressWarnings ( "unchecked" ) public < E extends Edge > E flip ( ) { } }
|
return ( E ) ( new SimpleWeightedDirectedTypedEdge < T > ( type , to , from , weight ) ) ;
|
public class JMStats { /** * Min number .
* @ param < N > the type parameter
* @ param numberList the number list
* @ return the number */
public static < N extends Number > Number min ( List < N > numberList ) { } }
|
return cal ( numberList , DoubleStream :: min ) ;
|
public class SLINKHDBSCANLinearMemory { /** * Third step : Determine the values for P and L
* @ param id the id of the object to be inserted into the pointer
* representation
* @ param pi Pi data store
* @ param lambda Lambda data store
* @ param processedIDs the already processed ids
* @ param m Data store */
private void step3 ( DBIDRef id , WritableDBIDDataStore pi , WritableDoubleDataStore lambda , DBIDs processedIDs , WritableDoubleDataStore m ) { } }
|
DBIDVar p_i = DBIDUtil . newVar ( ) ; // for i = 1 . . n
for ( DBIDIter it = processedIDs . iter ( ) ; it . valid ( ) ; it . advance ( ) ) { double l_i = lambda . doubleValue ( it ) ; double m_i = m . doubleValue ( it ) ; pi . assignVar ( it , p_i ) ; // p _ i = pi ( it )
double mp_i = m . doubleValue ( p_i ) ; // if L ( i ) > = M ( i )
if ( l_i >= m_i ) { // M ( P ( i ) ) = min { M ( P ( i ) ) , L ( i ) }
if ( l_i < mp_i ) { m . putDouble ( p_i , l_i ) ; } // L ( i ) = M ( i )
lambda . putDouble ( it , m_i ) ; // P ( i ) = n + 1;
pi . put ( it , id ) ; } else { // M ( P ( i ) ) = min { M ( P ( i ) ) , M ( i ) }
if ( m_i < mp_i ) { m . putDouble ( p_i , m_i ) ; } } }
|
public class Configurator { /** * Checks that for every dependency referred , there is a matching property */
static void checkDependencyReferencesPresent ( List < AccessibleObject > objects , Map < String , AccessibleObject > props ) { } }
|
// iterate overall properties marked by @ Property
for ( int i = 0 ; i < objects . size ( ) ; i ++ ) { // get the Property annotation
AccessibleObject ao = objects . get ( i ) ; Property annotation = ao . getAnnotation ( Property . class ) ; if ( annotation == null ) { throw new IllegalArgumentException ( "@Property annotation is required for checking dependencies;" + " annotation is missing for Field/Method " + ao . toString ( ) ) ; } String dependsClause = annotation . dependsUpon ( ) ; if ( dependsClause . trim ( ) . isEmpty ( ) ) continue ; // split dependsUpon specifier into tokens ; trim each token ; search for token in list
StringTokenizer st = new StringTokenizer ( dependsClause , "," ) ; while ( st . hasMoreTokens ( ) ) { String token = st . nextToken ( ) . trim ( ) ; // check that the string representing a property name is in the list
boolean found = false ; Set < String > keyset = props . keySet ( ) ; for ( Iterator < String > iter = keyset . iterator ( ) ; iter . hasNext ( ) ; ) { if ( iter . next ( ) . equals ( token ) ) { found = true ; break ; } } if ( ! found ) { throw new IllegalArgumentException ( "@Property annotation " + annotation . name ( ) + " has an unresolved dependsUpon property: " + token ) ; } } }
|
public class ComStmtPrepare { /** * Send directly to socket the sql data .
* @ param pos the writer
* @ throws IOException if connection error occur */
public void send ( PacketOutputStream pos ) throws IOException { } }
|
pos . startPacket ( 0 ) ; pos . write ( COM_STMT_PREPARE ) ; pos . write ( this . sql ) ; pos . flush ( ) ;
|
public class ArchiveExtractor { /** * parse name without directories */
private String getFileName ( String name ) { } }
|
// check if the environment is linux or windows
if ( name . contains ( Constants . FORWARD_SLASH ) ) { name = name . substring ( name . lastIndexOf ( Constants . FORWARD_SLASH ) + 1 , name . length ( ) ) ; } else if ( name . contains ( Constants . BACK_SLASH ) ) { name = name . substring ( name . lastIndexOf ( Constants . BACK_SLASH ) + 1 , name . length ( ) ) ; } return name ;
|
public class MvcStateKeeperHolder { /** * Restore model of all { @ link Bean } s currently live in the { @ link Mvc # graph ( ) }
* @ Bundle savedState the saved state */
static void restoreState ( Bundle savedState ) { } }
|
stateKeeper . bundle = savedState ; MvcComponent root = getRootComponent ( ) ; doRestoreState ( root ) ; stateKeeper . bundle = null ;
|
public class JobHistoryFileParserBase { /** * parses the - Xmx value from the mapred . child . java . opts
* in the job conf usually appears as the
* following in the job conf :
* " mapred . child . java . opts " : " - Xmx3072M "
* or
* " mapred . child . java . opts " : " - Xmx1024m - verbose : gc - Xloggc : / tmp / @ taskid @ . gc
* @ return xmx value in MB */
public static long getXmxValue ( String javaChildOptsStr ) { } }
|
long retVal = 0L ; String valueStr = extractXmxValueStr ( javaChildOptsStr ) ; char lastChar = valueStr . charAt ( valueStr . length ( ) - 1 ) ; try { if ( Character . isLetter ( lastChar ) ) { String xmxValStr = valueStr . substring ( 0 , valueStr . length ( ) - 1 ) ; retVal = Long . parseLong ( xmxValStr ) ; switch ( lastChar ) { case 'M' : case 'm' : // do nothing , since it ' s already in megabytes
break ; case 'K' : case 'k' : // convert kilobytes to megabytes
retVal /= 1024 ; break ; case 'G' : case 'g' : // convert gigabytes to megabtyes
retVal *= 1024 ; break ; default : throw new ProcessingException ( "Unable to get the Xmx value from " + javaChildOptsStr + " invalid value for Xmx " + xmxValStr ) ; } } else { retVal = Long . parseLong ( valueStr ) ; // now convert to megabytes
// since this was in bytes since the last char was absent
retVal /= ( 1024 * 1024 ) ; } } catch ( NumberFormatException nfe ) { LOG . error ( "Unable to get the Xmx value from " + javaChildOptsStr + "\n" , nfe ) ; throw new ProcessingException ( "Unable to get the Xmx value from " + javaChildOptsStr , nfe ) ; } return retVal ;
|
public class AbstractGrabber { /** * This method is called by a video frame , when it is being recycled .
* @ param frame the frame being recycled . */
final void recycleVideoBuffer ( BaseVideoFrame frame ) { } }
|
// Make sure we are in started state
if ( state . isStarted ( ) ) { enqueueBuffer ( object , frame . getBufferInex ( ) ) ; synchronized ( availableVideoFrames ) { availableVideoFrames . add ( frame ) ; availableVideoFrames . notify ( ) ; } }
|
public class CertificateValidity { /** * Get the attribute value . */
public Date get ( String name ) throws IOException { } }
|
if ( name . equalsIgnoreCase ( NOT_BEFORE ) ) { return ( getNotBefore ( ) ) ; } else if ( name . equalsIgnoreCase ( NOT_AFTER ) ) { return ( getNotAfter ( ) ) ; } else { throw new IOException ( "Attribute name not recognized by " + "CertAttrSet: CertificateValidity." ) ; }
|
public class AbstractRadialBargraph { /** * Returns the bargraph track image
* with the given with and height .
* @ param WIDTH
* @ param START _ ANGLE
* @ param ANGLE _ EXTEND
* @ param APEX _ ANGLE
* @ param BARGRAPH _ OFFSET
* @ return buffered image containing the bargraph track image */
protected BufferedImage create_BARGRAPH_TRACK_Image ( final int WIDTH , final double START_ANGLE , final double ANGLE_EXTEND , final double APEX_ANGLE , final double BARGRAPH_OFFSET ) { } }
|
return create_BARGRAPH_TRACK_Image ( WIDTH , START_ANGLE , ANGLE_EXTEND , APEX_ANGLE , BARGRAPH_OFFSET , null ) ;
|
public class PackageCommand { /** * Determine the default package format for the current operating system .
* @ return " pax " on z / OS and " zip " for all others */
private String getDefaultPackageExtension ( ) { } }
|
// Default package format on z / OS is a pax
if ( "z/OS" . equalsIgnoreCase ( bootProps . get ( "os.name" ) ) ) { return "pax" ; } if ( PackageProcessor . IncludeOption . RUNNABLE . matches ( includeOption ) ) { return "jar" ; } return "zip" ;
|
public class TypeValidator { /** * Expect the type to autobox to be an Iterable .
* @ return True if there was no warning , false if there was a mismatch . */
boolean expectAutoboxesToIterable ( Node n , JSType type , String msg ) { } }
|
// Note : we don ' t just use JSType . autobox ( ) here because that removes null and undefined .
// We want to keep null and undefined around .
if ( type . isUnionType ( ) ) { for ( JSType alt : type . toMaybeUnionType ( ) . getAlternates ( ) ) { alt = alt . isBoxableScalar ( ) ? alt . autoboxesTo ( ) : alt ; if ( ! alt . isSubtypeOf ( getNativeType ( ITERABLE_TYPE ) ) ) { mismatch ( n , msg , type , ITERABLE_TYPE ) ; return false ; } } } else { JSType autoboxedType = type . isBoxableScalar ( ) ? type . autoboxesTo ( ) : type ; if ( ! autoboxedType . isSubtypeOf ( getNativeType ( ITERABLE_TYPE ) ) ) { mismatch ( n , msg , type , ITERABLE_TYPE ) ; return false ; } } return true ;
|
public class CmsUserInfo { /** * Shows the user preferences dialog . < p > */
void editUserData ( ) { } }
|
if ( m_context instanceof CmsEmbeddedDialogContext ) { ( ( CmsEmbeddedDialogContext ) m_context ) . closeWindow ( true ) ; } else { A_CmsUI . get ( ) . closeWindows ( ) ; } CmsUserDataDialog dialog = new CmsUserDataDialog ( m_context ) ; m_context . start ( CmsVaadinUtils . getMessageText ( Messages . GUI_USER_EDIT_0 ) , dialog ) ;
|
public class VTensor { /** * Sets the value of the entry corresponding to the given indices .
* @ param indices The indices of the multi - dimensional array .
* @ param val The value to set .
* @ return The previous value . */
public double set ( int [ ] indices , double val ) { } }
|
checkIndices ( indices ) ; int c = getConfigIdx ( indices ) ; return values . set ( c , val ) ;
|
public class Ifc2x3tc1FactoryImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
public IfcAssemblyPlaceEnum createIfcAssemblyPlaceEnumFromString ( EDataType eDataType , String initialValue ) { } }
|
// Generated EMF factory method : resolve the enum literal , failing fast on unknown values .
IfcAssemblyPlaceEnum result = IfcAssemblyPlaceEnum . get ( initialValue ) ; if ( result == null ) throw new IllegalArgumentException ( "The value '" + initialValue + "' is not a valid enumerator of '" + eDataType . getName ( ) + "'" ) ; return result ;
|
public class Matrix4f { /** * / * ( non - Javadoc )
* @ see org . joml . Matrix4fc # mul ( org . joml . Matrix4x3fc , org . joml . Matrix4f ) */
public Matrix4f mul ( Matrix4x3fc right , Matrix4f dest ) { } }
|
if ( ( properties & PROPERTY_IDENTITY ) != 0 ) return dest . set ( right ) ; else if ( ( right . properties ( ) & PROPERTY_IDENTITY ) != 0 ) return dest . set ( this ) ; else if ( ( properties & PROPERTY_PERSPECTIVE ) != 0 && ( right . properties ( ) & PROPERTY_AFFINE ) != 0 ) return mulPerspectiveAffine ( right , dest ) ; return mulGeneric ( right , dest ) ;
|
public class NameExtractor { /** * This method will find all the attached preps of a phrase .
* Scans backward and then forward from the phrase at { @ code index } to collect the
* comma / conjunction - separated sequence of adjacent name phrases , then attaches the
* shared preposition to every phrase in that sequence .
* @ param sentenceToken the tokens of the sentence
* @ param sentencePhrase the phrases of the sentence
* @ param index position ( in { @ code sentencePhrase } ) of the phrase to start from */
private static void getAttachedPrep ( final List < Token > sentenceToken , final List < Phrase > sentencePhrase , final int index ) { } }
|
final String prep ; boolean nameSequenceMeetEnd = true ; final Collection < Phrase > phraseSequence = new HashSet < > ( ) ; int phrasePtr = index ; Phrase currentNamePhrase = sentencePhrase . get ( phrasePtr ) ; int phrasePtrInSentence ; if ( currentNamePhrase . attachedWordMap . get ( "prep" ) != null ) return ; // we need to find out all the name phrases in a sequence :
// Example : Students who came from China , America and Australia are here .
// in the example above , China , America and Australia are three name phrases all attached to the prep : from .
// first loop , search forward to find all the names before the pointer and the attached prep
while ( true ) { currentNamePhrase = sentencePhrase . get ( phrasePtr ) ; phrasePtrInSentence = currentNamePhrase . phrasePosition ; if ( phrasePtrInSentence == 0 ) return ; final String attachedWord = sentenceToken . get ( phrasePtrInSentence - 1 ) . text ; // if the attached word is a comma or ' and ' / ' or ' , we consider it as a conj .
if ( "," . equalsIgnoreCase ( attachedWord ) ) { nameSequenceMeetEnd = false ; phraseSequence . add ( currentNamePhrase ) ; } else if ( "and" . equalsIgnoreCase ( attachedWord ) || "or" . equalsIgnoreCase ( attachedWord ) ) { // meet end
phraseSequence . add ( currentNamePhrase ) ; nameSequenceMeetEnd = true ; } else if ( Dictionary . checkup ( toEngLowerCase ( attachedWord ) ) != null && Dictionary . checkup ( toEngLowerCase ( attachedWord ) ) . startsWith ( "IN" ) ) { prep = attachedWord ; phraseSequence . add ( currentNamePhrase ) ; break ; } else { return ; } phrasePtr -- ; if ( phrasePtr < 0 ) return ; if ( sentencePhrase . get ( phrasePtr ) . isDate ) return ; if ( sentencePhrase . get ( phrasePtr ) . phrasePosition + sentencePhrase . get ( phrasePtr ) . phraseLength + 1 != phrasePtrInSentence ) return ; // method terminates if the phrase before is not next to this phrase . This means the name sequence is broken .
} phrasePtr = index + 1 ; // second loop , search backward to find the names behind the pointer
// noinspection LoopConditionNotUpdatedInsideLoop
while ( ! nameSequenceMeetEnd ) { if ( phrasePtr == sentencePhrase . size ( ) ) return ; currentNamePhrase = sentencePhrase . get ( phrasePtr ) ; if ( currentNamePhrase . isDate ) return ; phrasePtrInSentence = currentNamePhrase . phrasePosition ; if ( sentencePhrase . get ( phrasePtr - 1 ) . phrasePosition + sentencePhrase . get ( phrasePtr - 1 ) . phraseLength + 1 != currentNamePhrase . phrasePosition ) return ; // method terminates if the phrase after is not next to this phrase .
final String attachedWord = sentenceToken . get ( phrasePtrInSentence - 1 ) . text ; // if the attached word is a comma or ' and ' / ' or ' , we consider it as a conj .
if ( "," . equalsIgnoreCase ( attachedWord ) ) { phraseSequence . add ( currentNamePhrase ) ; } else if ( "and" . equalsIgnoreCase ( attachedWord ) || "or" . equalsIgnoreCase ( attachedWord ) ) { // meet end
phraseSequence . add ( currentNamePhrase ) ; break ; } else { return ; } phrasePtr ++ ; } // finally , attach the prep with the words in the phraseSequence
// ' prep ' is definitely assigned here : the only non - returning exit from the first loop is the break that sets it .
for ( final Phrase name : phraseSequence ) { name . attachedWordMap . put ( "prep" , prep ) ; }
|
public class OSSUnderFileSystemFactory { /** * @ param conf optional configuration object for the UFS
* @ return true if both access , secret and endpoint keys are present , false otherwise */
private boolean checkOSSCredentials ( UnderFileSystemConfiguration conf ) { } }
|
return conf . isSet ( PropertyKey . OSS_ACCESS_KEY ) && conf . isSet ( PropertyKey . OSS_SECRET_KEY ) && conf . isSet ( PropertyKey . OSS_ENDPOINT_KEY ) ;
|
public class MultiLayerNetwork { /** * Set the parameters of the netowrk . Note that the parameter keys must match the format as described in { @ link # getParam ( String ) }
* and { @ link # paramTable ( ) } . Note that the values of the parameters used as an argument to this method are copied -
* i . e . , it is safe to later modify / reuse the values in the provided paramTable without this impacting the network .
* @ param paramTable Parameters to set */
@ Override public void setParamTable ( Map < String , INDArray > paramTable ) { } }
|
Map < String , INDArray > currParamTable = paramTable ( ) ; if ( ! currParamTable . keySet ( ) . equals ( paramTable . keySet ( ) ) ) { throw new IllegalArgumentException ( "Cannot set param table: parameter keys do not match.\n" + "Current: " + currParamTable . keySet ( ) + "\nTo set: " + paramTable . keySet ( ) ) ; } for ( String s : paramTable . keySet ( ) ) { INDArray curr = currParamTable . get ( s ) ; INDArray toSet = paramTable . get ( s ) ; if ( ! Arrays . equals ( curr . shape ( ) , toSet . shape ( ) ) ) { throw new IllegalArgumentException ( "Cannot set parameter table: parameter \"" + s + "\" shapes " + "do not match. Current = " + Arrays . toString ( curr . shape ( ) ) + ", to set = " + Arrays . toString ( toSet . shape ( ) ) ) ; } } // Now that we ' ve checked ALL params ( to avoid leaving net in half - modified state )
for ( String s : paramTable . keySet ( ) ) { INDArray curr = currParamTable . get ( s ) ; INDArray toSet = paramTable . get ( s ) ; curr . assign ( toSet ) ; }
|
public class MCF1Impl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
@ Override public void eUnset ( int featureID ) { } }
|
// Generated EMF reflective unset : restore defaults for the known features , else defer to super .
switch ( featureID ) { case AfplibPackage . MCF1__RG_LENGTH : setRGLength ( RG_LENGTH_EDEFAULT ) ; return ; case AfplibPackage . MCF1__RG : getRG ( ) . clear ( ) ; return ; } super . eUnset ( featureID ) ;
|
public class SessionSimpleHashMap { /** * This method puts an entry into the HashMap . It does follow HashMap semantics by checking for
* an existing entry and returning that entry when we replace it . However , the session component
* ensures there is not an existing entry prior to calling put , so we don ' t expect to ever get
* a non - null value back .
* @ see java . util . Map # put ( java . lang . Object , java . lang . Object ) */
@ Override public synchronized Object put ( Object key , Object value ) { } }
|
if ( com . ibm . ejs . ras . TraceComponent . isAnyTracingEnabled ( ) && LoggingUtil . SESSION_LOGGER_CORE . isLoggable ( Level . FINER ) ) { // PM16861
StringBuffer sb = new StringBuffer ( "{" ) . append ( key ) . append ( "} " ) . append ( appNameForLogging ) ; LoggingUtil . SESSION_LOGGER_CORE . entering ( methodClassName , methodNames [ PUT ] , sb . toString ( ) ) ; } Object replacedEntry = null ; // First see if replacing an existing entry
// Replace in the main table if present ; otherwise check the overflow table ( under its own lock ) .
Object currEntry = super . get ( key ) ; if ( currEntry != null ) { if ( com . ibm . ejs . ras . TraceComponent . isAnyTracingEnabled ( ) && LoggingUtil . SESSION_LOGGER_CORE . isLoggable ( Level . FINE ) ) { LoggingUtil . SESSION_LOGGER_CORE . exiting ( methodClassName , methodNames [ PUT ] , "replacing existing entry" ) ; } replacedEntry = super . put ( key , value ) ; } else { if ( ( overflowAllowed ) && ( OverflowTabl != null ) ) { synchronized ( OverflowTablLock ) { currEntry = OverflowTabl . get ( key ) ; if ( currEntry != null ) { if ( com . ibm . ejs . ras . TraceComponent . isAnyTracingEnabled ( ) && LoggingUtil . SESSION_LOGGER_CORE . isLoggable ( Level . FINE ) ) { LoggingUtil . SESSION_LOGGER_CORE . exiting ( methodClassName , methodNames [ PUT ] , "replacing existing entry in overflow Hashmap" ) ; } replacedEntry = OverflowTabl . put ( key , value ) ; } } } } // Handle new entries
// NOTE(review): sessionLiveCountInc fires before the capacity check below , so the live
// count is incremented even when TooManySessionsException is thrown — presumably
// reconciled by the caller ; confirm .
if ( currEntry == null ) { currentSize ++ ; // increment pmi counter
if ( _iStore . getStoreCallback ( ) != null ) { _iStore . getStoreCallback ( ) . sessionLiveCountInc ( value ) ; } if ( currentSize <= maxSize ) { replacedEntry = super . put ( key , value ) ; if ( com . ibm . ejs . ras . TraceComponent . isAnyTracingEnabled ( ) && LoggingUtil . SESSION_LOGGER_CORE . isLoggable ( Level . FINE ) ) { LoggingUtil . SESSION_LOGGER_CORE . exiting ( methodClassName , methodNames [ PUT ] , "add new entry to Hashmap" ) ; } } else { // overflow
currentSize -- ; if ( overflowAllowed ) { synchronized ( OverflowTablLock ) { if ( OverflowTabl == null ) { OverflowTabl = new HashMap ( currentSize , 1 ) ; if ( com . ibm . ejs . ras . TraceComponent . isAnyTracingEnabled ( ) && LoggingUtil . SESSION_LOGGER_CORE . isLoggable ( Level . FINE ) ) { LoggingUtil . SESSION_LOGGER_CORE . exiting ( methodClassName , methodNames [ PUT ] , "Creating Overflow Table" ) ; } } replacedEntry = OverflowTabl . put ( key , value ) ; if ( com . ibm . ejs . ras . TraceComponent . isAnyTracingEnabled ( ) && LoggingUtil . SESSION_LOGGER_CORE . isLoggable ( Level . FINE ) ) { LoggingUtil . SESSION_LOGGER_CORE . exiting ( methodClassName , methodNames [ PUT ] , "add new entry to overflow Hashmap" ) ; } } } else throw new TooManySessionsException ( ) ; // no overflow allowed
} } return replacedEntry ;
|
public class Gauge { /** * Adds the given Marker to the list of markers .
* @ param MARKER */
public void addMarker ( final Marker MARKER ) { } }
|
if ( null == MARKER ) return ; markers . add ( MARKER ) ; Collections . sort ( markers , new MarkerComparator ( ) ) ; fireUpdateEvent ( REDRAW_EVENT ) ;
|
public class CPDefinitionPersistenceImpl { /** * Returns the cp definitions before and after the current cp definition in the ordered set where CProductId = & # 63 ; and status = & # 63 ; .
* @ param CPDefinitionId the primary key of the current cp definition
* @ param CProductId the c product ID
* @ param status the status
* @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > )
* @ return the previous , current , and next cp definition
* @ throws NoSuchCPDefinitionException if a cp definition with the primary key could not be found */
@ Override public CPDefinition [ ] findByC_S_PrevAndNext ( long CPDefinitionId , long CProductId , int status , OrderByComparator < CPDefinition > orderByComparator ) throws NoSuchCPDefinitionException { } }
|
// Fetch the anchor entity first , then query its previous / next neighbours within the same session .
CPDefinition cpDefinition = findByPrimaryKey ( CPDefinitionId ) ; Session session = null ; try { session = openSession ( ) ; CPDefinition [ ] array = new CPDefinitionImpl [ 3 ] ; array [ 0 ] = getByC_S_PrevAndNext ( session , cpDefinition , CProductId , status , orderByComparator , true ) ; array [ 1 ] = cpDefinition ; array [ 2 ] = getByC_S_PrevAndNext ( session , cpDefinition , CProductId , status , orderByComparator , false ) ; return array ; } catch ( Exception e ) { throw processException ( e ) ; } finally { closeSession ( session ) ; }
|
public class NearestNeighborAffinityMatrixBuilder {

    /**
     * Check if the index array contains {@code i}.
     *
     * TODO: sort arrays, use binary search!
     *
     * @param is Array to search
     * @param i Index to search
     * @return Position of index i, or {@code -1} if not found.
     */
    protected static int containsIndex(int[] is, int i) {
        // Linear scan; the position of the first match is returned.
        int pos = 0;
        while (pos < is.length) {
            if (is[pos] == i) {
                return pos;
            }
            ++pos;
        }
        return -1;
    }
}
|
public class ButterKnife {

    /**
     * Returns true when the return value should be propagated. Use a default otherwise.
     *
     * @param method the bound method being validated
     * @param expected the return type the binding expects
     * @return {@code true} if the method's return value should be used
     * @throws IllegalStateException if the method returns an incompatible type
     */
    private static boolean validateReturnType(Method method, Class<?> expected) {
        final Class<?> actual = method.getReturnType();
        // void methods have nothing to propagate.
        if (actual == void.class) {
            return false;
        }
        if (actual == expected) {
            return true;
        }
        String expectedType = "'" + expected.getName() + "'";
        if (expected != void.class) {
            expectedType = "'void' or " + expectedType;
        }
        throw new IllegalStateException(method.getDeclaringClass().getName() + "." + method.getName()
                + " must have return type of " + expectedType);
    }
}
|
public class SubjectAlternativeNameExtension { /** * Get the attribute value . */
public GeneralNames get ( String name ) throws IOException { } }
|
if ( name . equalsIgnoreCase ( SUBJECT_NAME ) ) { return ( names ) ; } else { throw new IOException ( "Attribute name not recognized by " + "CertAttrSet:SubjectAlternativeName." ) ; }
|
public class AbstractBindingTypeDef { protected boolean isSimpleNamingAutoBindable ( String propertyName , Class < ? > propertyType , ComponentDef cd ) { } }
|
final String componentName = cd . getComponentName ( ) ; if ( componentName == null ) { return false ; } if ( componentName . equals ( propertyName ) ) { // e . g . seaLogic for SeaLogic
return true ; } if ( componentName . endsWith ( ContainerConstants . PACKAGE_SEP + propertyName ) ) { // e . g . sea [ _ landLogic ]
return true ; } return false ;
|
public class CookieHelper { /** * Need to call this by reflection for backwards compatibility with Servlet 2.5 */
static boolean isHttpOnlyReflect ( javax . servlet . http . Cookie servletCookie ) { } }
|
try { return ( Boolean ) servletCookie . getClass ( ) . getMethod ( "isHttpOnly" ) . invoke ( servletCookie ) ; } catch ( Exception e ) { // Cookie . logger . warn ( " You are trying to get HttpOnly from a cookie , but it appears you are running on Servlet version before 3.0 . Returning false . . which can be false ! " ) ;
return false ; // return default . Should we be throwing exception here ?
}
|
public class FatLfnDirectory { /** * { @ inheritDoc }
* < / p > < p >
* According to the FAT file system specification , leading and trailing
* spaces in the { @ code name } are ignored by this method .
* @ param name { @ inheritDoc }
* @ return { @ inheritDoc }
* @ throws IOException { @ inheritDoc } */
@ Override public FatLfnDirectoryEntry addDirectory ( String name ) throws IOException { } }
|
checkWritable ( ) ; checkUniqueName ( name ) ; name = name . trim ( ) ; final ShortName sn = makeShortName ( name ) ; final FatDirectoryEntry real = dir . createSub ( fat ) ; real . setShortName ( sn ) ; final FatLfnDirectoryEntry e = new FatLfnDirectoryEntry ( this , real , name ) ;
// Roll back the allocated cluster chain and directory entry if writing the LFN entries fails .
try { dir . addEntries ( e . compactForm ( ) ) ; } catch ( IOException ex ) { final ClusterChain cc = new ClusterChain ( fat , real . getStartCluster ( ) , false ) ; cc . setChainLength ( 0 ) ; dir . removeEntry ( real ) ; throw ex ; }
// Index the new entry under both its short and ( lower - cased ) long name , then persist .
shortNameIndex . put ( sn , e ) ; longNameIndex . put ( name . toLowerCase ( Locale . ROOT ) , e ) ; getDirectory ( real ) ; flush ( ) ; return e ;
|
public class BPMActivator { /** * { @ inheritDoc } */
@ Override public ServiceHandler activateService ( QName name , ComponentModel config ) { } }
|
return new BPMExchangeHandler ( ( BPMComponentImplementationModel ) config . getImplementation ( ) , getServiceDomain ( ) , name ) ;
|
public class DurationField { /** * Because parsing done by base class returns a different date than parsing
* done by the user or converting duration to a date . But for the
* DurationField comparison only the time is important . This function helps
* comparing the time and ignores the values for day , month and year .
* @ param d1
* date , which time will compared with the time of d2
* @ param d2
* date , which time will compared with the time of d1
* @ return the value 0 if the time represented d1 is equal to the time
* represented by d2 ; a value less than 0 if the time of d1 is
* before the time of d2 ; and a value greater than 0 if the time of
* d1 is after the time represented by d2. */
private int compareTimeOfDates ( final Date d1 , final Date d2 ) { } }
|
final LocalTime lt1 = LocalDateTime . ofInstant ( d1 . toInstant ( ) , ZONEID_UTC ) . toLocalTime ( ) ; final LocalTime lt2 = LocalDateTime . ofInstant ( d2 . toInstant ( ) , ZONEID_UTC ) . toLocalTime ( ) ; return lt1 . compareTo ( lt2 ) ;
|
public class GraphAnalyticBase { /** * Set the parallelism for this analytic ' s operators . This parameter is
* necessary because processing a small amount of data with high operator
* parallelism is slow and wasteful with memory and buffers .
* < p > Operator parallelism should be set to this given value unless
* processing asymptotically more data , in which case the default job
* parallelism should be inherited .
* @ param parallelism operator parallelism
* @ return this */
public GraphAnalyticBase < K , VV , EV , T > setParallelism ( int parallelism ) { } }
|
Preconditions . checkArgument ( parallelism > 0 || parallelism == PARALLELISM_DEFAULT , "The parallelism must be at least one, or ExecutionConfig.PARALLELISM_DEFAULT (use system default)." ) ; this . parallelism = parallelism ; return this ;
|
public class RoutingSystemBroadcast { /** * with a secure APDU or sync . req / res , the SBC currently has to be checked at an upper layer */
public static boolean isSystemBroadcast ( final CEMI frame ) { } }
|
if ( frame . getMessageCode ( ) == CEMILData . MC_LDATA_IND && frame instanceof CEMILData ) { final CEMILData ldata = ( CEMILData ) frame ; final KNXAddress dst = ldata . getDestination ( ) ; return ldata . isSystemBroadcast ( ) && dst instanceof GroupAddress && dst . getRawAddress ( ) == 0 ; } return false ;
|
public class DefaultElementProducer { /** * Calls { @ link # createTableCell ( java . lang . Object , java . util . Collection , boolean ) }
* @ param val the cell value
* @ param style the single styler to apply , wrapped in a collection before delegating
* @ param noWrap whether text wrapping is disabled for the cell
* @ return the created table cell
* @ throws com . vectorprint . VectorPrintException */
public PdfPCell createTableCell ( Object val , BaseStyler style , boolean noWrap ) throws VectorPrintException { } }
|
// Convenience overload : wrap the single styler in a collection and delegate .
return createTableCell ( val , toCollection ( style ) , noWrap ) ;
|
public class Ifc2x3tc1PackageImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
public EClass getIfcConstructionEquipmentResource ( ) { } }
|
// Lazily resolve the EClass from the registered package metadata ( classifier index 111 ) .
// NOTE(review): the lazy init is unsynchronized — presumably only accessed during
// single - threaded model initialization ; confirm if used concurrently .
if ( ifcConstructionEquipmentResourceEClass == null ) { ifcConstructionEquipmentResourceEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( Ifc2x3tc1Package . eNS_URI ) . getEClassifiers ( ) . get ( 111 ) ; } return ifcConstructionEquipmentResourceEClass ;
|
public class BaseTaglet { /** * { @ inheritDoc }
* @ throws UnsupportedTagletOperationException thrown when the method is
* not supported by the taglet . */
public Content getTagletOutput ( Element element , DocTree tag , TagletWriter writer ) { } }
|
// Default implementation : subclasses that support element - level output must override .
throw new UnsupportedTagletOperationException ( "Method not supported in taglet " + getName ( ) + "." ) ;
|
public class CreateTableRequest { /** * Adds split at the specified key to the configuration
* @ param key */
public CreateTableRequest addSplit ( ByteString key ) { } }
|
Preconditions . checkNotNull ( key ) ; createTableRequest . addInitialSplitsBuilder ( ) . setKey ( key ) ; return this ;
|
public class BRGImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
@ Override public Object eGet ( int featureID , boolean resolve , boolean coreType ) { } }
|
// Generated EMF reflective accessor for the BRG features ; unknown features defer to super .
switch ( featureID ) { case AfplibPackage . BRG__RGRP_NAME : return getRGrpName ( ) ; case AfplibPackage . BRG__TRIPLETS : return getTriplets ( ) ; } return super . eGet ( featureID , resolve , coreType ) ;
|
public class SweepHullDelaunay2D { /** * Flip a single triangle , if necessary .
* The three sections below are symmetric : each edge ( AB , BC , CA ) is tested against
* the neighbouring triangle , and the shared edge is flipped when the opposite point
* violates the Delaunay in - circle criterion . At most one flip is performed per call .
* @ param i Triangle number
* @ param flipped Bitset to modify ( both affected triangles are marked )
* @ return number of other triangle , or - 1 */
int flipTriangle ( int i , long [ ] flipped ) { } }
|
final Triangle cur = tris . get ( i ) ; // Test edge AB :
if ( cur . ab >= 0 ) { final int ot = cur . ab ; Triangle oth = tris . get ( ot ) ; Orientation orient = cur . findOrientation ( oth ) ; final int opp , lef , rig ; switch ( orient ) { case ORIENT_AB_BA : opp = oth . c ; lef = oth . bc ; rig = oth . ca ; break ; case ORIENT_AB_CB : opp = oth . a ; lef = oth . ca ; rig = oth . ab ; break ; case ORIENT_AB_AC : opp = oth . b ; lef = oth . ab ; rig = oth . bc ; break ; default : throw new RuntimeException ( "Neighbor triangles not aligned?" ) ; } if ( cur . inCircle ( points . get ( opp ) ) ) { // Replace edge AB , connect c with " opp " instead .
final int a = cur . c , b = cur . a , c = opp , d = cur . b ; final int ab = cur . ca , bc = lef , cd = rig , da = cur . bc ; final int ca = ot , ac = i ; // Update current :
cur . set ( a , ab , b , bc , c , ca ) ; cur . updateCircumcircle ( points ) ; // Update other :
oth . set ( c , cd , d , da , a , ac ) ; oth . updateCircumcircle ( points ) ; // Update tri touching on BC and DA :
if ( bc >= 0 ) { tris . get ( bc ) . replaceEdge ( c , b , ot , i ) ; } if ( da >= 0 ) { tris . get ( da ) . replaceEdge ( a , d , i , ot ) ; } BitsUtil . setI ( flipped , i ) ; BitsUtil . setI ( flipped , ot ) ; return ot ; } } // Test edge BC :
if ( cur . bc >= 0 ) { final int ot = cur . bc ; Triangle oth = tris . get ( ot ) ; Orientation orient = cur . findOrientation ( oth ) ; final int opp , lef , rig ; switch ( orient ) { case ORIENT_BC_BA : opp = oth . c ; lef = oth . bc ; rig = oth . ca ; break ; case ORIENT_BC_CB : opp = oth . a ; lef = oth . ca ; rig = oth . ab ; break ; case ORIENT_BC_AC : opp = oth . b ; lef = oth . ab ; rig = oth . bc ; break ; default : throw new RuntimeException ( "Neighbor triangles not aligned? " + orient ) ; } if ( cur . inCircle ( points . get ( opp ) ) ) { // Replace edge BC , connect A with " opp " instead .
final int a = cur . a , b = cur . b , c = opp , d = cur . c ; final int ab = cur . ab , bc = lef , cd = rig , da = cur . ca ; final int ca = ot , ac = i ; // Update current :
cur . set ( a , ab , b , bc , c , ca ) ; cur . updateCircumcircle ( points ) ; // Update other :
oth . set ( c , cd , d , da , a , ac ) ; oth . updateCircumcircle ( points ) ; // Update tri touching on BC and DA :
if ( bc >= 0 ) { tris . get ( bc ) . replaceEdge ( c , b , ot , i ) ; } if ( da >= 0 ) { tris . get ( da ) . replaceEdge ( a , d , i , ot ) ; } BitsUtil . setI ( flipped , i ) ; BitsUtil . setI ( flipped , ot ) ; return ot ; } } // Test edge CA :
if ( cur . ca >= 0 ) { final int ot = cur . ca ; Triangle oth = tris . get ( ot ) ; Orientation orient = cur . findOrientation ( oth ) ; final int opp , lef , rig ; switch ( orient ) { case ORIENT_CA_BA : opp = oth . c ; lef = oth . bc ; rig = oth . ca ; break ; case ORIENT_CA_CB : opp = oth . a ; lef = oth . ca ; rig = oth . ab ; break ; case ORIENT_CA_AC : opp = oth . b ; lef = oth . ab ; rig = oth . bc ; break ; default : throw new RuntimeException ( "Neighbor triangles not aligned?" ) ; } if ( cur . inCircle ( points . get ( opp ) ) ) { // Replace edge CA , connect B with " opp " instead .
final int a = cur . b , b = cur . c , c = opp , d = cur . a ; final int ab = cur . bc , bc = lef , cd = rig , da = cur . ab ; final int ca = ot , ac = i ; // Update current :
cur . set ( a , ab , b , bc , c , ca ) ; cur . updateCircumcircle ( points ) ; // Update other :
oth . set ( c , cd , d , da , a , ac ) ; oth . updateCircumcircle ( points ) ; // Update tri touching on BC and DA :
if ( bc >= 0 ) { tris . get ( bc ) . replaceEdge ( c , b , ot , i ) ; } if ( da >= 0 ) { tris . get ( da ) . replaceEdge ( a , d , i , ot ) ; } BitsUtil . setI ( flipped , i ) ; BitsUtil . setI ( flipped , ot ) ; return ot ; } } return - 1 ;
|
public class Year { /** * Adjusts the specified temporal object to have this year .
* This returns a temporal object of the same observable type as the input
* with the year changed to be the same as this .
* The adjustment is equivalent to using { @ link Temporal # with ( TemporalField , long ) }
* passing { @ link ChronoField # YEAR } as the field .
* If the specified temporal object does not use the ISO calendar system then
* a { @ code DateTimeException } is thrown .
* In most cases , it is clearer to reverse the calling pattern by using
* { @ link Temporal # with ( TemporalAdjuster ) } :
* < pre >
* / / these two lines are equivalent , but the second approach is recommended
* temporal = thisYear . adjustInto ( temporal ) ;
* temporal = temporal . with ( thisYear ) ;
* < / pre >
* This instance is immutable and unaffected by this method call .
* @ param temporal the target object to be adjusted , not null
* @ return the adjusted object , not null
* @ throws DateTimeException if unable to make the adjustment
* @ throws ArithmeticException if numeric overflow occurs */
@ Override public Temporal adjustInto ( Temporal temporal ) { } }
|
if ( Chronology . from ( temporal ) . equals ( IsoChronology . INSTANCE ) == false ) { throw new DateTimeException ( "Adjustment only supported on ISO date-time" ) ; } return temporal . with ( YEAR , year ) ;
|
public class CachingOmemoStore { /** * Return the { @ link KeyCache } object of an { @ link OmemoManager } .
* @ param device
* @ return */
private KeyCache < T_IdKeyPair , T_IdKey , T_PreKey , T_SigPreKey , T_Sess > getCache ( OmemoDevice device ) { } }
|
KeyCache < T_IdKeyPair , T_IdKey , T_PreKey , T_SigPreKey , T_Sess > cache = caches . get ( device ) ; if ( cache == null ) { cache = new KeyCache < > ( ) ; caches . put ( device , cache ) ; } return cache ;
|
public class BashTabCompletionDoclet { /** * The Index file in the Bash Completion Doclet is what generates the actual tab - completion script .
* This will actually write out the shell completion output file .
* The Freemarker instance will see a top - level map that has two keys in it .
* The first key is for caller script options :
* SimpleMap callerScriptOptions = SimpleMap {
* " callerScriptName " : caller script name
* " callerScriptPrefixLegalArgs " : caller Script Prefix Legal Args
* " callerScriptPrefixArgValueTypes " : caller Script Prefix Arg Value Types
* " callerScriptPrefixMutexArgs " : caller Script Prefix Mutex Args
* " callerScriptPrefixAliasArgs " : caller Script Prefix Alias Args
* " callerScriptPrefixMinOccurrences " : caller Script Prefix Min Occurrences
* " callerScriptPrefixMaxOccurrences " : caller Script Prefix Max Occurrences
* " hasCallerScriptPrefixArgs " : has Caller Script Prefix Args
* " callerScriptPostfixLegalArgs " : caller Script Postfix Legal Args
* " callerScriptPostfixArgValueTypes " : caller Script Postfix Arg Value Types
* " callerScriptPostfixMutexArgs " : caller Script Postfix Mutex Args
* " callerScriptPostfixAliasArgs " : caller Script Postfix Alias Args
* " callerScriptPostfixMinOccurrences " : caller Script Postfix Min Occurrences
* " callerScriptPostfixMaxOccurrences " : caller Script Postfix Max Occurrences
* " hasCallerScriptPostfixArgs " : has Caller Script Postfix Args
* The second key is for tool options :
* SimpleMap tools = SimpleMap { ToolName : MasterPropertiesMap }
* where
* MasterPropertiesMap is a map containing the following Keys :
* all
* common
* positional
* hidden
* advanced
* deprecated
* optional
* dependent
* required
* Each of those keys maps to a List & lt ; SimpleMap & gt ; representing each property .
* These property maps each contain the following keys :
* kind
* name
* summary
* fulltext
* otherArgumentRequired
* synonyms
* exclusiveOf
* type
* options
* attributes
* required
* minRecValue
* maxRecValue
* minValue
* maxValue
* defaultValue
* minElements
* maxElements
* @ param cfg
* @ param workUnitList
* @ param groupMaps
* @ throws IOException */
@ Override protected void processIndexTemplate ( final Configuration cfg , final List < DocWorkUnit > workUnitList , final List < Map < String , String > > groupMaps ) throws IOException { } }
|
// Create a root map for all the work units so we can access all the info we need :
final Map < String , Object > propertiesMap = new HashMap < > ( ) ; workUnits . stream ( ) . forEach ( workUnit -> propertiesMap . put ( workUnit . getName ( ) , workUnit . getRootMap ( ) ) ) ; // Add everything into a nice package that we can iterate over
// while exposing the command line program names as keys :
final Map < String , Object > rootMap = new HashMap < > ( ) ; rootMap . put ( "tools" , propertiesMap ) ; // Add the caller script options into another top - level tree node :
final Map < String , Object > callerScriptOptionsMap = new HashMap < > ( ) ; callerScriptOptionsMap . put ( "callerScriptName" , callerScriptName ) ; callerScriptOptionsMap . put ( "callerScriptPrefixLegalArgs" , callerScriptPrefixLegalArgs ) ; callerScriptOptionsMap . put ( "callerScriptPrefixArgValueTypes" , callerScriptPrefixArgValueTypes ) ; callerScriptOptionsMap . put ( "callerScriptPrefixMutexArgs" , callerScriptPrefixMutexArgs ) ; callerScriptOptionsMap . put ( "callerScriptPrefixAliasArgs" , callerScriptPrefixAliasArgs ) ; callerScriptOptionsMap . put ( "callerScriptPrefixMinOccurrences" , callerScriptPrefixMinOccurrences ) ; callerScriptOptionsMap . put ( "callerScriptPrefixMaxOccurrences" , callerScriptPrefixMaxOccurrences ) ; callerScriptOptionsMap . put ( "callerScriptPostfixLegalArgs" , callerScriptPostfixLegalArgs ) ; callerScriptOptionsMap . put ( "callerScriptPostfixArgValueTypes" , callerScriptPostfixArgValueTypes ) ; callerScriptOptionsMap . put ( "callerScriptPostfixMutexArgs" , callerScriptPostfixMutexArgs ) ; callerScriptOptionsMap . put ( "callerScriptPostfixAliasArgs" , callerScriptPostfixAliasArgs ) ; callerScriptOptionsMap . put ( "callerScriptPostfixMinOccurrences" , callerScriptPostfixMinOccurrences ) ; callerScriptOptionsMap . put ( "callerScriptPostfixMaxOccurrences" , callerScriptPostfixMaxOccurrences ) ; if ( hasCallerScriptPostfixArgs ) { callerScriptOptionsMap . put ( "hasCallerScriptPostfixArgs" , "true" ) ; } else { callerScriptOptionsMap . put ( "hasCallerScriptPostfixArgs" , "false" ) ; } rootMap . put ( "callerScriptOptions" , callerScriptOptionsMap ) ; // Get or create a template
final Template template = cfg . getTemplate ( getIndexTemplateName ( ) ) ; // Create the output file
final File indexFile = new File ( getDestinationDir ( ) , getIndexBaseFileName ( ) + "." + getIndexFileExtension ( ) ) ; // Run the template and merge in the data
try ( final FileOutputStream fileOutStream = new FileOutputStream ( indexFile ) ; final OutputStreamWriter outWriter = new OutputStreamWriter ( fileOutStream ) ) { template . process ( rootMap , outWriter ) ; } catch ( TemplateException e ) { throw new DocException ( "Freemarker Template Exception during documentation index creation" , e ) ; }
|
public class SelectorImpl { /** * Decodes one selector subtree from its serialized form ( the inverse of the
* Operator encoding , used to decode operands ) .
* @ param buf the stream positioned at a node - type tag byte
* @ return the decoded selector node */
static Selector decodeSubtree ( ObjectInput buf ) throws ClassNotFoundException , IOException { } }
|
// The leading byte is the node - type tag . The ordering of the checks matters :
// any tag below IDENTIFIER is a literal , LIKE has its dedicated operator , and
// everything else decodes as a generic operator .
int type = buf . readByte ( ) ; if ( type < IDENTIFIER ) return new LiteralImpl ( buf ) ; else if ( type == IDENTIFIER ) return new IdentifierImpl ( buf ) ; else if ( type == LIKE ) return new LikeOperatorImpl ( buf ) ; else return new OperatorImpl ( buf ) ;
|
public class BsFileConfigCA { public void filter ( String name , EsAbstractConditionQuery . OperatorCall < BsFileConfigCQ > queryLambda , ConditionOptionCall < FilterAggregationBuilder > opLambda , OperatorCall < BsFileConfigCA > aggsLambda ) { } }
|
FileConfigCQ cq = new FileConfigCQ ( ) ; if ( queryLambda != null ) { queryLambda . callback ( cq ) ; } FilterAggregationBuilder builder = regFilterA ( name , cq . getQuery ( ) ) ; if ( opLambda != null ) { opLambda . callback ( builder ) ; } if ( aggsLambda != null ) { FileConfigCA ca = new FileConfigCA ( ) ; aggsLambda . callback ( ca ) ; ca . getAggregationBuilderList ( ) . forEach ( builder :: subAggregation ) ; }
|
public class VecmathUtil { /** * Create unit vectors from one atom to all other provided atoms .
* @ param fromAtom reference atom ( will become 0,0)
* @ param toAtoms list of to atoms
* @ return unit vectors */
static List < Vector2d > newUnitVectors ( final IAtom fromAtom , final List < IAtom > toAtoms ) { } }
|
final List < Vector2d > unitVectors = new ArrayList < Vector2d > ( toAtoms . size ( ) ) ; for ( final IAtom toAtom : toAtoms ) { unitVectors . add ( newUnitVector ( fromAtom . getPoint2d ( ) , toAtom . getPoint2d ( ) ) ) ; } return unitVectors ;
|
public class version_matrix_status { /** * Use this API to fetch filtered set of version _ matrix _ status resources .
* filter string should be in JSON format . eg : " vm _ state : DOWN , name : [ a - z ] + " */
public static version_matrix_status [ ] get_filtered ( nitro_service service , String filter ) throws Exception { } }
|
version_matrix_status obj = new version_matrix_status ( ) ; options option = new options ( ) ; option . set_filter ( filter ) ; version_matrix_status [ ] response = ( version_matrix_status [ ] ) obj . getfiltered ( service , option ) ; return response ;
|
public class Visualizer { /** * Returns the entry point of the PE if present and valid , otherwise absent .
* A valid entry point is one within a section .
* @ return entry point optional if present , absent otherwise */
private Optional < Long > getEntryPoint ( ) { } }
|
long rva = data . getOptionalHeader ( ) . get ( StandardFieldEntryKey . ADDR_OF_ENTRY_POINT ) ; Optional < SectionHeader > section = new SectionLoader ( data ) . maybeGetSectionHeaderByRVA ( rva ) ; if ( section . isPresent ( ) ) { long phystovirt = section . get ( ) . get ( SectionHeaderKey . VIRTUAL_ADDRESS ) - section . get ( ) . get ( SectionHeaderKey . POINTER_TO_RAW_DATA ) ; return Optional . of ( rva - phystovirt ) ; } return Optional . absent ( ) ;
|
public class AmazonDirectConnectClient { /** * Lists the virtual private gateways owned by the AWS account .
* You can create one or more AWS Direct Connect private virtual interfaces linked to a virtual private gateway .
* @ param describeVirtualGatewaysRequest
* @ return Result of the DescribeVirtualGateways operation returned by the service .
* @ throws DirectConnectServerException
* A server - side error occurred .
* @ throws DirectConnectClientException
* One or more parameters are not valid .
* @ sample AmazonDirectConnect . DescribeVirtualGateways
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / directconnect - 2012-10-25 / DescribeVirtualGateways "
* target = " _ top " > AWS API Documentation < / a > */
@ Override public DescribeVirtualGatewaysResult describeVirtualGateways ( DescribeVirtualGatewaysRequest request ) { } }
|
request = beforeClientExecution ( request ) ; return executeDescribeVirtualGateways ( request ) ;
|
public class InternalSARLParser {
    /**
     * ANTLR-generated syntactic predicate fragment (synpred55_InternalSARL).
     * Matches any single token from the large keyword/operator/literal set below
     * (generated from InternalSARL.g:15431); on a mismatch during backtracking it
     * marks the predicate as failed instead of throwing.
     * NOTE: generated code — do not edit by hand.
     */
    public final void synpred55_InternalSARL_fragment ( ) throws RecognitionException { } }

// InternalSARL . g : 15431:4 : ( ' abstract ' | ' annotation ' | ' class ' | ' create ' | ' def ' | ' dispatch ' | ' enum ' | ' extends ' | ' final ' | ' implements ' | ' import ' | ' interface ' | ' override ' | ' package ' | ' public ' | ' private ' | ' protected ' | ' static ' | ' throws ' | ' strictfp ' | ' native ' | ' volatile ' | ' synchronized ' | ' transient ' | ' AFTER ' | ' BEFORE ' | ' SEPARATOR ' | ' extension ' | ' ! ' | ' - ' | ' + ' | ' break ' | ' continue ' | ' assert ' | ' assume ' | ' new ' | ' { ' | ' switch ' | ' < ' | ' super ' | ' # ' | ' [ ' | ' false ' | ' true ' | ' null ' | ' typeof ' | ' if ' | ' for ' | ' while ' | ' do ' | ' throw ' | ' return ' | ' try ' | ' ( ' | RULE _ ID | RULE _ HEX | RULE _ INT | RULE _ DECIMAL | RULE _ STRING | RULE _ RICH _ TEXT | RULE _ RICH _ TEXT _ START )
// InternalSARL . g :
// Accept the lookahead token if it is in the predicate's token set; otherwise fail softly when backtracking.
{ if ( ( input . LA ( 1 ) >= RULE_STRING && input . LA ( 1 ) <= RULE_RICH_TEXT_START ) || ( input . LA ( 1 ) >= RULE_HEX && input . LA ( 1 ) <= RULE_DECIMAL ) || input . LA ( 1 ) == 25 || ( input . LA ( 1 ) >= 28 && input . LA ( 1 ) <= 29 ) || input . LA ( 1 ) == 36 || ( input . LA ( 1 ) >= 39 && input . LA ( 1 ) <= 40 ) || ( input . LA ( 1 ) >= 42 && input . LA ( 1 ) <= 45 ) || ( input . LA ( 1 ) >= 48 && input . LA ( 1 ) <= 49 ) || input . LA ( 1 ) == 51 || input . LA ( 1 ) == 55 || ( input . LA ( 1 ) >= 60 && input . LA ( 1 ) <= 63 ) || ( input . LA ( 1 ) >= 67 && input . LA ( 1 ) <= 68 ) || ( input . LA ( 1 ) >= 73 && input . LA ( 1 ) <= 75 ) || ( input . LA ( 1 ) >= 78 && input . LA ( 1 ) <= 96 ) || input . LA ( 1 ) == 106 || input . LA ( 1 ) == 129 || ( input . LA ( 1 ) >= 131 && input . LA ( 1 ) <= 140 ) ) { input . consume ( ) ; state . errorRecovery = false ; state . failed = false ; } else { if ( state . backtracking > 0 ) { state . failed = true ; return ; } MismatchedSetException mse = new MismatchedSetException ( null , input ) ; throw mse ; } }
|
public class servicegroup_servicegroupentitymonbindings_binding { /** * Use this API to fetch servicegroup _ servicegroupentitymonbindings _ binding resources of given name . */
public static servicegroup_servicegroupentitymonbindings_binding [ ] get ( nitro_service service , String servicegroupname ) throws Exception { } }
|
servicegroup_servicegroupentitymonbindings_binding obj = new servicegroup_servicegroupentitymonbindings_binding ( ) ; obj . set_servicegroupname ( servicegroupname ) ; servicegroup_servicegroupentitymonbindings_binding response [ ] = ( servicegroup_servicegroupentitymonbindings_binding [ ] ) obj . get_resources ( service ) ; return response ;
|
public class HelloConfiguration { /** * Returns a custom { @ link ClientFactory } with TLS certificate validation disabled ,
* which means any certificate received from the server will be accepted without any verification .
* It is used for an example which makes the client send an HTTPS request to the server running
* on localhost with a self - signed certificate . Do NOT use the { @ link InsecureTrustManagerFactory }
* in production . */
@ Bean public ClientFactory clientFactory ( ) { } }
|
return new ClientFactoryBuilder ( ) . sslContextCustomizer ( b -> b . trustManager ( InsecureTrustManagerFactory . INSTANCE ) ) . build ( ) ;
|
public class CLI { /** * Set up the TCP socket for annotation . */
public final void server ( ) { } }
|
// load parameters into a properties
String port = parsedArguments . getString ( "port" ) ; String model = parsedArguments . getString ( "model" ) ; String lemmatizerModel = parsedArguments . getString ( "lemmatizerModel" ) ; final String allMorphology = Boolean . toString ( this . parsedArguments . getBoolean ( "allMorphology" ) ) ; final String multiwords = Boolean . toString ( this . parsedArguments . getBoolean ( "multiwords" ) ) ; final String dictag = Boolean . toString ( this . parsedArguments . getBoolean ( "dictag" ) ) ; String outputFormat = parsedArguments . getString ( "outputFormat" ) ; // language parameter
String lang = parsedArguments . getString ( "language" ) ; Properties serverproperties = setServerProperties ( port , model , lemmatizerModel , lang , multiwords , dictag , outputFormat , allMorphology ) ; new StatisticalTaggerServer ( serverproperties ) ;
|
public class AccountManager { /** * Same as the other version with username being sent null .
* @ param masterPassword master password to use
* @ param inputText the input text
* @ return the generated password
* @ see # generatePassword ( CharSequence , String , String ) */
public SecureCharArray generatePassword ( CharSequence masterPassword , String inputText ) { } }
|
return generatePassword ( masterPassword , inputText , null ) ;
|
public class Reporting { /** * Starts all reporters .
* @ return The number of configured reporters .
* @ throws Exception on start error . */
public int start ( ) throws Exception { } }
|
if ( isStarted . compareAndSet ( false , true ) ) { try { for ( Reporter reporter : reporters ) { reporter . start ( ) ; } } catch ( Exception e ) { stop ( ) ; throw e ; } } return reporters . size ( ) ;
|
public class CDATASection { /** * Meant to be called only from within the engine */
static CDATASection asEngineCDATASection ( final ICDATASection cdataSection ) { } }
|
if ( cdataSection instanceof CDATASection ) { return ( CDATASection ) cdataSection ; } return new CDATASection ( cdataSection . getContent ( ) , cdataSection . getTemplateName ( ) , cdataSection . getLine ( ) , cdataSection . getCol ( ) ) ;
|
public class QueryParameterValue { /** * Creates a { @ code QueryParameterValue } object with a type of ARRAY the given array element type . */
public static < T > QueryParameterValue array ( T [ ] array , StandardSQLTypeName type ) { } }
|
List < QueryParameterValue > listValues = new ArrayList < > ( ) ; for ( T obj : array ) { listValues . add ( QueryParameterValue . of ( obj , type ) ) ; } return QueryParameterValue . newBuilder ( ) . setArrayValues ( listValues ) . setType ( StandardSQLTypeName . ARRAY ) . setArrayType ( type ) . build ( ) ;
|
public class Values { /** * Retrieve values . */
@ Override @ Path ( "/{ownerType}/{ownerId}" ) @ ApiOperation ( value = "Retrieve values for an ownerType and ownerId" , notes = "Response is a generic JSON object with names/values." ) public JSONObject get ( String path , Map < String , String > headers ) throws ServiceException , JSONException { } }
|
Map < String , String > parameters = getParameters ( headers ) ; String ownerType = getSegment ( path , 1 ) ; if ( ownerType == null ) // fall back to parameter
ownerType = parameters . get ( "ownerType" ) ; if ( ownerType == null ) throw new ServiceException ( "Missing path segment: {ownerType}" ) ; String ownerId = getSegment ( path , 2 ) ; if ( ownerId == null ) // fall back to parameter
ownerId = parameters . get ( "ownerId" ) ; if ( ownerId == null ) throw new ServiceException ( "Missing path segment: {ownerId}" ) ; JSONObject valuesJson = new JsonObject ( ) ; Map < String , String > values = ServiceLocator . getWorkflowServices ( ) . getValues ( ownerType , ownerId ) ; if ( values != null ) { for ( String name : values . keySet ( ) ) valuesJson . put ( name , values . get ( name ) ) ; } return valuesJson ;
|
public class QueryParameters { /** * When called the first time , all ids are added to the filter .
* When called two or more times , the provided id ' s are and ' ed with the those provided in the previous lists .
* @ param ids The ids of the elements that should be searched in this query . */
public void addIds ( Collection < String > ids ) { } }
|
if ( this . ids == null ) { this . ids = new ArrayList < > ( ids ) ; } else { this . ids . retainAll ( ids ) ; if ( this . ids . isEmpty ( ) ) { LOGGER . warn ( "No ids remain after addIds. All elements will be filtered out." ) ; } }
|
public class ObjectFactory { /** * Create an instance of { @ link JAXBElement } { @ code < } { @ link AttributeDesignatorType } { @ code > } } */
@ XmlElementDecl ( namespace = "urn:oasis:names:tc:xacml:2.0:policy:schema:os" , name = "ResourceAttributeDesignator" , substitutionHeadNamespace = "urn:oasis:names:tc:xacml:2.0:policy:schema:os" , substitutionHeadName = "Expression" ) public JAXBElement < AttributeDesignatorType > createResourceAttributeDesignator ( AttributeDesignatorType value ) { } }
|
return new JAXBElement < AttributeDesignatorType > ( _ResourceAttributeDesignator_QNAME , AttributeDesignatorType . class , null , value ) ;
|
public class QueryParameterValue { /** * Creates a { @ code QueryParameterValue } object with a type of ARRAY , and an array element type
* based on the given class . */
public static < T > QueryParameterValue array ( T [ ] array , Class < T > clazz ) { } }
|
return array ( array , classToType ( clazz ) ) ;
|
public class ReplyFactory {
    /**
     * Adds a Receipt Template to the response.
     * NOTE(review): despite its name, this method builds a *receipt* template builder,
     * not an airline itinerary template — the name appears to be a copy/paste artifact.
     * Renaming would break callers; confirm intent before changing.
     *
     * @param recipientName the recipient's name
     * @param orderNumber   the order number; must be unique for each user
     * @param currency      the currency for the price; cannot be empty. Must be a
     *                      three-letter ISO-4217-3 code in format [A-Z]{3}. See
     *                      <a href="https://developers.facebook.com/docs/payments/reference/supportedcurrencies">Facebook's currency support</a>
     * @param paymentMethod the payment method details; an arbitrary string such as
     *                      "Visa 1234" — provide enough information for the person to
     *                      identify the payment method and account used
     * @return a builder for the response
     * @see <a href="https://developers.facebook.com/docs/messenger-platform/send-api-reference/receipt-template">Facebook's Messenger Receipt Template Documentation</a>
     */
    public static ReceiptTemplateBuilder addAirlineItineraryTemplate ( String recipientName , String orderNumber , String currency , String paymentMethod ) { } }

return new ReceiptTemplateBuilder ( recipientName , orderNumber , currency , paymentMethod ) ;
|
public class VectorMath {
    /**
     * Adds two {@code Vector}s with a scalar weight for each {@code Vector}.
     * NOTE: {@code vector1} is modified in place — each component becomes
     * {@code vector1[i]*weight1 + vector2[i]*weight2} — and is also returned.
     *
     * @param vector1 the vector values should be added to (mutated in place)
     * @param weight1 the weight of values in {@code vector1}
     * @param vector2 the vector values that should be added to {@code vector1}
     * @param weight2 the weight of values in {@code vector2}
     * @return the modified {@code vector1} (the original tag {@code @param {@code vector1}} was malformed)
     * @throws IllegalArgumentException if the two vectors differ in length
     */
    public static Vector addWithScalars ( Vector vector1 , double weight1 , Vector vector2 , double weight2 ) { } }

if ( vector2 . length ( ) != vector1 . length ( ) ) throw new IllegalArgumentException ( "Vectors of different sizes cannot be added" ) ; int length = vector2 . length ( ) ; for ( int i = 0 ; i < length ; ++ i ) { // weighted sum of the i-th components, written back into vector1
double value = vector1 . getValue ( i ) . doubleValue ( ) * weight1 + vector2 . getValue ( i ) . doubleValue ( ) * weight2 ; vector1 . set ( i , value ) ; } return vector1 ;
|
public class RedundentExprEliminator { /** * Assert that the expression is a LocPathIterator , and , if
* not , try to give some diagnostic info . */
private final void assertIsLocPathIterator ( Expression expr1 , ExpressionOwner eo ) throws RuntimeException { } }
|
if ( ! ( expr1 instanceof LocPathIterator ) ) { String errMsg ; if ( expr1 instanceof Variable ) { errMsg = "Programmer's assertion: expr1 not an iterator: " + ( ( Variable ) expr1 ) . getQName ( ) ; } else { errMsg = "Programmer's assertion: expr1 not an iterator: " + expr1 . getClass ( ) . getName ( ) ; } throw new RuntimeException ( errMsg + ", " + eo . getClass ( ) . getName ( ) + " " + expr1 . exprGetParent ( ) ) ; }
|
public class HttpOutboundLink {
    /**
     * Query whether this outbound link is still actively connected to the target host.
     * If this returns true, the link is expected to still be valid for continued use.
     * If this returns false, the connection is inactive and the caller must close it
     * down and not send/read any more data, as any attempts to do so will result in
     * errors.
     *
     * @return true if the connection appears usable, false if it is dead
     */
    public boolean isConnected ( ) { } }

// If we haven't fully read the incoming message, the connection must still be live.
if ( ! this . myInterface . isIncomingMessageFullyRead ( ) ) { return true ; } try { // Probe liveness with an immediate (zero-byte, zero-timeout) TCP read:
// success means the socket is still connected.
if ( null == this . myInterface . getTSC ( ) . getReadInterface ( ) . getBuffer ( ) ) { // No read buffer currently exists; arm just-in-time allocation sized to the
// configured incoming header buffer so the probe read has somewhere to land.
this . myInterface . getTSC ( ) . getReadInterface ( ) . setJITAllocateSize ( this . myInterface . getHttpConfig ( ) . getIncomingHdrBufferSize ( ) ) ; } this . myInterface . getTSC ( ) . getReadInterface ( ) . read ( 0 , 0 ) ; return true ; } catch ( IOException e ) { // No FFDC required
// The probe read failed, so this connection is dead.
return false ; }
|
public class KeyUtil {
    /**
     * Reads a Certificate from a certification file (translated from the original
     * Chinese javadoc).<br>
     * See: http://snowolf.iteye.com/blog/391931
     *
     * @param type     certificate type, e.g. X.509
     * @param in       {@link InputStream}; to read a .cer file from disk, use
     *                 {@link FileUtil#getInputStream(java.io.File)}
     * @param password keystore password
     * @param alias    certificate alias within the keystore
     * @return the {@link Certificate} for the given alias (the original javadoc
     *         incorrectly said {@code KeyStore})
     * @throws CryptoException wrapping any {@link KeyStoreException}
     * @since 4.4.1
     */
    public static Certificate readCertificate ( String type , InputStream in , char [ ] password , String alias ) { } }

final KeyStore keyStore = readKeyStore ( type , in , password ) ; try { return keyStore . getCertificate ( alias ) ; } catch ( KeyStoreException e ) { throw new CryptoException ( e ) ; }
|
public class CleverTapAPI { /** * If you want to stop recorded events from being sent to the server , use this method to set the SDK instance to offline .
* Once offline , events will be recorded and queued locally but will not be sent to the server until offline is disabled .
* Calling this method again with offline set to false will allow events to be sent to server and the SDK instance will immediately attempt to send events that have been queued while offline .
* @ param value boolean , true sets the sdk offline , false sets the sdk back online */
@ SuppressWarnings ( { } }
|
"unused" , "WeakerAccess" } ) public void setOffline ( boolean value ) { offline = value ; if ( offline ) { getConfigLogger ( ) . debug ( getAccountId ( ) , "CleverTap Instance has been set to offline, won't send events queue" ) ; } else { getConfigLogger ( ) . debug ( getAccountId ( ) , "CleverTap Instance has been set to online, sending events queue" ) ; flush ( ) ; }
|
public class ServerCache { /** * Declarative Services method to activate this component . Best practice : this should be a protected method , not
* public or private
* @ param context
* context for this component */
@ Activate protected void activate ( ComponentContext context ) { } }
|
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEventEnabled ( ) ) Tr . event ( tc , "activate" , context ) ;
|
public class PiwikRequest { /** * Set a stored parameter that is a boolean . This value will be stored as " 1"
* for true and " 0 " for false .
* @ param key the parameter ' s key
* @ param value the parameter ' s value . Removes the parameter if null */
private void setBooleanParameter ( String key , Boolean value ) { } }
|
if ( value == null ) { parameters . remove ( key ) ; } else if ( value ) { parameters . put ( key , 1 ) ; } else { parameters . put ( key , 0 ) ; }
|
public class SetupUtil { /** * check a list of bundles ( symbolic name , version ) for the right version and active state
* @ param ctx the package install context
* @ param bundlesToCheck the ' list ' to check ; key : symbolic name , value : version
* @ param waitToStartSeconds the seconds to wait to check start ; must be greater than 1
* @ param timeoutSeconds the timeout for the bundle check in seconds
* @ throws PackageException if the check fails */
public static void checkBundles ( InstallContext ctx , Map < String , String > bundlesToCheck , int waitToStartSeconds , int timeoutSeconds ) throws PackageException { } }
|
try { // wait to give the bundle installer a chance to install bundles
Thread . sleep ( waitToStartSeconds * 1000 ) ; } catch ( InterruptedException ignore ) { } LOG . info ( "Check bundles..." ) ; BundleContext bundleContext = FrameworkUtil . getBundle ( ctx . getSession ( ) . getClass ( ) ) . getBundleContext ( ) ; int ready = 0 ; for ( int i = 0 ; i < timeoutSeconds ; i ++ ) { ready = 0 ; for ( Bundle bundle : bundleContext . getBundles ( ) ) { String version = bundlesToCheck . get ( bundle . getSymbolicName ( ) ) ; if ( version != null && version . equals ( bundle . getVersion ( ) . toString ( ) ) && bundle . getState ( ) == Bundle . ACTIVE ) { ready ++ ; } } if ( ready == bundlesToCheck . size ( ) ) { break ; } try { Thread . sleep ( 1000 ) ; } catch ( InterruptedException ignore ) { } } if ( ready < bundlesToCheck . size ( ) ) { LOG . error ( "Checked bundles not ready - installation failed!" ) ; throw new PackageException ( "bundles not ready" ) ; } else { LOG . info ( "Checked bundles are up and ready." ) ; }
|
public class JarScanner { /** * Scans the specified jars for packages and returns a list of all packages found in that jar . The scanner makes no
* distinction between packages and folders .
* @ return a collection of all package names found in the jar */
public Collection < String > scanPackages ( ) { } }
|
return scanJar ( p -> Files . isDirectory ( p ) && ! isIgnored ( p . toAbsolutePath ( ) ) ) ;
|
public class DocBookXMLPreProcessor {
    /**
     * Inserts an itemized list into the start of the topic, below the title, containing
     * any PREVIOUS relationships that exist for the Spec Topic. The list title is set to
     * "Previous Step(s) in &lt;TOPIC_PARENT_NAME&gt;" (localized via the translations
     * bundle, with a %s placeholder for the parent-level link).
     *
     * @param topic        the topic to process the injection for
     * @param doc          the DOM Document object that represents the topic's XML
     * @param useFixedUrls whether fixed URLs should be used in the injected links
     */
    public void processPrevRelationshipInjections ( final SpecTopic topic , final Document doc , final boolean useFixedUrls ) { } }

// Nothing to inject when the topic has no PREVIOUS relationships.
if ( topic . getPrevTopicRelationships ( ) . isEmpty ( ) ) return ; // Find the document's direct <title> child; the list is inserted after it.
Element titleEle = null ; final NodeList titleList = doc . getDocumentElement ( ) . getElementsByTagName ( "title" ) ; for ( int i = 0 ; i < titleList . getLength ( ) ; i ++ ) { if ( titleList . item ( i ) . getParentNode ( ) . equals ( doc . getDocumentElement ( ) ) ) { titleEle = ( Element ) titleList . item ( i ) ; break ; } } if ( titleEle != null ) { // Build the <itemizedlist> that will hold one entry per previous relationship.
final List < TopicRelationship > prevList = topic . getPrevTopicRelationships ( ) ; final Element rootEle = doc . createElement ( "itemizedlist" ) ; rootEle . setAttribute ( "role" , ROLE_PROCESS_PREVIOUS_ITEMIZED_LIST ) ; // List title: "Previous Step" or "Previous Steps", from translations with a fallback.
final Element linkTitleEle = doc . createElement ( "title" ) ; linkTitleEle . setAttribute ( "role" , ROLE_PROCESS_PREVIOUS_TITLE ) ; final String translatedString ; if ( prevList . size ( ) > 1 ) { final String previousStepsTranslation = translations . getString ( PREVIOUS_STEPS_PROPERTY ) ; translatedString = previousStepsTranslation == null ? DEFAULT_PREVIOUS_STEPS : previousStepsTranslation ; } else { final String previousStepTranslation = translations . getString ( PREVIOUS_STEP_PROPERTY ) ; translatedString = previousStepTranslation == null ? DEFAULT_PREVIOUS_STEP : previousStepTranslation ; } /* The translated string carries a %s format marker showing where the parent-level
link belongs; split on it and place the link between the two halves. */
String [ ] split = translatedString . split ( "%s" ) ; // Text before the placeholder, if any.
if ( ! split [ 0 ] . trim ( ) . isEmpty ( ) ) { linkTitleEle . appendChild ( doc . createTextNode ( split [ 0 ] ) ) ; } // Link to the parent level, preferring its translated title when available.
final Element titleXrefItem = doc . createElement ( "link" ) ; final Level level = ( Level ) topic . getParent ( ) ; if ( level . getTranslatedTitle ( ) != null && ! level . getTranslatedTitle ( ) . isEmpty ( ) ) { titleXrefItem . setTextContent ( level . getTranslatedTitle ( ) ) ; } else { titleXrefItem . setTextContent ( level . getTitle ( ) ) ; } titleXrefItem . setAttribute ( "linkend" , ( ( Level ) topic . getParent ( ) ) . getUniqueLinkId ( useFixedUrls ) ) ; titleXrefItem . setAttribute ( "xrefstyle" , ROLE_PROCESS_PREVIOUS_TITLE_LINK ) ; linkTitleEle . appendChild ( titleXrefItem ) ; // Text after the placeholder, if any.
if ( split . length > 1 && ! split [ 1 ] . trim ( ) . isEmpty ( ) ) { linkTitleEle . appendChild ( doc . createTextNode ( split [ 1 ] ) ) ; } rootEle . appendChild ( linkTitleEle ) ; for ( final TopicRelationship prev : prevList ) { final Element prevEle = doc . createElement ( "para" ) ; final SpecTopic prevTopic = prev . getSecondaryRelationship ( ) ; // One <listitem><para><xref/></para></listitem> per previous topic.
final Element xrefItem = doc . createElement ( "xref" ) ; xrefItem . setAttribute ( "linkend" , prevTopic . getUniqueLinkId ( useFixedUrls ) ) ; xrefItem . setAttribute ( "xrefstyle" , ROLE_PROCESS_PREVIOUS_LINK ) ; prevEle . appendChild ( xrefItem ) ; final Element listitemEle = doc . createElement ( "listitem" ) ; listitemEle . setAttribute ( "role" , ROLE_PROCESS_PREVIOUS_LISTITEM ) ; listitemEle . appendChild ( prevEle ) ; rootEle . appendChild ( listitemEle ) ; } // Insert the list immediately after the title, before the next element/comment node.
Node nextNode = titleEle . getNextSibling ( ) ; while ( nextNode != null && nextNode . getNodeType ( ) != Node . ELEMENT_NODE && nextNode . getNodeType ( ) != Node . COMMENT_NODE ) { nextNode = nextNode . getNextSibling ( ) ; } doc . getDocumentElement ( ) . insertBefore ( rootEle , nextNode ) ; }
|
public class GVRSceneObject { /** * Visits all the components attached to
* the descendants of this scene object .
* The ComponentVisitor . visit function is called for every
* component of each descendant until it returns false .
* This allows you to traverse the scene graph safely without copying it .
* This method gives much better performance than iterating
* children ( ) or getChildren ( ) .
* @ param visitor ComponentVisitor interface implementing " visit " function
* @ see # children ( )
* @ see # getChildren ( )
* @ see SceneVisitor
* @ see # forAllDescendants ( SceneVisitor )
* @ see # getComponent ( long ) */
public void forAllComponents ( ComponentVisitor visitor ) { } }
|
synchronized ( mComponents ) { for ( GVRComponent comp : mComponents . values ( ) ) { if ( ! visitor . visit ( comp ) ) { return ; } } } synchronized ( mChildren ) { for ( int i = 0 ; i < mChildren . size ( ) ; ++ i ) { GVRSceneObject child = mChildren . get ( i ) ; child . forAllComponents ( visitor ) ; } }
|
public class ExecutorUtil {
    /**
     * Executes the auto-generated count query for pagination (comments translated from
     * the original Chinese).
     *
     * @param dialect       pagination dialect used to produce the count SQL
     * @param executor      the MyBatis executor that runs the query
     * @param countMs       the mapped statement for the count query
     * @param parameter     the statement parameter object
     * @param boundSql      the original bound SQL the count is derived from
     * @param rowBounds     the requested row bounds
     * @param resultHandler the result handler passed through to the executor
     * @return the total row count
     * @throws SQLException on query failure
     */
    public static Long executeAutoCount ( Dialect dialect , Executor executor , MappedStatement countMs , Object parameter , BoundSql boundSql , RowBounds rowBounds , ResultHandler resultHandler ) throws SQLException { } }

Map < String , Object > additionalParameters = getAdditionalParameter ( boundSql ) ; // Create the cache key for the count query
CacheKey countKey = executor . createCacheKey ( countMs , parameter , RowBounds . DEFAULT , boundSql ) ; // Ask the dialect for the count SQL
String countSql = dialect . getCountSql ( countMs , boundSql , parameter , rowBounds , countKey ) ; // countKey . update ( countSql ) ;
BoundSql countBoundSql = new BoundSql ( countMs . getConfiguration ( ) , countSql , boundSql . getParameterMappings ( ) , parameter ) ; // Dynamic SQL may have produced temporary parameters; copy them into the new BoundSql by hand
for ( String key : additionalParameters . keySet ( ) ) { countBoundSql . setAdditionalParameter ( key , additionalParameters . get ( key ) ) ; } // Run the count query; the single row/column result is the total count
Object countResultList = executor . query ( countMs , parameter , RowBounds . DEFAULT , resultHandler , countKey , countBoundSql ) ; Long count = ( Long ) ( ( List ) countResultList ) . get ( 0 ) ; return count ;
|
public class Split { /** * Gets the value of the flows property .
* This accessor method returns a reference to the live list ,
* not a snapshot . Therefore any modification you make to the
* returned list will be present inside the JAXB object .
* This is why there is not a < CODE > set < / CODE > method for the flows property .
* For example , to add a new item , do as follows :
* < pre >
* getFlows ( ) . add ( newItem ) ;
* < / pre >
* Objects of the following type ( s ) are allowed in the list
* { @ link Flow } */
@ Generated ( value = "com.ibm.jtc.jax.tools.xjc.Driver" , date = "2014-06-11T05:49:00-04:00" , comments = "JAXB RI v2.2.3-11/28/2011 06:21 AM(foreman)-" ) public List < Flow > getFlows ( ) { } }
|
if ( flows == null ) { flows = new ArrayList < Flow > ( ) ; } return this . flows ;
|
public class PersonGroupsImpl { /** * Create a new person group with specified personGroupId , name and user - provided userData .
* @ param personGroupId Id referencing a particular person group .
* @ param createOptionalParameter the object representing the optional parameters to be set before calling this API
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ throws APIErrorException thrown if the request is rejected by server
* @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent */
public void create ( String personGroupId , CreatePersonGroupsOptionalParameter createOptionalParameter ) { } }
|
createWithServiceResponseAsync ( personGroupId , createOptionalParameter ) . toBlocking ( ) . single ( ) . body ( ) ;
|
public class CommerceWarehousePersistenceImpl { /** * Returns the first commerce warehouse in the ordered set where groupId = & # 63 ; and commerceCountryId = & # 63 ; and primary = & # 63 ; .
* @ param groupId the group ID
* @ param commerceCountryId the commerce country ID
* @ param primary the primary
* @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > )
* @ return the first matching commerce warehouse
* @ throws NoSuchWarehouseException if a matching commerce warehouse could not be found */
@ Override public CommerceWarehouse findByG_C_P_First ( long groupId , long commerceCountryId , boolean primary , OrderByComparator < CommerceWarehouse > orderByComparator ) throws NoSuchWarehouseException { } }
|
CommerceWarehouse commerceWarehouse = fetchByG_C_P_First ( groupId , commerceCountryId , primary , orderByComparator ) ; if ( commerceWarehouse != null ) { return commerceWarehouse ; } StringBundler msg = new StringBundler ( 8 ) ; msg . append ( _NO_SUCH_ENTITY_WITH_KEY ) ; msg . append ( "groupId=" ) ; msg . append ( groupId ) ; msg . append ( ", commerceCountryId=" ) ; msg . append ( commerceCountryId ) ; msg . append ( ", primary=" ) ; msg . append ( primary ) ; msg . append ( "}" ) ; throw new NoSuchWarehouseException ( msg . toString ( ) ) ;
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.