signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class FactoryAssociation { /** * Returns an algorithm for associating features together which uses a brute force greedy algorithm .
* See { @ link AssociateGreedy } for details .
* @ param score Computes the fit score between two features .
* @ param maxError Maximum allowed error / fit score between two features . To disable set to Double . MAX _ VALUE
* @ param backwardsValidation If true associations are validated by associating in the reverse direction . If the
* forward and reverse matches fit an association is excepted .
* @ param < D > Data structure being associated
* @ return AssociateDescription */
public static < D > AssociateDescription < D > greedy ( ScoreAssociation < D > score , double maxError , boolean backwardsValidation ) { } } | AssociateGreedyBase < D > alg ; if ( BoofConcurrency . USE_CONCURRENT ) { alg = new AssociateGreedy_MT < > ( score , backwardsValidation ) ; } else { alg = new AssociateGreedy < > ( score , backwardsValidation ) ; } alg . setMaxFitError ( maxError ) ; return new WrapAssociateGreedy < > ( alg ) ; |
public class AfplibPackageImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
public EClass getIncludeTile ( ) { } } | if ( includeTileEClass == null ) { includeTileEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( AfplibPackage . eNS_URI ) . getEClassifiers ( ) . get ( 401 ) ; } return includeTileEClass ; |
public class JvmTypesBuilder { /** * Attaches the given documentation of the source element to the given jvmElement .
* The documentation is computed lazily . */
public void copyDocumentationTo ( /* @ Nullable */
final EObject source , /* @ Nullable */
JvmIdentifiableElement jvmElement ) { } } | if ( source == null || jvmElement == null ) return ; DocumentationAdapter documentationAdapter = new DocumentationAdapter ( ) { private boolean computed = false ; @ Override public String getDocumentation ( ) { if ( computed ) { return super . getDocumentation ( ) ; } String result = JvmTypesBuilder . this . getDocumentation ( source ) ; setDocumentation ( result ) ; return result ; } @ Override public void setDocumentation ( String documentation ) { computed = true ; super . setDocumentation ( documentation ) ; } } ; jvmElement . eAdapters ( ) . add ( documentationAdapter ) ; |
public class CPRuleLocalServiceBaseImpl { /** * Deletes the cp rule from the database . Also notifies the appropriate model listeners .
* @ param cpRule the cp rule
* @ return the cp rule that was removed
* @ throws PortalException */
@ Indexable ( type = IndexableType . DELETE ) @ Override public CPRule deleteCPRule ( CPRule cpRule ) throws PortalException { } } | return cpRulePersistence . remove ( cpRule ) ; |
public class VisitState { /** * Puts this visit state in its entry , if it not empty .
* < p > This method is called only by { @ link Workbench # release ( VisitState ) } .
* Note that the associated entry is < em > not < / em > put back on the workbench . Use
* { @ link WorkbenchEntry # putOnWorkbenchIfNotEmpty ( Workbench ) } for that purpose .
* < ul >
* < li > Preconditions : { @ link # workbenchEntry } & ne ; { @ code null } , { @ link # acquired } .
* < li > Postconditions : not { @ link # acquired } .
* < / ul > */
public synchronized void putInEntryIfNotEmpty ( ) { } } | assert workbenchEntry != null : this ; assert acquired : this ; assert workbenchEntry . acquired : workbenchEntry ; if ( ! isEmpty ( ) ) workbenchEntry . add ( this ) ; acquired = false ; |
public class FindNonProgressingOperationHandler {
	/**
	 * Separate from other findNonProgressingOp variant to allow unit testing without needing a mock OperationContext.
	 *
	 * Scans the active operations under {@code resource} for one whose exclusive
	 * running time exceeds {@code timeout}, and (on a host controller) applies the
	 * WFCORE-263 domain-rollout heuristics before blaming the lock holder.
	 *
	 * @param resource resource whose ACTIVE_OPERATION children are inspected
	 * @param forServer true when running on a server (skips the domain-rollout analysis)
	 * @param timeout threshold in the same units as the recorded running times
	 * @return the name of the non-progressing operation, or null if none found
	 * @throws OperationFailedException if a domain rollout is stuck on several related operations
	 */
	static String findNonProgressingOp(Resource resource, boolean forServer, long timeout) throws OperationFailedException {
		// Find the first op holding the exclusive lock longer than the timeout.
		Resource.ResourceEntry nonProgressing = null;
		for (Resource.ResourceEntry child : resource.getChildren(ACTIVE_OPERATION)) {
			ModelNode model = child.getModel();
			if (model.get(EXCLUSIVE_RUNNING_TIME).asLong() > timeout) {
				nonProgressing = child;
				ControllerLogger.MGMT_OP_LOGGER.tracef("non-progressing op: %s", nonProgressing.getModel());
				break;
			}
		}
		if (nonProgressing != null && !forServer) {
			// WFCORE-263
			// See if the op is non-progressing because it's the HC op waiting for commit
			// from the DC while other ops (i.e. ops proxied to our servers) associated
			// with the same domain-uuid are not completing
			ModelNode model = nonProgressing.getModel();
			if (model.get(DOMAIN_ROLLOUT).asBoolean()
					&& OperationContext.ExecutionStatus.COMPLETING.toString().equals(model.get(EXECUTION_STATUS).asString())
					&& model.hasDefined(DOMAIN_UUID)) {
				ControllerLogger.MGMT_OP_LOGGER.trace("Potential domain rollout issue");
				String domainUUID = model.get(DOMAIN_UUID).asString();
				Set<String> relatedIds = null;
				List<Resource.ResourceEntry> relatedExecutingOps = null;
				// Collect sibling ops sharing the same domain-uuid that are also slow.
				for (Resource.ResourceEntry activeOp : resource.getChildren(ACTIVE_OPERATION)) {
					if (nonProgressing.getName().equals(activeOp.getName())) {
						continue; // ignore self
					}
					ModelNode opModel = activeOp.getModel();
					if (opModel.hasDefined(DOMAIN_UUID) && domainUUID.equals(opModel.get(DOMAIN_UUID).asString())
							&& opModel.get(RUNNING_TIME).asLong() > timeout) {
						if (relatedIds == null) {
							relatedIds = new TreeSet<String>(); // order these as an aid to unit testing
						}
						relatedIds.add(activeOp.getName());
						// If the op is ExecutionStatus.EXECUTING that means it's still EXECUTING on the
						// server or a prepare message got lost. It would be COMPLETING if the server
						// had sent a prepare message, as that would result in ProxyStepHandler calling completeStep
						if (OperationContext.ExecutionStatus.EXECUTING.toString().equals(opModel.get(EXECUTION_STATUS).asString())) {
							if (relatedExecutingOps == null) {
								relatedExecutingOps = new ArrayList<Resource.ResourceEntry>();
							}
							relatedExecutingOps.add(activeOp);
							ControllerLogger.MGMT_OP_LOGGER.tracef("Related executing: %s", opModel);
						} else ControllerLogger.MGMT_OP_LOGGER.tracef("Related non-executing: %s", opModel);
					} else ControllerLogger.MGMT_OP_LOGGER.tracef("unrelated: %s", opModel);
				}
				if (relatedIds != null) {
					// There are other ops associated with this domain-uuid that are also not completing
					// in the desired time, so we can't treat the one holding the lock as the problem.
					if (relatedExecutingOps != null && relatedExecutingOps.size() == 1) {
						// There's a single related op that's executing for too long. So we can report that one.
						// Note that it's possible that the same problem exists on other hosts as well
						// and that this cancellation will not resolve the overall problem. But, we only
						// get here on a slave HC and if the user is invoking this on a slave and not the
						// master, we'll assume they have a reason for doing that and want us to treat this
						// as a problem on this particular host.
						nonProgressing = relatedExecutingOps.get(0);
					} else {
						// Fail and provide a useful failure message.
						throw DomainManagementLogger.ROOT_LOGGER.domainRolloutNotProgressing(nonProgressing.getName(), timeout, domainUUID, relatedIds);
					}
				}
			}
		}
		return nonProgressing == null ? null : nonProgressing.getName();
	}
}
public class SqlClosureElf { /** * Gets an object using a from clause .
* @ param type The type of the desired object .
* @ param clause The WHERE clause .
* @ param args The arguments for the WHERE clause .
* @ param < T > The type of the object .
* @ return The object or { @ code null } */
public static < T > T objectFromClause ( Class < T > type , String clause , Object ... args ) { } } | return SqlClosure . sqlExecute ( c -> OrmElf . objectFromClause ( c , type , clause , args ) ) ; |
public class ModUniqueIdDissector { @ Override public void dissect ( final Parsable < ? > parsable , final String inputname ) throws DissectionFailure { } } | final ParsedField field = parsable . getParsableField ( INPUT_TYPE , inputname ) ; String fieldValue = field . getValue ( ) . getString ( ) ; if ( fieldValue == null || fieldValue . isEmpty ( ) ) { return ; // Nothing to do here
} UniqueIdRec record = decode ( fieldValue ) ; if ( record == null ) { return ; } if ( wantTime ) { parsable . addDissection ( inputname , "TIME.EPOCH" , "epoch" , record . timestamp ) ; } if ( wantIp ) { parsable . addDissection ( inputname , "IP" , "ip" , record . ipaddrStr ) ; } if ( wantProcessId ) { parsable . addDissection ( inputname , "PROCESSID" , "processid" , record . pid ) ; } if ( wantCounter ) { parsable . addDissection ( inputname , "COUNTER" , "counter" , record . counter ) ; } if ( wantThreadIndex ) { parsable . addDissection ( inputname , "THREAD_INDEX" , "threadindex" , record . threadIndex ) ; } |
public class DcpControlHandler { /** * Once the channel becomes active , start negotiating the dcp control params . */
@ Override public void channelActive ( final ChannelHandlerContext ctx ) throws Exception { } } | controlSettings = dcpControl . getControls ( getServerVersion ( ctx . channel ( ) ) ) . entrySet ( ) . iterator ( ) ; negotiate ( ctx ) ; |
public class Tr { /** * Register the provided class with the trace service and assign it to the
* provided group name . Translated messages will attempt to use the input
* message bundle source .
* @ param aClass
* a valid < code > Class < / code > to register a component for with
* the trace manager . The className is obtained from the Class
* and is used as the name in the registration process .
* @ param group
* the name of the group that the named component is a member of .
* Null is allowed . If null is passed , the name is not added to a
* group . Once added to a group , there is no corresponding
* mechanism to remove a component from a group .
* @ param bundle
* the name of the message properties file to use when providing
* national language support for messages logged by this
* component . All messages for this component must be found in
* this file .
* @ return TraceComponent the < code > TraceComponent < / code > corresponding to
* the name of the specified class . */
public static TraceComponent register ( Class < ? > aClass , String group , String bundle ) { } } | TraceComponent tc = new TraceComponent ( aClass . getName ( ) , aClass , group , bundle ) ; registerTraceComponent ( tc ) ; return tc ; |
public class server { /** * Use this API to add server resources . */
public static base_responses add ( nitro_service client , server resources [ ] ) throws Exception { } } | base_responses result = null ; if ( resources != null && resources . length > 0 ) { server addresources [ ] = new server [ resources . length ] ; for ( int i = 0 ; i < resources . length ; i ++ ) { addresources [ i ] = new server ( ) ; addresources [ i ] . name = resources [ i ] . name ; addresources [ i ] . ipaddress = resources [ i ] . ipaddress ; addresources [ i ] . domain = resources [ i ] . domain ; addresources [ i ] . translationip = resources [ i ] . translationip ; addresources [ i ] . translationmask = resources [ i ] . translationmask ; addresources [ i ] . domainresolveretry = resources [ i ] . domainresolveretry ; addresources [ i ] . state = resources [ i ] . state ; addresources [ i ] . ipv6address = resources [ i ] . ipv6address ; addresources [ i ] . comment = resources [ i ] . comment ; addresources [ i ] . td = resources [ i ] . td ; } result = add_bulk_request ( client , addresources ) ; } return result ; |
public class DropboxEndpoint { /** * Requests the linked account name .
* @ param dropboxApi the { @ link DropboxAPI } .
* @ return the account name ; or null if not linked . */
private String requestAccountName ( DropboxAPI < AndroidAuthSession > dropboxApi ) { } } | String accountName = null ; if ( dropboxApi != null ) { try { accountName = dropboxApi . accountInfo ( ) . displayName ; } catch ( DropboxException e ) { // Do nothing .
} } return accountName ; |
public class D6Crud { /** * Insert the specified model object into the DB
* @ param modelObjects
* @ return true : DB operation success false : failure */
public boolean execInsert ( D6Model [ ] modelObjects ) { } } | final D6Inex includeExcludeColumnNames = null ; return execInsert ( modelObjects , includeExcludeColumnNames , false ) ; |
public class SCoveragePackageMojo { /** * Creates artifact file containing instrumented classes . */
@ Override public void execute ( ) { } } | if ( "pom" . equals ( project . getPackaging ( ) ) ) { getLog ( ) . info ( "Skipping SCoverage execution for project with packaging type 'pom'" ) ; return ; } if ( skip ) { getLog ( ) . info ( "Skipping Scoverage execution" ) ; return ; } long ts = System . currentTimeMillis ( ) ; SCoverageForkedLifecycleConfigurator . afterForkedLifecycleExit ( project , reactorProjects ) ; long te = System . currentTimeMillis ( ) ; getLog ( ) . debug ( String . format ( "Mojo execution time: %d ms" , te - ts ) ) ; |
public class DataFrameReader { /** * Reads the given file into a table using default options
* Uses converter specified based on given file extension
* Use { @ link # usingOptions ( ReadOptions ) usingOptions } to use non - default options */
public Table file ( File file ) throws IOException { } } | String extension = Files . getFileExtension ( file . getCanonicalPath ( ) ) ; DataReader < ? > reader = registry . getReaderForExtension ( extension ) ; return reader . read ( new Source ( file ) ) ; |
public class NetworkServiceRecordAgent { /** * Create a new NetworkServiceRecord from a NetworkServiceDescriptor .
* @ param id ID of the NetworkServiceDescriptor
* @ param vduVimInstances a HashMap assigning VimInstance names to VirtualDeploymentUnits
* @ param keys an ArrayList of Key names that shall be passed to the NetworkServiceRecord
* @ param configurations a HashMap assigning Configuration objects to VirtualNetworkServiceRecord
* @ param monitoringIp the IP of the monitoring system
* @ return the created NetworkServiceRecord
* @ throws SDKException if the request fails */
@ Help ( help = "Create NetworkServiceRecord from NetworkServiceDescriptor id" ) public NetworkServiceRecord create ( final String id , HashMap < String , ArrayList < String > > vduVimInstances , ArrayList < String > keys , HashMap < String , Configuration > configurations , String monitoringIp ) throws SDKException { } } | HashMap < String , Serializable > jsonBody = new HashMap < > ( ) ; jsonBody . put ( "keys" , keys ) ; jsonBody . put ( "vduVimInstances" , vduVimInstances ) ; jsonBody . put ( "configurations" , configurations ) ; jsonBody . put ( "monitoringIp" , monitoringIp ) ; return ( NetworkServiceRecord ) this . requestPost ( id , jsonBody , NetworkServiceRecord . class ) ; |
public class Ifc4PackageImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
@ Override public EClass getIfcBSplineSurface ( ) { } } | if ( ifcBSplineSurfaceEClass == null ) { ifcBSplineSurfaceEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( Ifc4Package . eNS_URI ) . getEClassifiers ( ) . get ( 36 ) ; } return ifcBSplineSurfaceEClass ; |
public class TimeZoneFormat { /** * Parse an exemplar location string .
* @ param text the text contains an exemplar location string at the position .
* @ param pos the position .
* @ return The zone ID for the parsed exemplar location . */
private String parseExemplarLocation ( String text , ParsePosition pos ) { } } | int startIdx = pos . getIndex ( ) ; int parsedPos = - 1 ; String tzID = null ; EnumSet < NameType > nameTypes = EnumSet . of ( NameType . EXEMPLAR_LOCATION ) ; Collection < MatchInfo > exemplarMatches = _tznames . find ( text , startIdx , nameTypes ) ; if ( exemplarMatches != null ) { MatchInfo exemplarMatch = null ; for ( MatchInfo match : exemplarMatches ) { if ( startIdx + match . matchLength ( ) > parsedPos ) { exemplarMatch = match ; parsedPos = startIdx + match . matchLength ( ) ; } } if ( exemplarMatch != null ) { tzID = getTimeZoneID ( exemplarMatch . tzID ( ) , exemplarMatch . mzID ( ) ) ; pos . setIndex ( parsedPos ) ; } } if ( tzID == null ) { pos . setErrorIndex ( startIdx ) ; } return tzID ; |
public class NewtonUnconstrained { /** * Hess . step = - Grad */
private DoubleMatrix1D calculateNewtonStep ( DoubleMatrix2D hessX , DoubleMatrix1D gradX ) throws Exception { } } | final KKTSolver kktSolver = new BasicKKTSolver ( ) ; if ( isCheckKKTSolutionAccuracy ( ) ) { kktSolver . setCheckKKTSolutionAccuracy ( isCheckKKTSolutionAccuracy ( ) ) ; kktSolver . setToleranceKKT ( getToleranceKKT ( ) ) ; } kktSolver . setHMatrix ( hessX ) ; kktSolver . setGVector ( gradX ) ; DoubleMatrix1D [ ] sol = kktSolver . solve ( ) ; DoubleMatrix1D step = sol [ 0 ] ; return step ; |
public class URIUtils { /** * Return the parent Path .
* Treat a URI like a directory path and return the parent directory .
* @ param p the path to return a parent reference to
* @ return the parent path of the URI */
public static String parentPath ( String p ) { } } | if ( p == null || URIUtils . SLASH . equals ( p ) ) return null ; int slash = p . lastIndexOf ( '/' , p . length ( ) - 2 ) ; if ( slash >= 0 ) return p . substring ( 0 , slash + 1 ) ; return null ; |
public class nsxmlnamespace { /** * Use this API to fetch filtered set of nsxmlnamespace resources .
* set the filter parameter values in filtervalue object . */
public static nsxmlnamespace [ ] get_filtered ( nitro_service service , filtervalue [ ] filter ) throws Exception { } } | nsxmlnamespace obj = new nsxmlnamespace ( ) ; options option = new options ( ) ; option . set_filter ( filter ) ; nsxmlnamespace [ ] response = ( nsxmlnamespace [ ] ) obj . getfiltered ( service , option ) ; return response ; |
public class JobTracker {
	/**
	 * Safe clean-up all data structures at the end of the
	 * job (success/failure/killed).
	 * Here we also ensure that for a given user we maintain
	 * information for only MAX_COMPLETE_USER_JOBS_IN_MEMORY jobs
	 * on the JobTracker.
	 *
	 * @param job completed job.
	 */
	synchronized void finalizeJob(JobInProgress job) {
		// Mark the 'non-running' tasks for pruning
		markCompletedJob(job);
		// Fire the user-configured job-end notification (registered, delivered asynchronously).
		JobEndNotifier.registerNotification(job.getJobConf(), job.getStatus());

		// start the merge of log files
		JobID id = job.getStatus().getJobID();

		// mark the job as completed
		try {
			JobHistory.JobInfo.markCompleted(id);
		} catch (IOException ioe) {
			// Best effort: history bookkeeping failure should not abort finalization.
			LOG.info("Failed to mark job " + id + " as completed!", ioe);
		}

		final JobTrackerInstrumentation metrics = getInstrumentation();
		metrics.finalizeJob(conf, id);

		// NOTE(review): 'now' appears unused in this visible excerpt — confirm before removing.
		long now = getClock().getTime();

		// mark the job for cleanup at all the trackers
		addJobForCleanup(id);

		// Delete the local user-file-for-job mapping, if present (best effort).
		try {
			File userFileForJob = new File(lDirAlloc.getLocalPathToRead(SUBDIR + "/" + id, conf).toString());
			if (userFileForJob != null) {
				userFileForJob.delete();
			}
		} catch (IOException ioe) {
			LOG.info("Failed to delete job id mapping for job " + id, ioe);
		}

		// add the blacklisted trackers to potentially faulty list
		if (job.getStatus().getRunState() == JobStatus.SUCCEEDED) {
			if (job.getNoOfBlackListedTrackers() > 0) {
				for (Entry<String, List<String>> e : job.getBlackListedTrackers().entrySet()) {
					String tt = e.getKey();
					String jobName = job.getJobID().toString();
					String[] exceptions = e.getValue().toArray(new String[0]);
					faultyTrackers.incrementFaults(tt, new JobFault(tt, jobName, exceptions));
				}
			}
		}

		String jobUser = job.getProfile().getUser();
		// add to the user to jobs mapping
		synchronized (userToJobsMap) {
			ArrayList<JobInProgress> userJobs = userToJobsMap.get(jobUser);
			if (userJobs == null) {
				userJobs = new ArrayList<JobInProgress>();
				userToJobsMap.put(jobUser, userJobs);
			}
			userJobs.add(job);
		}
	}
}
public class ScheduledThreadPoolExecutor { /** * Constrains the values of all delays in the queue to be within
* Long . MAX _ VALUE of each other , to avoid overflow in compareTo .
* This may occur if a task is eligible to be dequeued , but has
* not yet been , while some other task is added with a delay of
* Long . MAX _ VALUE . */
private long overflowFree ( long delay ) { } } | Delayed head = ( Delayed ) super . getQueue ( ) . peek ( ) ; if ( head != null ) { long headDelay = head . getDelay ( NANOSECONDS ) ; if ( headDelay < 0 && ( delay - headDelay < 0 ) ) delay = Long . MAX_VALUE + headDelay ; } return delay ; |
public class DITypeInfo { /** * Retrieves the fully - qualified name of the HSQLDB - provided java . sql
* interface implementation class whose instances would be manufactured
* by HSQLDB to retrieve column values of this type , if the
* the type does not have a standard Java mapping . < p >
* This value is simply the expected class name , regardless of whether
* HSQLDB , the specific HSQLDB distribution instance or the hosting JVM
* actually provide or support such implementations . That is , as of a
* specific release , HSQLDB may not yet provide such an implementation
* or may not automatically map to it or may not support it as a table
* column type , the version of java . sql may not define the interface to
* implement and the HSQLDB jar may not contain the implementation
* classes , even if they are defined in the corresponding release
* and build options and are supported under the hosting JVM ' s java . sql
* version . < p >
* @ return the fully - qualified name of the HSQLDB - provided java . sql
* interface implementation class whose instances would
* be manufactured by HSQLDB to retrieve column values of
* this type , given that the type does not have a standard Java
* mapping and regardless of whether a class with the indicated
* name is actually implemented or available on the class path */
String getCstMapClsName ( ) { } } | switch ( type ) { case Types . SQL_ARRAY : return "org.hsqldb_voltpatches.jdbc.JDBCArray" ; case Types . SQL_BLOB : return "org.hsqldb_voltpatches.jdbc.JDBCBlob" ; case Types . SQL_CLOB : return "org.hsqldb_voltpatches.jdbc.JDBCClob" ; case Types . NCLOB : return "org.hsqldb_voltpatches.jdbc.JDBCNClob" ; case Types . DISTINCT : return "org.hsqldb_voltpatches.jdbc.JDBCDistinct" ; case Types . SQL_REF : return "org.hsqldb_voltpatches.jdbc.JDBCRef" ; case Types . ROWID : return "org.hsqldb_voltpatches.jdbc.JDBCRowId" ; case Types . STRUCT : return "org.hsqldb_voltpatches.jdbc.JDBCStruct" ; case Types . SQL_XML : return "org.hsqldb_voltpatches.jdbc.JDBCSQLXML" ; default : return null ; } |
public class ConfigUtil { /** * Like { @ link # loadInheritedProperties ( String ) } but loads the properties into the supplied
* target object . */
public static void loadInheritedProperties ( String path , Properties target ) throws IOException { } } | loadInheritedProperties ( path , ConfigUtil . class . getClassLoader ( ) , target ) ; |
public class EJBApplicationMetaData {
	/**
	 * Verifies that the specified bean only depends on other singletons and that it does
	 * not depend on itself. This method calls itself recursively to process all
	 * dependencies.
	 *
	 * @param bmd the bean to check
	 * @param used the set of dependent beans that are already being processed; acts as the
	 *            recursion stack for cycle detection
	 * @throws RuntimeWarning if verification fails
	 */
	private void resolveBeanDependencies(BeanMetaData bmd, Set<BeanMetaData> used) throws RuntimeWarning {
		boolean isTraceOn = TraceComponent.isAnyTracingEnabled();
		if (isTraceOn && tc.isEntryEnabled())
			Tr.entry(tc, "resolveBeanDependencies: " + bmd.j2eeName);

		// F7434950.CodRev - If another bean depended on this bean, then
		// this bean's dependencies have already been resolved and verified.
		// F743-20281 - Alternatively, this bean's dependencies might have been
		// resolved early, in which case we don't need to resolve them again.
		if (bmd.ivDependsOn != null) {
			return;
		}
		bmd.ivDependsOn = new ArrayList<J2EEName>();

		// No recorded dependency links means nothing further to verify.
		Set<String> dependsOnLinks = ivSingletonDependencies.remove(bmd);
		if (dependsOnLinks == null) {
			return;
		}

		for (String dependencyLink : dependsOnLinks) {
			HomeRecord hr = resolveEJBLink(bmd.j2eeName, dependencyLink);
			BeanMetaData dependency = hr.getBeanMetaData();
			J2EEName dependencyName = dependency.j2eeName;

			// Singletons may only depend on other singleton session beans.
			if (!dependency.isSingletonSessionBean()) {
				Tr.error(tc, "SINGLETON_DEPENDS_ON_NON_SINGLETON_BEAN_CNTR0200E",
						new Object[] { bmd.j2eeName.getComponent(), bmd.j2eeName.getModule(),
								dependencyName.getComponent(), dependencyName.getModule() });
				throw new RuntimeWarning("CNTR0200E: The " + bmd.j2eeName.getComponent()
						+ " singleton session bean in the " + bmd.j2eeName.getModule()
						+ " module depends on the " + dependencyName.getComponent()
						+ " enterprise bean in the " + dependencyName.getModule()
						+ ", but the target is not a singleton session bean.");
			}

			// Re-adding a bean already on the in-progress set means a dependency cycle.
			if (!used.add(dependency)) {
				if (isTraceOn && tc.isDebugEnabled())
					Tr.debug(tc, "circular dependency from " + dependencyName);
				Tr.error(tc, "SINGLETON_DEPENDS_ON_SELF_CNTR0201E",
						new Object[] { dependencyName.getComponent(), dependencyName.getModule() });
				throw new RuntimeWarning("CNTR0201E: The " + dependencyName.getComponent()
						+ " singleton session bean in the " + dependencyName.getModule()
						+ " module directly or indirectly depends on itself.");
			}

			bmd.ivDependsOn.add(dependencyName); // d588220
			resolveBeanDependencies(dependency, used);
			used.remove(dependency);
		}

		if (isTraceOn && tc.isEntryEnabled())
			Tr.exit(tc, "resolveBeanDependencies: " + bmd.j2eeName);
	}
}
public class JMOptional {
	/**
	 * Checks whether every supplied {@link Optional} holds a value.
	 *
	 * @param optionals the optionals to inspect
	 * @return {@code true} if all optionals are present (vacuously true for none)
	 */
	public static boolean isPresentAll(Optional<?>... optionals) {
		boolean allPresent = true;
		for (Optional<?> candidate : optionals) {
			allPresent = allPresent && candidate.isPresent();
		}
		return allPresent;
	}
}
public class PropertyDispatcher { /** * Creates a instance of a class PropertyDispatcher subclass .
* @ param < T > Type of PropertyDispatcher subclass
* @ param cls PropertyDispatcher subclass class
* @ param dispatcher
* @ return */
public static < T extends PropertyDispatcher > T getInstance ( Class < T > cls , PropertySetterDispatcher dispatcher ) { } } | try { PropertyDispatcherClass annotation = cls . getAnnotation ( PropertyDispatcherClass . class ) ; if ( annotation == null ) { throw new IllegalArgumentException ( "@" + PropertyDispatcherClass . class . getSimpleName ( ) + " missing in cls" ) ; } Class < ? > c = Class . forName ( annotation . value ( ) ) ; if ( dispatcher == null ) { T t = ( T ) c . newInstance ( ) ; return t ; } else { Constructor < ? > constructor = c . getConstructor ( PropertySetterDispatcher . class ) ; return ( T ) constructor . newInstance ( dispatcher ) ; } } catch ( InvocationTargetException | SecurityException | NoSuchMethodException | ClassNotFoundException | InstantiationException | IllegalAccessException ex ) { throw new IllegalArgumentException ( ex ) ; } |
public class GasteigerPEPEPartialCharges { /** * get the electrostatic potential of the neighbours of a atom .
* @ param ac The IAtomContainer to study
* @ param ds
* @ param atom1 The position of the IAtom to study
* @ return The sum of electrostatic potential of the neighbours */
private double getElectrostaticPotentialN ( IAtomContainer ac , int atom1 , double [ ] ds ) { } } | // double CoulombForceConstant = 1 / ( 4 * Math . PI * 8.81 / * Math . pow ( 10 , - 12 ) * / ) ;
double CoulombForceConstant = 0.048 ; double sum = 0.0 ; try { if ( factory == null ) factory = AtomTypeFactory . getInstance ( "org/openscience/cdk/config/data/jmol_atomtypes.txt" , ac . getBuilder ( ) ) ; List < IAtom > atoms = ac . getConnectedAtomsList ( ac . getAtom ( atom1 ) ) ; for ( IAtom atom : atoms ) { double covalentradius = 0 ; String symbol = atom . getSymbol ( ) ; IAtomType type = factory . getAtomType ( symbol ) ; covalentradius = type . getCovalentRadius ( ) ; double charge = ds [ STEP_SIZE * atom1 + atom1 + 5 ] ; logger . debug ( "sum_(" + sum + ") = CFC(" + CoulombForceConstant + ")*charge(" + charge + "/ret(" + covalentradius ) ; sum += CoulombForceConstant * charge / ( covalentradius * covalentradius ) ; } } catch ( CDKException e ) { logger . debug ( e ) ; } return sum ; |
public class AbstractWsBridgeSession { /** * Start up timer for the session timeout of the WebSocket session */
public void startupSessionTimeoutCommand ( ) { } } | if ( initSessionTimeoutCommand . compareAndSet ( false , true ) ) { final Long sessionTimeout = getSessionTimeout ( ) ; if ( sessionTimeout != null && sessionTimeout > 0 ) { if ( scheduledEventslogger . isTraceEnabled ( ) ) { scheduledEventslogger . trace ( "Establishing a session timeout of " + sessionTimeout + " seconds for WebSocket session (" + getId ( ) + ")." ) ; } scheduleCommand ( this . sessionTimeout , sessionTimeout ) ; } } |
public class InterceptorChain { /** * Adds a new interceptor in list after an interceptor of a given type .
* @ return true if the interceptor was added ; i . e . the afterInterceptor exists */
public boolean addInterceptorAfter ( CommandInterceptor toAdd , Class < ? extends CommandInterceptor > afterInterceptor ) { } } | return asyncInterceptorChain . addInterceptorAfter ( toAdd , afterInterceptor ) ; |
public class GitRepo { /** * Pushes the commits od current branch .
* @ param project - Git project */
void pushCurrentBranch ( File project ) { } } | try ( Git git = this . gitFactory . open ( file ( project ) ) ) { this . gitFactory . push ( git ) . call ( ) ; } catch ( Exception e ) { throw new IllegalStateException ( e ) ; } |
public class TriggerManager { /** * Determines which operations the given trigger overrides . */
private int selectTypes ( Trigger < ? super S > trigger ) { } } | Class < ? extends Trigger > triggerClass = trigger . getClass ( ) ; int types = 0 ; if ( overridesOneMethod ( triggerClass , INSERT_METHODS ) ) { types |= FOR_INSERT ; } if ( overridesOneMethod ( triggerClass , UPDATE_METHODS ) ) { types |= FOR_UPDATE ; } if ( overridesOneMethod ( triggerClass , DELETE_METHODS ) ) { types |= FOR_DELETE ; } if ( overridesMethod ( triggerClass , AFTER_LOAD_METHOD ) ) { types |= FOR_LOAD ; } return types ; |
public class GrailsASTUtils { /** * Generates a fatal compilation error .
* @ param sourceUnit the SourceUnit
* @ param astNode the ASTNode which caused the error
* @ param message The error message */
public static void error ( final SourceUnit sourceUnit , final ASTNode astNode , final String message ) { } } | error ( sourceUnit , astNode , message , true ) ; |
public class PoolManager { /** * Get the pool name for a JobInProgress from its configuration . This uses
* the " project " property in the jobconf by default , or the property set with
* " mapred . fairscheduler . poolnameproperty " . */
public synchronized String getPoolName ( JobInProgress job ) { } } | String name = getExplicitPoolName ( job ) . trim ( ) ; String redirect = poolRedirectMap . get ( name ) ; if ( redirect == null ) { return name ; } else { return redirect ; } |
public class IoUtil { /** * Write the entire contents of the supplied string to the given stream . This method always flushes and closes the stream when
* finished .
* @ param input the content to write to the stream ; may be null
* @ param stream the stream to which the content is to be written
* @ param bufferSize the size of the buffer ; must be positive
* @ throws IOException
* @ throws IllegalArgumentException if the stream is null */
public static void write ( InputStream input , OutputStream stream , int bufferSize ) throws IOException { } } | CheckArg . isNotNull ( stream , "destination stream" ) ; CheckArg . isPositive ( bufferSize , "bufferSize" ) ; boolean error = false ; try { if ( input != null ) { byte [ ] buffer = new byte [ bufferSize ] ; try { int numRead = 0 ; while ( ( numRead = input . read ( buffer ) ) > - 1 ) { stream . write ( buffer , 0 , numRead ) ; } } finally { input . close ( ) ; } } } catch ( IOException e ) { error = true ; // this error should be thrown , even if there is an error flushing / closing stream
throw e ; } catch ( RuntimeException e ) { error = true ; // this error should be thrown , even if there is an error flushing / closing stream
throw e ; } finally { try { stream . flush ( ) ; } catch ( IOException e ) { if ( ! error ) throw e ; } finally { try { stream . close ( ) ; } catch ( IOException e ) { if ( ! error ) throw e ; } } } |
/**
 * {@inheritDoc}
 */
public Content commentTagsToOutput(DocTree holderTag, List<? extends DocTree> tags) {
    // Delegate with no element context and non-first-sentence rendering.
    return commentTagsToOutput(holderTag, null, tags, false);
}
public class TypeRegistry { /** * Sometimes we discover the reloadabletype during program execution , for example A calls B and we haven ' t yet seen
* B . We find B has been loaded by a parent classloader , let ' s remember B here so we can do fast lookups for it .
* @ param typeId the id for the type
* @ param rtype the ReloadableType to associate with the id */
public void rememberReloadableType ( int typeId , ReloadableType rtype ) { } } | if ( typeId >= reloadableTypes . length ) { resizeReloadableTypeArray ( typeId ) ; } reloadableTypes [ typeId ] = rtype ; if ( ( typeId + 1 ) > reloadableTypesSize ) { reloadableTypesSize = typeId + 1 ; } |
public class ObjectsApi { /** * Get DNs or agent groups . ( asynchronously )
* Get DNs ( directory numbers ) or agent groups from Configuration Server with the specified filters .
* @ param objectType The type of object . Possible values are dns , skills , dn - groups or agent - groups . ( required )
* @ param dnType If the object _ type is & # 39 ; dns & # 39 ; , then you need to specify the DN type ( for example , CFGRoutingPoint ) . For possible values , see [ CfgDNType ] ( https : / / docs . genesys . com / Documentation / PSDK / 9.0 . x / ConfigLayerRef / CfgDNType ) in the Platform SDK documentation . ( optional )
* @ param dnGroups If the object _ type is & # 39 ; dns & # 39 ; , may contain a list of DN group names to filter DNs . ( optional )
* @ param groupType If the object _ type is & # 39 ; agent - groups & # 39 ; , then you need to specify the agent group type . ( optional )
* @ param limit The number of objects the Provisioning API should return . ( optional )
* @ param offset The number of matches the Provisioning API should skip in the returned objects . ( optional )
* @ param searchTerm The term that you want to search for in the object keys . The Provisioning API searches for the this term in the value of the key you specify in & # 39 ; search _ key & # 39 ; . ( optional )
* @ param searchKey The key you want the Provisioning API to use when searching for the term you specified in & # 39 ; search _ term & # 39 ; . You can find valid key names in the Platform SDK documentation for [ CfgDN ] ( https : / / docs . genesys . com / Documentation / PSDK / 9.0 . x / ConfigLayerRef / CfgDN ) and [ CfgAgentGroup ] ( https : / / docs . genesys . com / Documentation / PSDK / latest / ConfigLayerRef / CfgAgentGroup ) . ( optional )
* @ param matchMethod The method the Provisioning API should use to match the & # 39 ; search _ term & # 39 ; . Possible values are includes , startsWith , endsWith , and isEqual . ( optional , default to includes )
* @ param sortKey A key in [ CfgDN ] ( https : / / docs . genesys . com / Documentation / PSDK / 9.0 . x / ConfigLayerRef / CfgDN ) , [ CfgSkill ] ( https : / / docs . genesys . com / Documentation / PSDK / 9.0 . x / ConfigLayerRef / CfgSkill ) or [ CfgAgentGroup ] ( https : / / docs . genesys . com / Documentation / PSDK / latest / ConfigLayerRef / CfgAgentGroup ) to sort the search results . ( optional )
* @ param sortAscending Specifies whether to sort the search results in ascending or descending order . ( optional , default to true )
* @ param sortMethod Specifies the sort method . Possible values are caseSensitive , caseInsensitive or numeric . ( optional , default to caseSensitive )
* @ param dbids Comma - separated list of DNs to be fetched . ( optional )
* @ param inUse Specifies whether to return only skills actually assigned to agents . ( optional , default to false )
* @ param callback The callback to be executed when the API call finishes
* @ return The request call
* @ throws ApiException If fail to process the API call , e . g . serializing the request body object */
public com . squareup . okhttp . Call getObjectAsync ( String objectType , String dnType , List < String > dnGroups , String groupType , Integer limit , Integer offset , String searchTerm , String searchKey , String matchMethod , String sortKey , Boolean sortAscending , String sortMethod , String dbids , Boolean inUse , final ApiCallback < GetObjectsSuccessResponse > callback ) throws ApiException { } } | ProgressResponseBody . ProgressListener progressListener = null ; ProgressRequestBody . ProgressRequestListener progressRequestListener = null ; if ( callback != null ) { progressListener = new ProgressResponseBody . ProgressListener ( ) { @ Override public void update ( long bytesRead , long contentLength , boolean done ) { callback . onDownloadProgress ( bytesRead , contentLength , done ) ; } } ; progressRequestListener = new ProgressRequestBody . ProgressRequestListener ( ) { @ Override public void onRequestProgress ( long bytesWritten , long contentLength , boolean done ) { callback . onUploadProgress ( bytesWritten , contentLength , done ) ; } } ; } com . squareup . okhttp . Call call = getObjectValidateBeforeCall ( objectType , dnType , dnGroups , groupType , limit , offset , searchTerm , searchKey , matchMethod , sortKey , sortAscending , sortMethod , dbids , inUse , progressListener , progressRequestListener ) ; Type localVarReturnType = new TypeToken < GetObjectsSuccessResponse > ( ) { } . getType ( ) ; apiClient . executeAsync ( call , localVarReturnType , callback ) ; return call ; |
public class StringType { /** * The localized string and the internal string value are equal . So the
* internal value can be set directly with method { @ link # setValue } .
* @ param _ values values to evaluate
* @ return string representation for the < code > _ values < / code > */
protected String eval ( final Object [ ] _values ) { } } | final String ret ; if ( ( _values == null ) || ( _values . length == 0 ) || ( _values [ 0 ] == null ) ) { ret = null ; } else if ( _values [ 0 ] instanceof String ) { ret = ( String ) _values [ 0 ] ; } else if ( _values [ 0 ] != null ) { ret = _values [ 0 ] . toString ( ) ; } else { ret = null ; } return ret ; |
public class HirshbergMatcher { /** * Gets the Longest Common Subsequence of two strings , using Dynamic
* programming techniques , and minimal memory
* @ param strA the first String
* @ param strB the second String
* @ return the Longest Common Subsequence of strA and strB */
public String getLCS ( String strA , String strB ) { } } | if ( "" . equals ( strA ) ) { return "" ; } StringBuilder sb = new StringBuilder ( ) ; algC ( sb , strA . length ( ) , strB . length ( ) , strA , strB ) ; return sb . toString ( ) ; |
public class StringTrieBuilder { /** * Makes sure that there is only one unique FinalValueNode registered
* with this value .
* Avoids creating a node if the value is a duplicate .
* @ param value A final value .
* @ return A FinalValueNode with the given value . */
private final ValueNode registerFinalValue ( int value ) { } } | // We always register final values because while ADDING
// we do not know yet whether we will build fast or small .
lookupFinalValueNode . setFinalValue ( value ) ; Node oldNode = nodes . get ( lookupFinalValueNode ) ; if ( oldNode != null ) { return ( ValueNode ) oldNode ; } ValueNode newNode = new ValueNode ( value ) ; // If put ( ) returns a non - null value from an equivalent , previously
// registered node , then get ( ) failed to find that and we will leak newNode .
oldNode = nodes . put ( newNode , newNode ) ; assert ( oldNode == null ) ; return newNode ; |
/**
 * Request the IndividualProfile for the given bank account information.
 * This option is mutually exclusive with {@link #withIndividualId(int)}
 * and {@link #withLoginPassword(String, char[])}.
 *
 * @param routingNumber the individual's bank routing number
 * @param accountNumber the individual's bank account number
 * @return this, for fluent chaining
 */
public GetIndividualProfilesRequest withMICR(final String routingNumber, final String accountNumber) {
    this.routingNumber = routingNumber;
    this.accountNumber = accountNumber;
    return this;
}
/**
 * See if this action mapping is our custom config type, and if so, see if the action
 * should use a member variable in the page flow controller as its form bean
 * (the {@code useFormBean} attribute on {@code @Jpf.Action}). If so, return the
 * appropriate Field in the controller class; otherwise return null.
 */
private Field getPageFlowScopedFormMember(ActionMapping mapping, HttpServletRequest request) {
    if (mapping instanceof PageFlowActionMapping) {
        PageFlowActionMapping pfam = (PageFlowActionMapping) mapping;
        String formMember = pfam.getFormMember();
        if (formMember == null) return null;
        Field field = null;
        FlowController fc = PageFlowRequestWrapper.get(request).getCurrentFlowController();
        try {
            // First look for a field declared directly on the controller class.
            field = fc.getClass().getDeclaredField(formMember);
        } catch (NoSuchFieldException e) {
            // try finding a non-private field from the class hierarchy
            field = InternalUtils.lookupField(fc.getClass(), formMember);
            if (field == null || Modifier.isPrivate(field.getModifiers())) {
                LOG.error("Could not find page flow member " + formMember + " as the form bean.");
                return null;
            }
        }
        // Non-public fields must be made accessible before reflective use.
        if (!Modifier.isPublic(field.getModifiers())) field.setAccessible(true);
        return field;
    }
    return null;
}
public class Roster { /** * Returns a List of Presence objects for all of a user ' s current presences if no presence information is available ,
* such as when you are not subscribed to the user ' s presence updates .
* @ param bareJid an XMPP ID , e . g . jdoe @ example . com .
* @ return a List of Presence objects for all the user ' s current presences , or an unavailable presence if no
* presence information is available . */
public List < Presence > getAllPresences ( BareJid bareJid ) { } } | Map < Resourcepart , Presence > userPresences = getPresencesInternal ( bareJid ) ; List < Presence > res ; if ( userPresences == null ) { // Create an unavailable presence if none was found
Presence unavailable = new Presence ( Presence . Type . unavailable ) ; unavailable . setFrom ( bareJid ) ; res = new ArrayList < > ( Arrays . asList ( unavailable ) ) ; } else { res = new ArrayList < > ( userPresences . values ( ) . size ( ) ) ; for ( Presence presence : userPresences . values ( ) ) { res . add ( presence . clone ( ) ) ; } } return res ; |
public class TaggedArgumentParser { /** * Parse a tag string and populate a TaggedArgument with values .
* @ param taggedArg TaggedArgument to receive tags
* @ param longArgName name of the argument being tagged
* @ param tagString tag string ( including logical name and attributes , no option name ) */
public static void populateArgumentTags ( final TaggedArgument taggedArg , final String longArgName , final String tagString ) { } } | if ( tagString == null ) { taggedArg . setTag ( null ) ; taggedArg . setTagAttributes ( Collections . emptyMap ( ) ) ; } else { final ParsedArgument pa = ParsedArgument . of ( longArgName , tagString ) ; taggedArg . setTag ( pa . getName ( ) ) ; taggedArg . setTagAttributes ( pa . keyValueMap ( ) ) ; } |
public class ElementUI { /** * Returns the URL of the default template to use in createFromTemplate . Override this method to
* provide an alternate default URL .
* @ return The template URL . */
protected String getTemplateUrl ( ) { } } | return "web/" + getClass ( ) . getPackage ( ) . getName ( ) . replace ( "." , "/" ) + "/" + StringUtils . uncapitalize ( getClass ( ) . getSimpleName ( ) ) + ".fsp" ; |
public class MetricReportReporter { /** * Extracts metrics from { @ link com . codahale . metrics . Gauge } .
* @ param name name of the { @ link com . codahale . metrics . Gauge } .
* @ param gauge instance of { @ link com . codahale . metrics . Gauge } to serialize .
* @ return a list of { @ link org . apache . gobblin . metrics . Metric } . */
protected List < Metric > serializeGauge ( String name , Gauge gauge ) { } } | List < Metric > metrics = Lists . newArrayList ( ) ; try { metrics . add ( new Metric ( name , Double . parseDouble ( gauge . getValue ( ) . toString ( ) ) ) ) ; } catch ( NumberFormatException exception ) { LOGGER . info ( "Failed to serialize gauge metric. Not compatible with double value." , exception ) ; } return metrics ; |
public class GreenPepperXmlRpcClient { /** * { @ inheritDoc } */
@ SuppressWarnings ( "unchecked" ) public Runner getRunner ( String name , String identifier ) throws GreenPepperServerException { } } | log . debug ( "Retreiving all runners" ) ; Vector < Object > runnerParams = ( Vector < Object > ) execute ( XmlRpcMethodName . getRunner , CollectionUtil . toVector ( name ) , identifier ) ; return XmlRpcDataMarshaller . toRunner ( runnerParams ) ; |
/**
 * Create a new, random chromosome with the given valid alleles.
 *
 * @since 2.0
 * @param <T> the gene type of the chromosome
 * @param alleles the valid alleles used for this permutation arrays
 * @return a new chromosome with the given alleles
 * @throws IllegalArgumentException if the given allele array is empty
 * @throws NullPointerException if one of the alleles is {@code null}
 */
@SafeVarargs
public static <T> PermutationChromosome<T> of(final T... alleles) {
    // Delegate to the ISeq-based factory; ISeq.of copies the varargs array.
    return of(ISeq.of(alleles));
}
/**
 * Reloads the entire roster from the server. This is an asynchronous operation,
 * which means the method will return immediately, and the roster will be
 * reloaded at a later point when the server responds to the reload request.
 *
 * @throws NotConnectedException if the connection is not active
 * @throws InterruptedException if the calling thread is interrupted while sending
 */
public void reload() throws NotConnectedException, InterruptedException {
    // Fire-and-forget: the response is handled elsewhere by a stanza listener.
    AgentStatusRequest request = new AgentStatusRequest();
    request.setTo(workgroupJID);
    connection.sendStanza(request);
}
/**
 * Gets the upper and lower price limits for a futures contract.
 *
 * @param currencyPair the currency pair to query
 * @param contractType the futures contract (its name is passed to the API)
 * @return the price-limit response from the OkCoin API
 * @throws IOException on network/serialization failure
 */
public OkCoinFutureComment getFuturePriceLimit(CurrencyPair currencyPair, FuturesContract contractType) throws IOException {
    // NOTE(review): "1" appears to be a fixed API version/flag parameter for
    // the okCoin endpoint — confirm against the OkCoin futures API docs.
    return okCoin.getFuturePriceLimit("1", OkCoinAdapters.adaptSymbol(currencyPair), contractType.getName());
}
/**
 * <!-- begin-user-doc -->
 * Reports whether the given structural feature is set: the LEFT/RIGHT
 * references are considered set when non-null; all other features defer
 * to the superclass.
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public boolean eIsSet(int featureID) {
    switch (featureID) {
        case SimpleAntlrPackage.UNTIL_ELEMENT__LEFT:
            return left != null;
        case SimpleAntlrPackage.UNTIL_ELEMENT__RIGHT:
            return right != null;
    }
    return super.eIsSet(featureID);
}
public class DataEncoder { /** * Encodes the given signed Long object into exactly 1 or 9 bytes . If the
* Long object is never expected to be null , consider encoding as a long
* primitive .
* @ param value optional signed Long value to encode
* @ param dst destination for encoded bytes
* @ param dstOffset offset into destination array
* @ return amount of bytes written */
public static int encode ( Long value , byte [ ] dst , int dstOffset ) { } } | if ( value == null ) { dst [ dstOffset ] = NULL_BYTE_HIGH ; return 1 ; } else { dst [ dstOffset ] = NOT_NULL_BYTE_HIGH ; encode ( value . longValue ( ) , dst , dstOffset + 1 ) ; return 9 ; } |
/**
 * Execute Analysis.
 *
 * @param resourceGroupName name of the resource group to which the resource belongs
 * @param siteName site name
 * @param diagnosticCategory category name
 * @param analysisName analysis resource name
 * @param startTime start time
 * @param endTime end time
 * @param timeGrain time grain
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @throws DefaultErrorResponseException thrown if the request is rejected by server
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
 * @return the DiagnosticAnalysisInner object if successful
 */
public DiagnosticAnalysisInner executeSiteAnalysis(String resourceGroupName, String siteName, String diagnosticCategory, String analysisName, DateTime startTime, DateTime endTime, String timeGrain) {
    // Blocking wrapper around the async service call; single() unwraps the
    // one-element observable, body() extracts the payload.
    return executeSiteAnalysisWithServiceResponseAsync(resourceGroupName, siteName, diagnosticCategory, analysisName, startTime, endTime, timeGrain).toBlocking().single().body();
}
public class SystemBar { /** * Set the content layout full the NavigationBar , but do not hide NavigationBar . */
@ RequiresApi ( api = Build . VERSION_CODES . LOLLIPOP ) public static void invasionNavigationBar ( Window window ) { } } | View decorView = window . getDecorView ( ) ; decorView . setSystemUiVisibility ( decorView . getSystemUiVisibility ( ) | View . SYSTEM_UI_FLAG_LAYOUT_HIDE_NAVIGATION | View . SYSTEM_UI_FLAG_LAYOUT_STABLE ) ; window . setNavigationBarColor ( Color . TRANSPARENT ) ; |
public class ExternalContext { /** * < p class = " changed _ added _ 2_0 " > Returns the MIME type of the
* specified file or < code > null < / code > if the MIME type is not
* known . The MIME type is determined by the container . < / p >
* < p class = " changed _ added _ 2_0 " > It is valid to call this method
* during application startup or shutdown . If called during application
* startup or shutdown , this method calls through to the
* < code > getMimeType ( ) < / code > method on the same container
* context instance ( < code > ServletContext < / code > or
* < code > PortletContext < / code > ) as the one used when calling
* < code > getMimeType ( ) < / code > on the
* < code > ExternalContext < / code > returned by the
* < code > FacesContext < / code > during an actual request . < / p >
* < div class = " changed _ added _ 2_0 " >
* < p > < em > Servlet : < / em > This must be the value returned by the
* < code > javax . servlet . ServletContext < / code > method
* < code > getMimeType ( ) < / code > . < / p >
* < / div >
* @ param file The file for which the mime type should be obtained .
* @ since 2.0 */
public String getMimeType ( String file ) { } } | if ( defaultExternalContext != null ) { return defaultExternalContext . getMimeType ( file ) ; } throw new UnsupportedOperationException ( ) ; |
/**
 * (non-Javadoc)
 *
 * Registers the message for event callbacks, bracketed by entry/exit trace.
 *
 * @see com.ibm.ws.sib.processor.impl.interfaces.DestinationHandler#registerForMessageEvents(com.ibm.ws.sib.processor.impl.interfaces.SIMPMessage)
 */
@Override
public void registerForMessageEvents(SIMPMessage msg) {
    if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
        SibTr.entry(tc, "registerForMessageEvents", msg);
    registerForEvents(msg);
    if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
        SibTr.exit(tc, "registerForMessageEvents");
}
public class JdbcDatabase { /** * Open the physical database .
* @ exception DBException On open errors . */
public boolean setupDirectDataSourceConnection ( ) throws DBException { } } | if ( m_JDBCConnection != null ) return true ; try { if ( m_datasourceFactory == null ) { String strClassName = this . getProperty ( SQLParams . DATASOURCE_FACTORY ) ; strClassName = ClassServiceUtility . getFullClassName ( strClassName ) ; try { m_datasourceFactory = ( DatasourceFactory ) ClassServiceUtility . getClassService ( ) . makeObjectFromClassName ( strClassName ) ; // xClass < ? > c = Class . forName ( strClassName ) ;
// xm _ datasourceFactory = ( DatasourceFactory ) c . newInstance ( ) ;
} catch ( Exception e ) { Utility . getLogger ( ) . warning ( "Error on create class: " + strClassName ) ; e . printStackTrace ( ) ; } } if ( m_datasourceFactory == null ) return false ; // No factory
ConnectionPoolDataSource poolDataSource = m_datasourceFactory . getPooledDataSource ( this ) ; if ( poolDataSource != null ) if ( poolDataSource . getPooledConnection ( ) != null ) m_JDBCConnection = poolDataSource . getPooledConnection ( ) . getConnection ( ) ; if ( m_JDBCConnection == null ) { // Otherwise , just try plain datasource connection
DataSource dataSource = m_datasourceFactory . getFakePooledDataSource ( this ) ; if ( dataSource != null ) m_JDBCConnection = dataSource . getConnection ( ) ; } if ( m_JDBCConnection == null ) { // Otherwise , just try plain datasource connection
DataSource dataSource = m_datasourceFactory . getDataSource ( this ) ; if ( dataSource != null ) m_JDBCConnection = dataSource . getConnection ( ) ; else return false ; // Failure
} } catch ( SQLException e ) { return false ; // Failure
} return true ; // Success ! |
/**
 * Gets a list of integration account schemas.
 *
 * @param resourceGroupName the resource group name
 * @param integrationAccountName the integration account name
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable to the PagedList&lt;IntegrationAccountSchemaInner&gt; object
 */
public Observable<Page<IntegrationAccountSchemaInner>> listByIntegrationAccountsAsync(final String resourceGroupName, final String integrationAccountName) {
    // Strip the ServiceResponse wrapper, emitting only the page body.
    return listByIntegrationAccountsWithServiceResponseAsync(resourceGroupName, integrationAccountName).map(new Func1<ServiceResponse<Page<IntegrationAccountSchemaInner>>, Page<IntegrationAccountSchemaInner>>() {
        @Override
        public Page<IntegrationAccountSchemaInner> call(ServiceResponse<Page<IntegrationAccountSchemaInner>> response) {
            return response.body();
        }
    });
}
/**
 * Updates the commerce order note in the database or adds it if it does not yet
 * exist. Also notifies the appropriate model listeners; the @Indexable annotation
 * triggers a search-index reindex for the entity.
 *
 * @param commerceOrderNote the commerce order note
 * @return the commerce order note that was updated
 */
@Indexable(type = IndexableType.REINDEX)
@Override
public CommerceOrderNote updateCommerceOrderNote(CommerceOrderNote commerceOrderNote) {
    return commerceOrderNotePersistence.update(commerceOrderNote);
}
public class AggregateEventAnalysisEngine { /** * Pops { @ link Notification } s out of the queue until the start time of the queue ' s head
* is after the parameter time . The queue of notifications MUST be sorted in ascending
* order by start time .
* @ param notifications the queue of { @ link Notification } s being trimmed
* @ param time the time that all { @ link Notification } s in the queue must be after */
protected void trim ( Queue < Notification > notifications , DateTime time ) { } } | while ( ! notifications . isEmpty ( ) && ! notifications . peek ( ) . getStartTime ( ) . isAfter ( time ) ) { notifications . poll ( ) ; } |
/**
 * Computes the status code for a given set of cache statistics.
 *
 * @param statistics cache statistics
 * @return WARN when evictions exceed the configured threshold, OUT_OF_SERVICE
 *         when free space is below the threshold, otherwise UP
 */
protected Status status(final CacheStatistics statistics) {
    // Eviction check wins over capacity check; the "> 0" guards skip caches
    // that report zero/absent values for these statistics.
    if (statistics.getEvictions() > 0 && statistics.getEvictions() > evictionThreshold) {
        return new Status("WARN");
    }
    if (statistics.getPercentFree() > 0 && statistics.getPercentFree() < threshold) {
        return Status.OUT_OF_SERVICE;
    }
    return Status.UP;
}
public class XLinkUtils { /** * Returns a distinct List of all RemoteToolViews , contained in a RemoteToolRegistration .
* @ see RemoteToolView
* @ see RemoteToolRegistration */
public static XLinkConnectorView [ ] getViewsOfRegistration ( XLinkConnectorRegistration registration ) { } } | List < XLinkConnectorView > viewsOfRegistration = new ArrayList < XLinkConnectorView > ( ) ; Map < ModelDescription , XLinkConnectorView [ ] > modelsToViews = registration . getModelsToViews ( ) ; for ( XLinkConnectorView [ ] views : modelsToViews . values ( ) ) { for ( int i = 0 ; i < views . length ; i ++ ) { XLinkConnectorView view = views [ i ] ; if ( ! viewsOfRegistration . contains ( view ) ) { viewsOfRegistration . add ( view ) ; } } } return viewsOfRegistration . toArray ( new XLinkConnectorView [ 0 ] ) ; |
/**
 * Minifies the created files.
 *
 * @param file the created file
 * @return {@literal false} if the pipeline processing must be interrupted for this event.
 *         Most watchers should return {@literal true} to let other watchers be notified.
 * @throws org.wisdom.maven.WatchingException if the watcher failed to process the given file
 */
@Override
public boolean fileCreated(File file) throws WatchingException {
    if (stylesheets != null) {
        // Aggregation mode: reprocess the whole configured stylesheet set.
        try {
            process(stylesheets);
        } catch (MojoExecutionException e) {
            throw new WatchingException("Error while aggregating or minifying CSS resources", file, e);
        }
    } else {
        // No aggregation configured: minify just the new file.
        process(file);
    }
    return true;
}
public class JFAPCommunicator { /** * Utility method to invalidate Connection . Parameters passed to ConnectionInterface . invalidate
* @ param notifyPeer
* @ param throwable
* @ param debugReason */
protected void invalidateConnection ( boolean notifyPeer , Throwable throwable , String debugReason ) { } } | if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "invalidateConnection" , new Object [ ] { new Boolean ( notifyPeer ) , throwable , debugReason } ) ; if ( con != null ) { ConnectionInterface connection = con . getConnectionReference ( ) ; if ( connection != null ) { connection . invalidate ( notifyPeer , throwable , debugReason ) ; } } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "invalidateConnection" ) ; |
public class ListMatrix { /** * { @ inheritDoc } */
public double [ ] getColumn ( int column ) { } } | int i = 0 ; double [ ] columnValues = new double [ vectors . size ( ) ] ; for ( DoubleVector vector : vectors ) columnValues [ i ++ ] = vector . get ( column ) ; return columnValues ; |
public class SmartTable { /** * Configures the specified style names on the specified row and column . The first style is set
* as the primary style and additional styles are added onto that . */
public void setStyleNames ( int row , int column , String ... styles ) { } } | int idx = 0 ; for ( String style : styles ) { if ( idx ++ == 0 ) { getFlexCellFormatter ( ) . setStyleName ( row , column , style ) ; } else { getFlexCellFormatter ( ) . addStyleName ( row , column , style ) ; } } |
/**
 * Retrieves all attributes of the element via a JavaScript snippet that walks
 * the DOM node's attribute list. If the element isn't present, or the
 * attributes can't be accessed, a null value will be returned.
 *
 * @return map of attribute name to value, or null on absence/failure
 */
@SuppressWarnings("unchecked")
public Map<String, String> allAttributes() {
    if (!element.is().present()) {
        return null;
    }
    try {
        WebElement webElement = element.getWebElement();
        JavascriptExecutor js = (JavascriptExecutor) driver;
        // Build a {name: value} object from the element's attribute NamedNodeMap.
        return (Map<String, String>) js.executeScript("var items = {}; for (index = 0; index < arguments[0].attributes.length; ++index) { items[arguments[0].attributes[index].name] = arguments[0].attributes[index].value }; return items;", webElement);
    } catch (NoSuchMethodError | Exception e) {
        // Best-effort: any driver/JS failure degrades to null rather than throwing.
        log.warn(e);
        return null;
    }
}
public class DoubleMatrix { /** * Returns the four adjacencies with order : up , right , down , left . < code > null < / code > is set if the adjacency doesn ' t exist .
* @ param i
* @ param j
* @ return */
public Stream < IntPair > adjacent4Points ( final int i , final int j ) { } } | final IntPair up = i == 0 ? null : IntPair . of ( i - 1 , j ) ; final IntPair right = j == cols - 1 ? null : IntPair . of ( i , j + 1 ) ; final IntPair down = i == rows - 1 ? null : IntPair . of ( i + 1 , j ) ; final IntPair left = j == 0 ? null : IntPair . of ( i , j - 1 ) ; return Stream . of ( up , right , down , left ) ; |
/**
 * Initializes the preference which allows to show the edit text dialog: a click
 * lazily builds the dialog and shows it with rectangular-reveal animations
 * anchored to the clicked preference.
 */
private void initializeShowEditTextDialogPreference() {
    Preference preference = findPreference(getString(R.string.show_edit_text_dialog_preference_key));
    preference.setOnPreferenceClickListener(new OnPreferenceClickListener() {
        @Override
        public boolean onPreferenceClick(final Preference preference) {
            initializeEditTextDialog();
            // Use the same reveal animation for show, dismiss and cancel.
            editTextDialog.setShowAnimation(createRectangularRevealAnimation(preference));
            editTextDialog.setDismissAnimation(createRectangularRevealAnimation(preference));
            editTextDialog.setCancelAnimation(createRectangularRevealAnimation(preference));
            editTextDialog.show();
            return true;
        }
    });
}
public class StepProgress { /** * No constructor with auto logging - call beginStep ( ) first */
@ Override public StringBuilder appendToBuffer ( StringBuilder buf ) { } } | buf . append ( super . getTask ( ) ) ; if ( isComplete ( ) ) { buf . append ( ": complete." ) ; } else { buf . append ( " #" ) . append ( getProcessed ( ) + 1 ) . append ( '/' ) . append ( getTotal ( ) ) ; buf . append ( ": " ) . append ( getStepTitle ( ) ) ; } buf . append ( '\n' ) ; return buf ; |
public class FlowContextFactory { /** * Add flowContext to ThreadLocal when coming to another component .
* @ param flowContext */
public static void addFlowContext ( final FlowContext flowContext ) { } } | if ( null == flowContext ) { clearFlowcontext ( ) ; return ; } FLOW_CONTEXT_THREAD_LOCAL . set ( flowContext ) ; MDC . put ( "flowCtxt" , flowContext . toString ( ) ) ; |
public class DefaultDmnTransform { /** * listeners / / / / / */
protected void notifyTransformListeners ( Decision decision , DmnDecision dmnDecision ) { } } | for ( DmnTransformListener transformListener : transformListeners ) { transformListener . transformDecision ( decision , dmnDecision ) ; } |
public class WFileWidget { /** * Returns a list of strings that determine the allowable file mime types accepted by the file input . If no types
* have been added an empty list is returned . An empty list indicates that all file types are accepted .
* @ return The mime types accepted by this file input e . g . " text / plain " , " text / html " , " application / pdf " . */
public List < String > getFileTypes ( ) { } } | List < String > fileTypes = getComponentModel ( ) . fileTypes ; if ( fileTypes == null ) { return Collections . emptyList ( ) ; } return Collections . unmodifiableList ( fileTypes ) ; |
public class PUBLISH { /** * Crates a deep copy of a { @ link PUBLISH } object .
* Use this method if you want to reuse a publish received by a callback to prevent side effects .
* @ param original the original PUBLISH message
* @ return a deep copy of the original PUBLISH message */
public static PUBLISH copy ( final PUBLISH original ) { } } | final PUBLISH publish = new PUBLISH ( ) ; publish . setQoS ( original . getQoS ( ) ) ; publish . setRetain ( original . isRetain ( ) ) ; publish . setPayload ( original . getPayload ( ) ) ; publish . setTopic ( original . getTopic ( ) ) ; publish . setDuplicateDelivery ( original . isDuplicateDelivery ( ) ) ; publish . setMessageId ( original . getMessageId ( ) ) ; publish . setTTL ( original . getTTL ( ) ) ; return publish ; |
public class CPDefinitionLocalServiceWrapper {
    /**
     * Returns the cp definition matching the UUID and group.
     * Pure delegation to the wrapped local service instance.
     *
     * @param uuid the cp definition's UUID
     * @param groupId the primary key of the group
     * @return the matching cp definition
     * @throws PortalException if a matching cp definition could not be found
     */
    @Override
    public com.liferay.commerce.product.model.CPDefinition getCPDefinitionByUuidAndGroupId(
            String uuid, long groupId)
            throws com.liferay.portal.kernel.exception.PortalException {
        return _cpDefinitionLocalService.getCPDefinitionByUuidAndGroupId(uuid, groupId);
    }
}
public class Filters { /** * Specify one or more < a > KeyUsage < / a > extension values .
* @ param keyUsage
* Specify one or more < a > KeyUsage < / a > extension values .
* @ see KeyUsageName */
public void setKeyUsage ( java . util . Collection < String > keyUsage ) { } } | if ( keyUsage == null ) { this . keyUsage = null ; return ; } this . keyUsage = new java . util . ArrayList < String > ( keyUsage ) ; |
public class JmsEventTransportImpl { /** * Publish the supplied event to the destination . The destination address ( to publish the event to )
* will be derived using the plugged destination resolver . If you want to implement different / more
* advanced destination resolution , implement your own JMSDestinationResolver . Note that obviously
* both the publisher and consumer must arrive at the same destination name , so should be using the
* same convention ( since they ' re unlikely to be running in the same Cougar instance )
* The event will be published using a thread from the pool and its associated jms session
* @ param event
* @ throws com . betfair . cougar . core . api . exception . CougarException
* @ see com . betfair . cougar . transport . api . protocol . events . jms . JMSDestinationResolver */
@ Override public void publish ( Event event ) throws CougarException { } } | String destinationName = destinationResolver . resolveDestination ( event . getClass ( ) , null ) ; publish ( event , destinationName , eventServiceBindingDescriptor ) ; |
public class Collectors {
    /**
     * Returns a {@code Collector} which performs a reduction of its
     * input elements under a specified {@code BinaryOperator} using the
     * provided identity.
     *
     * @apiNote
     * The {@code reducing()} collectors are most useful when used in a
     * multi-level reduction, downstream of {@code groupingBy} or
     * {@code partitioningBy}. To perform a simple reduction on a stream,
     * use {@link Stream#reduce(Object, BinaryOperator)} instead.
     *
     * @param <T> element type for the input and output of the reduction
     * @param identity the identity value for the reduction (also, the value
     *        that is returned when there are no input elements)
     * @param op a {@code BinaryOperator<T>} used to reduce the input elements
     * @return a {@code Collector} which implements the reduction operation
     * @see #reducing(BinaryOperator)
     * @see #reducing(Object, Function, BinaryOperator)
     */
    public static <T> Collector<T, ?, T> reducing(T identity, BinaryOperator<T> op) {
        // The running value lives in a one-element array (boxSupplier) so the
        // accumulator can mutate it in place.
        return new CollectorImpl<>(
                boxSupplier(identity),
                (a, t) -> { a[0] = op.apply(a[0], t); },              // fold one element in
                (a, b) -> { a[0] = op.apply(a[0], b[0]); return a; }, // merge partial results
                a -> a[0],                                            // unwrap the final value
                CH_NOID);
    }
}
public class FileUtil {
    /**
     * Opens the given file as an InputStream by delegating to the
     * {@code Path}-based overload.
     *
     * @param file the file to open; must not be null
     * @return an input stream over the file's contents
     * @throws IOException if the stream cannot be opened
     * @see java.nio.file.Files#newInputStream
     */
    public static InputStream asInputStream(File file) throws IOException {
        Validate.notNull(file, "file is null");
        return asInputStream(file.toPath());
    }
}
public class DataTable { /** * Asserts that the given attribute exists as a column in this table .
* @ param attribute the attribute to find
* @ throws UnknownColumnException if the attribute doesn ' t exist */
private void assertAttributeExists ( String attribute ) throws UnknownColumnException { } } | for ( DataColumn column : columns ) { if ( column . getCode ( ) . equals ( attribute ) ) { return ; } } throw new UnknownColumnException ( attribute ) ; |
public class HiveRegistrationUnit { /** * Set table / partition parameters .
* When using { @ link org . apache . gobblin . hive . metastore . HiveMetaStoreBasedRegister } , since it internally use
* { @ link org . apache . hadoop . hive . metastore . api . Table } and { @ link org . apache . hadoop . hive . metastore . api . Partition }
* which distinguishes between table / partition parameters , storage descriptor parameters , and serde parameters ,
* one may need to distinguish them when constructing a { @ link HiveRegistrationUnit } by using
* { @ link # setProps ( State ) } , { @ link # setStorageProps ( State ) } and
* { @ link # setSerDeProps ( State ) } . When using query - based Hive registration , they do not need to be
* distinguished since all parameters will be passed via TBLPROPERTIES . */
public void setProps ( State props ) { } } | for ( String propKey : props . getPropertyNames ( ) ) { setProp ( propKey , props . getProp ( propKey ) ) ; } |
public class Layer {
    /**
     * Serializes this Layer as a {@link com.google.gwt.json.client.JSONObject}.
     * The result contains the node type, optional non-empty metadata, the
     * attribute map, each serializable child primitive, and the storage engine
     * state.
     *
     * @return JSONObject representation of this layer
     */
    @Override
    public JSONObject toJSONObject() {
        final JSONObject object = new JSONObject();
        object.put("type", new JSONString(getNodeType().getValue()));
        // Only emit metadata when it is present and non-empty.
        if (hasMetaData()) {
            final MetaData meta = getMetaData();
            if (false == meta.isEmpty()) {
                object.put("meta", new JSONObject(meta.getJSO()));
            }
        }
        object.put("attributes", new JSONObject(getAttributes().getJSO()));
        final NFastArrayList<IPrimitive<?>> list = getChildNodes();
        final JSONArray children = new JSONArray();
        if (null != list) {
            final int size = list.size();
            for (int i = 0; i < size; i++) {
                final IPrimitive<?> prim = list.get(i);
                if (null != prim) {
                    final JSONObject make = prim.toJSONObject();
                    // Children that do not serialize (null) are silently skipped.
                    if (null != make) {
                        children.set(children.size(), make);
                    }
                }
            }
        }
        object.put("children", children);
        object.put("storage", getStorageEngine().toJSONObject());
        return object;
    }
}
public class HibernateClient {
    /**
     * Begins a transaction on the stateless session, reusing the session's
     * current transaction if one is already active instead of starting a new one.
     *
     * @return the active (possibly pre-existing) transaction
     */
    private Transaction onBegin() {
        Transaction tx;
        // NOTE(review): relies on StatelessSessionImpl internals to detect an
        // already-active transaction — confirm against the Hibernate version in use.
        if (((StatelessSessionImpl) s).getTransactionCoordinator().isTransactionActive()) {
            tx = ((StatelessSessionImpl) s).getTransaction();
        } else {
            tx = s.beginTransaction();
        }
        return tx;
    }
}
public class InMemoryAsyncRetryRegistry { /** * { @ inheritDoc } */
@ Override public AsyncRetry retry ( String name , Supplier < RetryConfig > retryConfigSupplier ) { } } | return retries . computeIfAbsent ( Objects . requireNonNull ( name , "Name must not be null" ) , ( k ) -> AsyncRetry . of ( name , retryConfigSupplier . get ( ) ) ) ; |
public class BaseField {
    /**
     * Enables or disables the FieldChanged listeners attached to this field.
     * Walks the listener chain in order; listener i takes its flag from
     * rgbEnabled[i] when available, otherwise it defaults to enabled.
     * (The previous javadoc described this as a getter with a return value;
     * it is a void setter.)
     *
     * @param rgbEnabled per-listener enable flags, indexed by chain position;
     *        null (or a short array) enables the remaining listeners
     */
    public void setEnableListeners(boolean[] rgbEnabled) {
        int iIndex = 0;
        FieldListener fieldBehavior = this.getListener();
        while (fieldBehavior != null) {
            boolean bEnable = true; // default when no flag is supplied for this index
            if ((rgbEnabled != null) && (iIndex < rgbEnabled.length))
                bEnable = rgbEnabled[iIndex];
            fieldBehavior.setEnabledListener(bEnable);
            fieldBehavior = (FieldListener) fieldBehavior.getNextListener();
            iIndex++;
        }
    }
}
public class EvalVisitor { /** * If the value is a proto , then set the current access location since we are about to access it
* incorrectly . */
private static void maybeMarkBadProtoAccess ( ExprNode expr , SoyValue value ) { } } | if ( value instanceof SoyProtoValue ) { ( ( SoyProtoValue ) value ) . setAccessLocationKey ( expr . getSourceLocation ( ) ) ; } |
public class DefaultDecomposer {
    /**
     * Decomposes a raw PDU byte array into an {@link Outbind} command.
     * Reads, in strict order: the standard PDU header, then the C-string
     * system id and password, validating each against SMPP string limits.
     *
     * @param b the raw PDU bytes
     * @return the decomposed outbind request
     * @throws PDUStringException if the system id or password fails validation
     * @see org.jsmpp.util.PDUDecomposer#outbind(byte[])
     */
    public Outbind outbind(byte[] b) throws PDUStringException {
        Outbind req = new Outbind();
        SequentialBytesReader reader = new SequentialBytesReader(b);
        assignHeader(req, reader); // consumes the fixed-size header first
        req.setSystemId(reader.readCString());
        StringValidator.validateString(req.getSystemId(), StringParameter.SYSTEM_ID);
        req.setPassword(reader.readCString());
        StringValidator.validateString(req.getPassword(), StringParameter.PASSWORD);
        return req;
    }
}
public class AlertService { /** * Deletes an alert including its notifications and triggers .
* @ param alertId The ID of the alert to delete .
* @ throws IOException If the server cannot be reached .
* @ throws TokenExpiredException If the token sent along with the request has expired */
public void deleteAlert ( BigInteger alertId ) throws IOException , TokenExpiredException { } } | String requestUrl = RESOURCE + "/" + alertId . toString ( ) ; ArgusResponse response = getClient ( ) . executeHttpRequest ( ArgusHttpClient . RequestType . DELETE , requestUrl , null ) ; assertValidResponse ( response , requestUrl ) ; |
public class TxtExporter {
    /**
     * Writes one text field to the print stream, padded to the configured
     * column width (widths are summed across columns when colSpan > 1).
     * Alignment is left-padded by default, right-padded when the band element
     * requests right alignment.
     *
     * @param p print stream
     * @param s the string to write. NOTE(review): the javadoc used to say
     *        "Null means start a new line", but the nl() call is commented out
     *        below — null is currently written as an empty padded field.
     *        Confirm which behavior is intended.
     */
    private void put(PrintStream p, String s, int column, int colSpan, BandElement bandElement) {
        if (s == null) { // nl ( ) ;
            // Recurse once with "" so null renders as a blank, width-padded cell.
            put(p, "", column, colSpan, bandElement);
            return;
        }
        int size = 0;
        if (colSpan > 1) {
            // Spanned cell: total width is the sum of the covered columns.
            for (int i = column; i < column + colSpan; i++) {
                size += columnWidth[i];
            }
        } else {
            size = columnWidth[column];
        }
        if ((bandElement != null) && bandElement.getHorizontalAlign() == BandElement.RIGHT) {
            p.print(String.format("%" + size + "s", s));
        } else {
            p.print(String.format("%-" + size + "s", s));
        }
    }
}
public class BindDictionary { /** * static image field methods */
public StaticImageField < T > addStaticImageField ( int viewResId , StaticImageLoader < T > staticImageLoader ) { } } | StaticImageField < T > field = new StaticImageField < T > ( viewResId , staticImageLoader ) ; mStaticImageFields . add ( field ) ; return field ; |
public class AssignmentGuardFinder { /** * Creates a new instance of this class . None of the arguments must be
* { @ code null } .
* @ param candidateName
* name of the lazy variable . Must not be empty !
* @ param controlFlowBlock
* the control flow block which is supposed to contain an
* { @ link AssignmentGuard } .
* @ return a new instance of this class . */
public static AssignmentGuardFinder newInstance ( final String candidateName , final ControlFlowBlock controlFlowBlock ) { } } | checkArgument ( ! candidateName . isEmpty ( ) ) ; return new AssignmentGuardFinder ( candidateName , checkNotNull ( controlFlowBlock ) ) ; |
public class DRL5Expressions {
    /**
     * $ANTLR start synpred10_DRL5Expressions
     * <p>
     * GENERATED CODE (ANTLR 3, from DRL5Expressions.g) — do not hand-edit;
     * regenerate from the grammar instead. Syntactic predicate matching:
     * {@code DOUBLE_AMPER (fullAnnotation[null])? operator}.
     * Backtracking failure is reported via {@code state.failed}, not exceptions.
     */
    public final void synpred10_DRL5Expressions_fragment() throws RecognitionException {
        // src/main/resources/org/drools/compiler/lang/DRL5Expressions.g:407:5 : ( DOUBLE_AMPER ( fullAnnotation [ null ] )? operator )
        // src/main/resources/org/drools/compiler/lang/DRL5Expressions.g:407:6 : DOUBLE_AMPER ( fullAnnotation [ null ] )? operator
        {
            match(input, DOUBLE_AMPER, FOLLOW_DOUBLE_AMPER_in_synpred10_DRL5Expressions1890);
            if (state.failed) return;
            // src/main/resources/org/drools/compiler/lang/DRL5Expressions.g:407:19 : ( fullAnnotation [ null ] )?
            int alt92 = 2;
            int LA92_0 = input.LA(1);
            if ((LA92_0 == AT)) {
                alt92 = 1;
            }
            switch (alt92) {
                case 1:
                    // src/main/resources/org/drools/compiler/lang/DRL5Expressions.g:407:19 : fullAnnotation [ null ]
                {
                    pushFollow(FOLLOW_fullAnnotation_in_synpred10_DRL5Expressions1892);
                    fullAnnotation(null);
                    state._fsp--;
                    if (state.failed) return;
                }
                break;
            }
            pushFollow(FOLLOW_operator_in_synpred10_DRL5Expressions1896);
            operator();
            state._fsp--;
            if (state.failed) return;
        }
    }
}
public class AbstractBean {
    /**
     * Sets the process (the what) that functionally modified this object.
     *
     * @param modifyingProcess an object denoting the process that modified this object.
     * @see org.cp.elements.lang.Auditable
     */
    @SuppressWarnings("unchecked")
    public void setModifiedWith(PROCESS modifyingProcess) {
        // NOTE(review): processChange presumably records and applies the new
        // value of the "modifiedWith" property — confirm it assigns
        // this.modifiedWith, since there is no direct assignment here.
        processChange("modifiedWith", this.modifiedWith, modifyingProcess);
        // Captures the earliest modifying process: once lastModifiedWith is
        // non-null, defaultIfNull keeps the existing value forever.
        this.lastModifiedWith = ObjectUtils.defaultIfNull(this.lastModifiedWith, this.modifiedWith);
    }
}
public class ItemReference {
    /**
     * PK57207 Returns an estimated size for this message reference.
     * When _sizeRefsByMsgSize is set the estimate delegates to the referred
     * item's own size; otherwise — or if the referred item cannot be read —
     * the parent's (small) default size is used. Entry/exit tracing brackets
     * the whole method.
     */
    public int getInMemoryDataSize() {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(this, tc, "getInMemoryDataSize");
        int dataSize;
        // If tuning has requested return the size of the message we reference,
        // then delegate to the message. Otherwise call our parent to get a
        // (small) default size.
        if (_sizeRefsByMsgSize) {
            try {
                dataSize = getReferredItem().getInMemoryDataSize();
            } catch (SevereMessageStoreException e) {
                com.ibm.ws.ffdc.FFDCFilter.processException(e,
                        "com.ibm.ws.sib.msgstore.ItemReference.getInMemoryDataSize", "244", this);
                // After FFDCing anything nasty, fall back to the standard answer
                dataSize = super.getInMemoryDataSize();
            }
        } else {
            dataSize = super.getInMemoryDataSize();
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(this, tc, "getInMemoryDataSize", dataSize);
        return dataSize;
    }
}
public class TexFileUtilsImpl {
    /**
     * Copies the LaTeX output produced for the given tex file from the temp
     * build directory into the site output directory. The file set is selected
     * by wildcard patterns derived from the tex file name and the known LaTeX
     * output extensions, then handed to the filter-based overload.
     *
     * @see org.m2latex.mojo.TexFileUtils#copyOutputToSiteFolder(java.io.File, java.io.File, java.io.File)
     */
    public void copyLatexOutputToOutputFolder(File texFile, File tempDirectory, File outputDirectory)
            throws MojoExecutionException, MojoFailureException {
        WildcardFileFilter fileFilter =
                new WildcardFileFilter(getFilesToCopy(texFile, LATEX_OUTPUT_FILES));
        copyLatexOutputToOutputFolder(texFile, tempDirectory, outputDirectory, fileFilter);
    }
}
public class RESTAppListener {
    /**
     * Set required VirtualHost. This will be called before activate.
     * The target filter will only allow the enabled default_host virtual
     * host to be bound if/when it has an SSL port available (httpsAlias=*).
     */
    @Reference(service = VirtualHost.class,
            target = "(&(enabled=true)(id=default_host)(httpsAlias=*))",
            policy = ReferencePolicy.STATIC,
            cardinality = ReferenceCardinality.MANDATORY)
    protected void setVirtualHost(VirtualHost vhost, Map<String, Object> props) {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEventEnabled()) {
            Tr.event(tc, "Set vhost: ", vhost);
        }
        secureVirtualHost = vhost;
        // httpsAlias is guaranteed present by the reference target filter above.
        secureAlias = props.get("httpsAlias").toString();
        createJMXWorkAreaResourceIfChanged(vhost);
    }
}
public class ManagedBeanDefinition { /** * Create this managed bean operations info .
* @ return */
private MBeanOperationInfo [ ] getOperationInfo ( ) { } } | final List < MBeanOperationInfo > infoList = new ArrayList < > ( ) ; if ( type != null ) { ReflectionUtils . doWithMethods ( type , new ReflectionUtils . MethodCallback ( ) { @ Override public void doWith ( Method method ) throws IllegalArgumentException , IllegalAccessException { infoList . add ( new MBeanOperationInfo ( OPERATION_DESCRIPTION , method ) ) ; } } , new ReflectionUtils . MethodFilter ( ) { @ Override public boolean matches ( Method method ) { return method . getDeclaringClass ( ) . equals ( type ) && ! method . getName ( ) . startsWith ( "set" ) && ! method . getName ( ) . startsWith ( "get" ) && ! method . getName ( ) . startsWith ( "is" ) && ! method . getName ( ) . startsWith ( "$jacoco" ) ; // Fix for code coverage
} } ) ; } else { for ( ManagedBeanInvocation . Operation operation : operations ) { List < MBeanParameterInfo > parameterInfo = new ArrayList < > ( ) ; int i = 1 ; for ( OperationParam parameter : operation . getParameter ( ) . getParameter ( ) ) { parameterInfo . add ( new MBeanParameterInfo ( "p" + i ++ , parameter . getType ( ) , "Parameter #" + i ) ) ; } infoList . add ( new MBeanOperationInfo ( operation . getName ( ) , OPERATION_DESCRIPTION , parameterInfo . toArray ( new MBeanParameterInfo [ operation . getParameter ( ) . getParameter ( ) . size ( ) ] ) , operation . getReturnType ( ) , MBeanOperationInfo . UNKNOWN ) ) ; } } return infoList . toArray ( new MBeanOperationInfo [ infoList . size ( ) ] ) ; |
public class Train {
    /**
     * Initializes training state from the model: label/feature counts, the
     * regularization constant, and the working arrays used by the L-BFGS
     * optimizer (gradient, diagonal, and a workspace sized per mForHessian).
     * Prints a message and aborts silently if the model reports no labels or
     * features.
     */
    public void init() {
        numLabels = model.data.numLabels();
        numFeatures = model.feaGen.numFeatures();
        if (numLabels <= 0 || numFeatures <= 0) {
            System.out.println("Invalid number of labels or features");
            return; // NOTE(review): later arrays stay null after this early exit
        }
        lambda = model.lambda;
        tempLambda = new double[numFeatures];
        gradLogLi = new double[numFeatures];
        diag = new double[numFeatures];
        temp = new double[numLabels];
        // L-BFGS workspace size formula: n*(2m+1) + 2m, with m = mForHessian.
        int wsSize = numFeatures * (2 * model.option.mForHessian + 1) + 2 * model.option.mForHessian;
        ws = new double[wsSize];
        iprint = new int[2];
        iflag = new int[1];
    }
}
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.