signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class ContainerUtil { /** * Returns the cube ID for the container . By default , this is the container * name , but can be overridden by the user in the container properties , e . g . * < code > & lt ; cubeId & gt ; pod - name & lt ; / cubeId & gt ; < / code > . * @ param container * the arquillian container * @ return the cube ID for the specified container */ public static String getCubeIDForContainer ( Container container ) { } }
final String cubeID ; final Map < String , String > containerProperties = container . getContainerConfiguration ( ) . getContainerProperties ( ) ; if ( containerProperties == null ) { // test cases may not mock entire hierarchy cubeID = null ; } else { cubeID = containerProperties . get ( "cubeId" ) ; } return cubeID == null ? container . getName ( ) : cubeID ;
public class NodeHierarchyCreatorImpl { /** * { @ inheritDoc } */ public Node getUserApplicationNode ( SessionProvider sessionProvider , String userName ) throws Exception { } }
Node userNode = getUserNode ( sessionProvider , userName ) ; return dataDistributionManager_ . getDataDistributionType ( DataDistributionMode . NONE ) . getOrCreateDataNode ( userNode , getJcrPath ( USER_APPLICATION ) ) ;
public class J4pClient { /** * Execute a single J4pRequest which returns a single response . * @ param pRequest request to execute * @ param pMethod method to use which should be either " GET " or " POST " * @ param pProcessingOptions optional map of processing options * @ param < RESP > response type * @ param < REQ > request type * @ return response object * @ throws J4pException if something ' s wrong ( e . g . connection failed or read timeout ) */ public < RESP extends J4pResponse < REQ > , REQ extends J4pRequest > RESP execute ( REQ pRequest , String pMethod , Map < J4pQueryParameter , String > pProcessingOptions ) throws J4pException { } }
return this . < RESP , REQ > execute ( pRequest , pMethod , pProcessingOptions , responseExtractor ) ;
public class NFRuleSet {
    /**
     * Determine the best fraction rule to use. Rules matching the decimal point from
     * DecimalFormatSymbols become the main set of rules to use.
     *
     * @param originalIndex The index into nonNumericalRules
     * @param newRule The new rule to consider
     * @param rememberRule Should the new rule be added to fractionRules.
     */
    private void setBestFractionRule(int originalIndex, NFRule newRule, boolean rememberRule) {
        if (rememberRule) {
            // Lazily create the list of all fraction rules the first time one is remembered.
            if (fractionRules == null) {
                fractionRules = new LinkedList<NFRule>();
            }
            fractionRules.add(newRule);
        }
        // The currently-selected rule for this slot, if any.
        NFRule bestResult = nonNumericalRules[originalIndex];
        if (bestResult == null) {
            // First candidate wins by default.
            nonNumericalRules[originalIndex] = newRule;
        }
        else {
            // We have more than one. Which one is better? Prefer the rule whose
            // decimal point matches the owner's DecimalFormatSymbols separator.
            DecimalFormatSymbols decimalFormatSymbols = owner.getDecimalFormatSymbols();
            if (decimalFormatSymbols.getDecimalSeparator() == newRule.getDecimalPoint()) {
                nonNumericalRules[originalIndex] = newRule;
            }
            // else leave it alone
        }
    }
}
public class LogicExpression { /** * Return a list of the arguments contained in the expression . * @ return */ public List < String > getArgs ( ) { } }
List < String > args = new ArrayList < String > ( ) ; getArgs ( this . expression , args ) ; return args ;
public class _Private_Utils { /** * Calls { @ link InputStream # read ( byte [ ] , int , int ) } until the buffer is * filled or EOF is encountered . * This method will block until the request is satisfied . * @ param in The stream to read from . * @ param buf The buffer to read to . * @ return the number of bytes read from the stream . May be less than * { @ code buf . length } if EOF is encountered before reading that far . * @ see # readFully ( InputStream , byte [ ] , int , int ) */ public static int readFully ( InputStream in , byte [ ] buf ) throws IOException { } }
return readFully ( in , buf , 0 , buf . length ) ;
public class ObjectFactory { /** * Create an instance of { @ link JAXBElement } { @ code < } { @ link AbstractTunnelType } { @ code > } * @ param value * Java instance representing xml element ' s value . * @ return * the new instance of { @ link JAXBElement } { @ code < } { @ link AbstractTunnelType } { @ code > } */ @ XmlElementDecl ( namespace = "http://www.opengis.net/citygml/tunnel/2.0" , name = "_AbstractTunnel" , substitutionHeadNamespace = "http://www.opengis.net/citygml/2.0" , substitutionHeadName = "_Site" ) public JAXBElement < AbstractTunnelType > create_AbstractTunnel ( AbstractTunnelType value ) { } }
return new JAXBElement < AbstractTunnelType > ( __AbstractTunnel_QNAME , AbstractTunnelType . class , null , value ) ;
public class ChannelServiceImpl { /** * 根据NodeId和Channel状态找到对应的Channel列表 。 */ public List < Channel > listByNodeId ( Long nodeId , ChannelStatus ... statuses ) { } }
List < Channel > channels = new ArrayList < Channel > ( ) ; List < Channel > results = new ArrayList < Channel > ( ) ; try { List < Pipeline > pipelines = pipelineService . listByNodeId ( nodeId ) ; List < Long > pipelineIds = new ArrayList < Long > ( ) ; for ( Pipeline pipeline : pipelines ) { pipelineIds . add ( pipeline . getId ( ) ) ; } if ( pipelineIds . isEmpty ( ) ) { // 没有关联任务直接返回 return channels ; } // 反查对应的channel channels = listByPipelineIds ( pipelineIds . toArray ( new Long [ pipelineIds . size ( ) ] ) ) ; if ( null == statuses || statuses . length == 0 ) { return channels ; } for ( Channel channel : channels ) { for ( ChannelStatus status : statuses ) { if ( channel . getStatus ( ) . equals ( status ) ) { results . add ( channel ) ; } } } } catch ( Exception e ) { logger . error ( "ERROR ## list query channel by nodeId:" + nodeId + " has an exception!" ) ; throw new ManagerException ( e ) ; } return results ;
public class Functions { /** * A { @ link Collector } that collects { @ linkplain Optional optional } values to a list . * The collector only collects values that are { @ linkplain Optional # isPresent ( ) present } . * @ param < T > the type of values to collect . * @ return a collector that collects optional values to a list . */ @ SuppressWarnings ( "unchecked" ) public static < T > Collector < Optional < T > , List < T > , List < T > > flatList ( ) { } }
return ( Collector ) FLAT_LIST ;
public class MoreMeters { /** * Returns a newly - registered { @ link Timer } configured by { @ link # distributionStatisticConfig ( ) } . */ public static Timer newTimer ( MeterRegistry registry , String name , Iterable < Tag > tags ) { } }
requireNonNull ( registry , "registry" ) ; requireNonNull ( name , "name" ) ; requireNonNull ( tags , "tags" ) ; final Duration maxExpectedValue = Optional . ofNullable ( distStatCfg . getMaximumExpectedValue ( ) ) . map ( Duration :: ofNanos ) . orElse ( null ) ; final Duration minExpectedValue = Optional . ofNullable ( distStatCfg . getMinimumExpectedValue ( ) ) . map ( Duration :: ofNanos ) . orElse ( null ) ; return Timer . builder ( name ) . tags ( tags ) . maximumExpectedValue ( maxExpectedValue ) . minimumExpectedValue ( minExpectedValue ) . publishPercentiles ( distStatCfg . getPercentiles ( ) ) . publishPercentileHistogram ( distStatCfg . isPercentileHistogram ( ) ) . distributionStatisticBufferLength ( distStatCfg . getBufferLength ( ) ) . distributionStatisticExpiry ( distStatCfg . getExpiry ( ) ) . register ( registry ) ;
public class LottieDrawable { /** * Sets the minimum frame that the animation will start from when playing or looping . */ public void setMinFrame ( final int minFrame ) { } }
if ( composition == null ) { lazyCompositionTasks . add ( new LazyCompositionTask ( ) { @ Override public void run ( LottieComposition composition ) { setMinFrame ( minFrame ) ; } } ) ; return ; } animator . setMinFrame ( minFrame ) ;
public class CollUtil { /** * 过滤 < br > * 过滤过程通过传入的Filter实现来过滤返回需要的元素内容 , 这个Filter实现可以实现以下功能 : * < pre > * 1 、 过滤出需要的对象 , { @ link Filter # accept ( Object ) } 方法返回true的对象将被加入结果集合中 * < / pre > * @ param < T > 集合元素类型 * @ param list 集合 * @ param filter 过滤器 * @ return 过滤后的数组 * @ since 4.1.8 */ public static < T > List < T > filter ( List < T > list , Filter < T > filter ) { } }
if ( null == list || null == filter ) { return list ; } final List < T > list2 = ( list instanceof LinkedList ) ? new LinkedList < T > ( ) : new ArrayList < T > ( list . size ( ) ) ; for ( T t : list ) { if ( filter . accept ( t ) ) { list2 . add ( t ) ; } } return list2 ;
public class SocketBindingJBossASClient { /** * Adds a socket binding with the given name in the named socket binding group . * If sysPropName is null , this simply sets the port number explicitly to the given port number . * If sysPropName is not null , this sets the port to the expression " $ { sysPropName : port } " . * If a socket binding with the given name already exists , this method does nothing . * @ param socketBindingGroupName the name of the socket binding group in which to create the named socket binding * @ param socketBindingName the name of the socket binding to be created with the given port * @ param sysPropName the name of the system property whose value is to be the port number * @ param port the default port number if the sysPropName is not defined * @ throws Exception any error */ public void addSocketBinding ( String socketBindingGroupName , String socketBindingName , String sysPropName , int port ) throws Exception { } }
if ( isSocketBinding ( socketBindingGroupName , socketBindingName ) ) { return ; } String portValue ; if ( sysPropName != null ) { portValue = "${" + sysPropName + ":" + port + "}" ; } else { portValue = String . valueOf ( port ) ; } Address addr = Address . root ( ) . add ( SOCKET_BINDING_GROUP , socketBindingGroupName , SOCKET_BINDING , socketBindingName ) ; final ModelNode request = new ModelNode ( ) ; setPossibleExpression ( request , PORT , portValue ) ; request . get ( OPERATION ) . set ( ADD ) ; request . get ( ADDRESS ) . set ( addr . getAddressNode ( ) ) ; ModelNode results = execute ( request ) ; if ( ! isSuccess ( results ) ) { throw new FailureException ( results ) ; } return ; // everything is OK
public class WebService { /** * method to combine the new MonomerStore to the existing one , in case of * xHELM as input * @ param monomerStore * MonomerStore * @ throws MonomerLoadingException * if the monomer store can not be read * @ throws IOException * if the monomer store can not be read * @ throws MonomerException * if the monomer is not valid * @ throws ChemistryException * if the chemistry can not be initialized */ private void updateMonomerStore ( MonomerStore monomerStore ) throws MonomerLoadingException , IOException , MonomerException , ChemistryException { } }
for ( Monomer monomer : monomerStore . getAllMonomersList ( ) ) { MonomerFactory . getInstance ( ) . getMonomerStore ( ) . addNewMonomer ( monomer ) ; // save monomer db to local file after successful update / / MonomerFactory . getInstance ( ) . saveMonomerCache ( ) ; }
public class Quartz {
    /**
     * Method to initialize the Quartz scheduler singleton: resolves the JNDI
     * UserTransaction, assembles the Quartz properties (configured values from
     * the kernel SystemConfiguration plus hard defaults), replaces any existing
     * "eFapsScheduler" instance, and optionally schedules the system-message
     * trigger before starting the scheduler.
     *
     * @throws EFapsException on error
     */
    public static void initialize() throws EFapsException {
        Quartz.QUARTZ = new Quartz();
        try {
            // Kernel configuration supplies the base Quartz properties.
            final SystemConfiguration config = EFapsSystemConfiguration.get();
            final Properties props = config.getAttributeValueAsProperties(KernelSettings.QUARTZPROPS);
            final StdSchedulerFactory schedFact = new StdSchedulerFactory();

            // Try to resolve a JNDI context; first the global namespace.
            javax.naming.Context envCtx = null;
            String lookup = "java:global/";
            try {
                final InitialContext initCtx = new InitialContext();
                envCtx = (javax.naming.Context) initCtx.lookup(lookup);
            } catch (final NamingException e) {
                Quartz.LOG.info("Catched NamingException on evaluation for Quartz");
            }
            // For a build the context might be different; try this before surrendering.
            if (envCtx == null) {
                try {
                    lookup = "java:comp/env";
                    final InitialContext initCtx = new InitialContext();
                    envCtx = (javax.naming.Context) initCtx.lookup(lookup);
                } catch (final NamingException e) {
                    Quartz.LOG.info("Catched NamingException on evaluation for Quartz");
                }
            }
            // NOTE(review): envCtx may still be null here; a failure surfaces as the
            // NamingException caught below (NPE would not be caught) — confirm intended.
            try {
                final DelegatingUserTransaction trans =
                        (DelegatingUserTransaction) envCtx.lookup(INamingBinds.RESOURCE_USERTRANSACTION);
                // Run the transaction under the dedicated QuartzTrigger person.
                trans.setUserName(Person.get(UUID.fromString("df2f02a7-c556-49ad-b019-e13db66e1cbf")).getName());
            } catch (final NamingException e) {
                Quartz.LOG.info("Catched NamingException on evaluation for Quartz");
            }

            // Wire Quartz jobs into the container's UserTransaction.
            props.put(StdSchedulerFactory.PROP_SCHED_USER_TX_URL,
                      lookup + "/" + INamingBinds.RESOURCE_USERTRANSACTION);
            props.put(StdSchedulerFactory.PROP_SCHED_WRAP_JOB_IN_USER_TX, "true");
            props.put(StdSchedulerFactory.PROP_THREAD_POOL_CLASS, "org.quartz.simpl.SimpleThreadPool");
            props.put(StdSchedulerFactory.PROP_SCHED_JOB_FACTORY_CLASS, SimpleJobFactory.class.getName());
            props.put("org.quartz.plugin.jobInitializer.class", "org.efaps.admin.common.QuartzSchedulerPlugin");
            // Defaults below apply only when not already configured.
            if (!props.containsKey(StdSchedulerFactory.PROP_SCHED_MAKE_SCHEDULER_THREAD_DAEMON)) {
                props.put(StdSchedulerFactory.PROP_SCHED_MAKE_SCHEDULER_THREAD_DAEMON, "true");
            }
            if (!props.containsKey(StdSchedulerFactory.PROP_SCHED_INSTANCE_NAME)) {
                props.put(StdSchedulerFactory.PROP_SCHED_INSTANCE_NAME, "eFapsScheduler");
            }
            if (!props.containsKey("org.quartz.threadPool.threadCount")) {
                props.put("org.quartz.threadPool.threadCount", "2");
            }
            if (!props.containsKey("org.quartz.plugin.triggHistory.class")) {
                props.put("org.quartz.plugin.triggHistory.class",
                          "org.quartz.plugins.history.LoggingTriggerHistoryPlugin");
                props.put("org.quartz.plugin.triggHistory.triggerFiredMessage",
                          "Trigger {1}.{0} fired job {6}.{5} at: {4, date, HH:mm:ss MM/dd/yyyy}");
                props.put("org.quartz.plugin.triggHistory.triggerCompleteMessage",
                          "Trigger {1}.{0} completed firing job {6}.{5} at {4, date, HH:mm:ss MM/dd/yyyy}.");
            }
            Quartz.LOG.info("Sheduling Quartz with properties {}", props);
            schedFact.initialize(props);
            // Shut down any previously created scheduler instance before creating a fresh one.
            Quartz.QUARTZ.scheduler = schedFact.getScheduler("eFapsScheduler");
            if (Quartz.QUARTZ.scheduler != null) {
                Quartz.QUARTZ.scheduler.shutdown();
            }
            Quartz.QUARTZ.scheduler = schedFact.getScheduler();

            // Optionally (re)schedule the system-message status trigger.
            if (config.getAttributeValueAsBoolean(KernelSettings.MSGTRIGGERACTIVE)) {
                final int interval = config.getAttributeValueAsInteger(KernelSettings.MSGTRIGGERINTERVAL);
                final Trigger trigger = TriggerBuilder.newTrigger()
                        .withIdentity("SystemMessageTrigger")
                        // Guard against unset/zero interval: fall back to one minute.
                        .withSchedule(SimpleScheduleBuilder.repeatMinutelyForever(interval > 0 ? interval : 1))
                        .build();
                JobDetail jobDetail = Quartz.QUARTZ.scheduler
                        .getJobDetail(new JobKey("SystemMessage", Quartz.QUARTZGROUP));
                if (jobDetail == null) {
                    jobDetail = JobBuilder.newJob(MessageStatusHolder.class)
                            .withIdentity("SystemMessage", Quartz.QUARTZGROUP).build();
                    Quartz.QUARTZ.scheduler.scheduleJob(jobDetail, trigger);
                } else {
                    Quartz.QUARTZ.scheduler.rescheduleJob(
                            new TriggerKey("SystemMessageTrigger", Quartz.QUARTZGROUP), trigger);
                }
            }
            Quartz.QUARTZ.scheduler.start();
        } catch (final SchedulerException e) {
            throw new EFapsException(Quartz.class, "Quartz.SchedulerException", e);
        }
    }
}
public class BsfUtils { /** * Transform a snake - case string to a camel - case one . * @ param snakeCase * @ return */ public static String snakeCaseToCamelCase ( String snakeCase ) { } }
if ( snakeCase . contains ( "-" ) ) { StringBuilder camelCaseStr = new StringBuilder ( snakeCase . length ( ) ) ; boolean toUpperCase = false ; for ( char c : snakeCase . toCharArray ( ) ) { if ( c == '-' ) toUpperCase = true ; else { if ( toUpperCase ) { toUpperCase = false ; c = Character . toUpperCase ( c ) ; } camelCaseStr . append ( c ) ; } } snakeCase = camelCaseStr . toString ( ) ; } return snakeCase ;
public class CmsLinkProcessor { /** * Visitor method to process a tag ( start ) . < p > * @ param tag the tag to process */ @ Override public void visitTag ( Tag tag ) { } }
if ( tag instanceof LinkTag ) { processLinkTag ( ( LinkTag ) tag ) ; } else if ( tag instanceof ImageTag ) { processImageTag ( ( ImageTag ) tag ) ; } else if ( tag instanceof ObjectTag ) { processObjectTag ( ( ObjectTag ) tag ) ; } else { // there are no specialized tag classes for these tags : ( if ( TAG_EMBED . equals ( tag . getTagName ( ) ) ) { processEmbedTag ( tag ) ; } else if ( TAG_AREA . equals ( tag . getTagName ( ) ) ) { processAreaTag ( tag ) ; } } // append text content of the tag ( may have been changed by above methods ) super . visitTag ( tag ) ;
public class snmpuser { /** * Use this API to fetch filtered set of snmpuser resources . * filter string should be in JSON format . eg : " port : 80 , servicetype : HTTP " . */ public static snmpuser [ ] get_filtered ( nitro_service service , String filter ) throws Exception { } }
snmpuser obj = new snmpuser ( ) ; options option = new options ( ) ; option . set_filter ( filter ) ; snmpuser [ ] response = ( snmpuser [ ] ) obj . getfiltered ( service , option ) ; return response ;
public class FSDirectory { /** * Get { @ link INode } associated with the file . */ INodeFile getFileINode ( String src ) { } }
byte [ ] [ ] components = INodeDirectory . getPathComponents ( src ) ; readLock ( ) ; try { INode inode = rootDir . getNode ( components ) ; if ( inode == null || inode . isDirectory ( ) ) return null ; return ( INodeFile ) inode ; } finally { readUnlock ( ) ; }
public class TraceEventHelper {
    /**
     * Get the structured pool data: groups the flat event list per pool and
     * connection listener, and folds each completed event sequence into an
     * {@link Interaction}.
     *
     * @param data The data
     * @param ignoreDelist Should DELIST be ignored
     * @param ignoreTracking Should TRACKING be ignored
     * @param ignoreIncomplete Ignore incomplete traces
     * @return The result, mapping pool name to its interactions
     */
    public static Map<String, List<Interaction>> getPoolData(List<TraceEvent> data, boolean ignoreDelist, boolean ignoreTracking, boolean ignoreIncomplete) {
        // Pool -> Interactions
        Map<String, List<Interaction>> result = new TreeMap<String, List<Interaction>>();
        // Pool -> ConnectionListener -> Events (in-progress, not yet completed)
        Map<String, Map<String, List<TraceEvent>>> temp = new TreeMap<String, Map<String, List<TraceEvent>>>();
        for (int i = 0; i < data.size(); i++) {
            TraceEvent te = data.get(i);
            Map<String, List<TraceEvent>> m = temp.get(te.getPool());
            if (m == null)
                m = new TreeMap<String, List<TraceEvent>>();
            List<TraceEvent> l = m.get(te.getConnectionListener());
            if (l == null)
                l = new ArrayList<TraceEvent>();
            l.add(te);
            if (isEndState(te)) {
                // The event closes the sequence: turn the accumulated events into an
                // Interaction spanning first-to-last timestamp.
                Interaction interaction = new Interaction(te.getThreadId(), l.get(0).getTimestamp(),
                                                          l.get(l.size() - 1).getTimestamp(), l,
                                                          getStatus(l, ignoreDelist, ignoreTracking, ignoreIncomplete));
                List<Interaction> pool = result.get(te.getPool());
                if (pool == null)
                    pool = new ArrayList<Interaction>();
                pool.add(interaction);
                result.put(te.getPool(), pool);
                // Reset the per-listener accumulation (a null entry marks "completed").
                l = null;
            }
            m.put(te.getConnectionListener(), l);
            temp.put(te.getPool(), m);
        }
        if (!ignoreIncomplete) {
            // Also emit interactions for sequences that never reached an end state.
            for (Map.Entry<String, Map<String, List<TraceEvent>>> poolEntry : temp.entrySet()) {
                for (Map.Entry<String, List<TraceEvent>> clEntry : poolEntry.getValue().entrySet()) {
                    List<TraceEvent> l = clEntry.getValue();
                    if (l != null) {
                        Interaction interaction = new Interaction(l.get(0).getThreadId(), l.get(0).getTimestamp(),
                                                                  l.get(l.size() - 1).getTimestamp(), l,
                                                                  getStatus(l, ignoreDelist, ignoreTracking, ignoreIncomplete));
                        List<Interaction> pool = result.get(poolEntry.getKey());
                        if (pool == null)
                            pool = new ArrayList<Interaction>();
                        pool.add(interaction);
                        result.put(poolEntry.getKey(), pool);
                    }
                }
            }
        }
        return result;
    }
}
public class TransactionMethodInterceptor { /** * Complete the transaction * @ param tx * @ param readOnly * the read - only flag on the transaction ( if true , the transaction will be rolled back , otherwise the transaction will be */ private final void complete ( Transaction tx , boolean readOnly ) { } }
if ( log . isTraceEnabled ( ) ) log . trace ( "Complete " + tx ) ; if ( ! readOnly ) tx . commit ( ) ; else tx . rollback ( ) ;
public class Cluster { /** * Get the zone where this cluster is located . */ @ SuppressWarnings ( "WeakerAccess" ) public String getZone ( ) { } }
LocationName location = Verify . verifyNotNull ( LocationName . parse ( stateProto . getLocation ( ) ) ) ; // noinspection ConstantConditions return location . getLocation ( ) ;
public class CommerceOrderLocalServiceUtil { /** * Deletes the commerce order from the database . Also notifies the appropriate model listeners . * @ param commerceOrder the commerce order * @ return the commerce order that was removed * @ throws PortalException */ public static com . liferay . commerce . model . CommerceOrder deleteCommerceOrder ( com . liferay . commerce . model . CommerceOrder commerceOrder ) throws com . liferay . portal . kernel . exception . PortalException { } }
return getService ( ) . deleteCommerceOrder ( commerceOrder ) ;
public class Channel {
    /**
     * Update channel with specified channel configuration: submits the signed
     * config update to the orderer, then polls the channel's last-config index
     * until it changes or a bounded wait time elapses (best effort only).
     *
     * @param updateChannelConfiguration Channel configuration
     * @param signers signers
     * @param orderer The specific orderer to use.
     * @throws TransactionException
     * @throws InvalidArgumentException
     */
    public void updateChannelConfiguration(UpdateChannelConfiguration updateChannelConfiguration, Orderer orderer, byte[]... signers) throws TransactionException, InvalidArgumentException {
        checkChannelState();
        checkOrderer(orderer);
        try {
            final long startLastConfigIndex = getLastConfigIndex(orderer);
            logger.trace(format("startLastConfigIndex: %d. Channel config wait time is: %d",
                    startLastConfigIndex, CHANNEL_CONFIG_WAIT_TIME));

            sendUpdateChannel(updateChannelConfiguration.getUpdateChannelConfigurationAsBytes(), signers, orderer);

            long currentLastConfigIndex = -1;
            final long nanoTimeStart = System.nanoTime();

            // Try to wait to see the channel got updated but don't fail if we don't see it.
            do {
                currentLastConfigIndex = getLastConfigIndex(orderer);
                if (currentLastConfigIndex == startLastConfigIndex) {
                    final long duration = TimeUnit.MILLISECONDS.convert(System.nanoTime() - nanoTimeStart, TimeUnit.NANOSECONDS);
                    if (duration > CHANNEL_CONFIG_WAIT_TIME) {
                        logger.warn(format("Channel %s did not get updated last config after %d ms, Config wait time: %d ms. startLastConfigIndex: %d, currentLastConfigIndex: %d ",
                                name, duration, CHANNEL_CONFIG_WAIT_TIME, startLastConfigIndex, currentLastConfigIndex));
                        // waited long enough ..
                        // Force the loop condition to fail so we exit without throwing.
                        currentLastConfigIndex = startLastConfigIndex - 1L; // just bail don't throw exception.
                    } else {
                        try {
                            Thread.sleep(ORDERER_RETRY_WAIT_TIME); // try again sleep
                        } catch (InterruptedException e) {
                            // Sleep interruption is logged but does not abort the wait loop.
                            TransactionException te = new TransactionException("update channel thread Sleep", e);
                            logger.warn(te.getMessage(), te);
                        }
                    }
                }
                logger.trace(format("currentLastConfigIndex: %d", currentLastConfigIndex));
            } while (currentLastConfigIndex == startLastConfigIndex);
        } catch (TransactionException e) {
            logger.error(format("Channel %s error: %s", name, e.getMessage()), e);
            throw e;
        } catch (Exception e) {
            // Wrap any other failure to preserve the declared throws contract.
            String msg = format("Channel %s error: %s", name, e.getMessage());
            logger.error(msg, e);
            throw new TransactionException(msg, e);
        }
    }
}
public class TableStreamer { /** * Activate the stream with the given predicates on the given table . * @ param context Context * @ param undoToken The undo token * @ param predicates Predicates associated with the stream * @ return true if activation succeeded . */ public boolean activate ( SystemProcedureExecutionContext context , boolean undo , byte [ ] predicates ) { } }
if ( ! context . activateTableStream ( m_tableId , m_type , undo , predicates ) ) { String tableName = CatalogUtil . getTableNameFromId ( context . getDatabase ( ) , m_tableId ) ; log . debug ( "Attempted to activate a table stream of type " + m_type + "for table " + tableName + " and failed" ) ; return false ; } return true ;
public class WorkspacePanel { /** * Assigns workspace names to the combo box into column 2. * @ param values */ public void setWorkspaceNames ( String [ ] values ) { } }
col2 . combo . setValueMap ( values ) ; if ( values . length > 0 ) { col2 . combo . setValue ( values [ 0 ] ) ; }
public class ConstraintSolver { /** * Get all the { @ link Variable } s contained in this { @ link ConstraintSolver } ' s { @ link ConstraintNetwork } . * @ param component Only { @ link Variable } s associated with the given label ( component ) should be returned . * @ return all the { @ link Variable } s contained in this { @ link ConstraintSolver } ' s { @ link ConstraintNetwork } . */ public Variable [ ] getVariables ( String component , Object ... markingsToExclude ) { } }
ArrayList < Variable > ret = this . components . get ( component ) ; if ( ret == null ) return new Variable [ 0 ] ; ArrayList < Variable > retFiltered = new ArrayList < Variable > ( ) ; for ( Variable v : ret ) { boolean found = false ; if ( v . getMarking ( ) != null ) for ( Object m : markingsToExclude ) { if ( m . equals ( v . getMarking ( ) ) ) { found = true ; break ; } } if ( ! found ) retFiltered . add ( v ) ; } return retFiltered . toArray ( new Variable [ retFiltered . size ( ) ] ) ;
public class LogicalContainerAwareReentrantTypeResolver { /** * Assign computed type references to the identifiable structural elements in the processed type . * @ return the stacked resolved types that shall be used in the computation . */ protected Map < JvmIdentifiableElement , ResolvedTypes > prepare ( ResolvedTypes resolvedTypes , IFeatureScopeSession featureScopeSession ) { } }
Map < JvmIdentifiableElement , ResolvedTypes > resolvedTypesByContext = Maps . newHashMapWithExpectedSize ( 3 ) ; JvmType root = getRootJvmType ( ) ; rootedInstances . add ( root ) ; recordExpressions ( root ) ; doPrepare ( resolvedTypes , featureScopeSession , root , resolvedTypesByContext ) ; return resolvedTypesByContext ;
public class CommandLine { /** * Equivalent to { @ code new CommandLine ( command ) . usage ( out , ansi ) } . * See { @ link # usage ( PrintStream , Help . Ansi ) } for details . * @ param command the object annotated with { @ link Command } , { @ link Option } and { @ link Parameters } * @ param out the print stream to print the help message to * @ param ansi whether the usage message should contain ANSI escape codes or not * @ throws IllegalArgumentException if the specified command object does not have a { @ link Command } , { @ link Option } or { @ link Parameters } annotation */ public static void usage ( Object command , PrintStream out , Help . Ansi ansi ) { } }
toCommandLine ( command , new DefaultFactory ( ) ) . usage ( out , ansi ) ;
public class CRDTReplicationMigrationService { /** * Schedules a { @ link CRDTMigrationTask } with a delay of { @ code delaySeconds } * seconds . */ void scheduleMigrationTask ( long delaySeconds ) { } }
if ( nodeEngine . getLocalMember ( ) . isLiteMember ( ) ) { return ; } nodeEngine . getExecutionService ( ) . schedule ( CRDT_REPLICATION_MIGRATION_EXECUTOR , new CRDTMigrationTask ( nodeEngine , this ) , delaySeconds , TimeUnit . SECONDS ) ;
public class BytecodeScanner {
    /**
     * Scan the raw bytecodes of a method, invoking the callback once per
     * instruction with its opcode and offset. The loop advances the index by
     * each instruction's encoded length, honoring the WIDE prefix and the
     * variable-length TABLESWITCH/LOOKUPSWITCH encodings.
     *
     * @param instructionList the bytecodes
     * @param callback the callback object
     */
    public void scan(byte[] instructionList, Callback callback) {
        // True when the previous opcode was WIDE; the next instruction then uses wider operands.
        boolean wide = false;
        for (int index = 0; index < instructionList.length;) {
            short opcode = unsignedValueOf(instructionList[index]);
            callback.handleInstruction(opcode, index);
            if (DEBUG) {
                System.out.println(index + ": " + Const.getOpcodeName(opcode));
            }
            switch (opcode) {
            // Single byte instructions.
            case Const.NOP: case Const.ACONST_NULL: case Const.ICONST_M1: case Const.ICONST_0:
            case Const.ICONST_1: case Const.ICONST_2: case Const.ICONST_3: case Const.ICONST_4:
            case Const.ICONST_5: case Const.LCONST_0: case Const.LCONST_1: case Const.FCONST_0:
            case Const.FCONST_1: case Const.FCONST_2: case Const.DCONST_0: case Const.DCONST_1:
            case Const.ILOAD_0: case Const.ILOAD_1: case Const.ILOAD_2: case Const.ILOAD_3:
            case Const.LLOAD_0: case Const.LLOAD_1: case Const.LLOAD_2: case Const.LLOAD_3:
            case Const.FLOAD_0: case Const.FLOAD_1: case Const.FLOAD_2: case Const.FLOAD_3:
            case Const.DLOAD_0: case Const.DLOAD_1: case Const.DLOAD_2: case Const.DLOAD_3:
            case Const.ALOAD_0: case Const.ALOAD_1: case Const.ALOAD_2: case Const.ALOAD_3:
            case Const.IALOAD: case Const.LALOAD: case Const.FALOAD: case Const.DALOAD:
            case Const.AALOAD: case Const.BALOAD: case Const.CALOAD: case Const.SALOAD:
            case Const.ISTORE_0: case Const.ISTORE_1: case Const.ISTORE_2: case Const.ISTORE_3:
            case Const.LSTORE_0: case Const.LSTORE_1: case Const.LSTORE_2: case Const.LSTORE_3:
            case Const.FSTORE_0: case Const.FSTORE_1: case Const.FSTORE_2: case Const.FSTORE_3:
            case Const.DSTORE_0: case Const.DSTORE_1: case Const.DSTORE_2: case Const.DSTORE_3:
            case Const.ASTORE_0: case Const.ASTORE_1: case Const.ASTORE_2: case Const.ASTORE_3:
            case Const.IASTORE: case Const.LASTORE: case Const.FASTORE: case Const.DASTORE:
            case Const.AASTORE: case Const.BASTORE: case Const.CASTORE: case Const.SASTORE:
            case Const.POP: case Const.POP2: case Const.DUP: case Const.DUP_X1:
            case Const.DUP_X2: case Const.DUP2: case Const.DUP2_X1: case Const.DUP2_X2:
            case Const.SWAP: case Const.IADD: case Const.LADD: case Const.FADD:
            case Const.DADD: case Const.ISUB: case Const.LSUB: case Const.FSUB:
            case Const.DSUB: case Const.IMUL: case Const.LMUL: case Const.FMUL:
            case Const.DMUL: case Const.IDIV: case Const.LDIV: case Const.FDIV:
            case Const.DDIV: case Const.IREM: case Const.LREM: case Const.FREM:
            case Const.DREM: case Const.INEG: case Const.LNEG: case Const.FNEG:
            case Const.DNEG: case Const.ISHL: case Const.LSHL: case Const.ISHR:
            case Const.LSHR: case Const.IUSHR: case Const.LUSHR: case Const.IAND:
            case Const.LAND: case Const.IOR: case Const.LOR: case Const.IXOR:
            case Const.LXOR: case Const.I2L: case Const.I2F: case Const.I2D:
            case Const.L2I: case Const.L2F: case Const.L2D: case Const.F2I:
            case Const.F2L: case Const.F2D: case Const.D2I: case Const.D2L:
            case Const.D2F: case Const.I2B: case Const.I2C: case Const.I2S:
            case Const.LCMP: case Const.FCMPL: case Const.FCMPG: case Const.DCMPL:
            case Const.DCMPG: case Const.IRETURN: case Const.LRETURN: case Const.FRETURN:
            case Const.DRETURN: case Const.ARETURN: case Const.RETURN: case Const.ARRAYLENGTH:
            case Const.ATHROW: case Const.MONITORENTER: case Const.MONITOREXIT:
                ++index;
                break;

            // Two byte instructions.
            case Const.BIPUSH: case Const.LDC: case Const.NEWARRAY:
                index += 2;
                break;

            // Instructions that can be used with the WIDE prefix.
            case Const.ILOAD: case Const.LLOAD: case Const.FLOAD: case Const.DLOAD:
            case Const.ALOAD: case Const.ISTORE: case Const.LSTORE: case Const.FSTORE:
            case Const.DSTORE: case Const.ASTORE: case Const.RET:
                if (wide) {
                    // Skip opcode and two immediate bytes.
                    index += 3;
                    wide = false;
                } else {
                    // Skip opcode and one immediate byte.
                    index += 2;
                }
                break;

            // IINC is a special case for WIDE handling.
            case Const.IINC:
                if (wide) {
                    // Skip opcode, two byte index, and two byte immediate value.
                    index += 5;
                    wide = false;
                } else {
                    // Skip opcode, one byte index, and one byte immediate value.
                    index += 3;
                }
                break;

            // Three byte instructions.
            case Const.SIPUSH: case Const.LDC_W: case Const.LDC2_W: case Const.IFEQ:
            case Const.IFNE: case Const.IFLT: case Const.IFGE: case Const.IFGT:
            case Const.IFLE: case Const.IF_ICMPEQ: case Const.IF_ICMPNE: case Const.IF_ICMPLT:
            case Const.IF_ICMPGE: case Const.IF_ICMPGT: case Const.IF_ICMPLE: case Const.IF_ACMPEQ:
            case Const.IF_ACMPNE: case Const.GOTO: case Const.JSR: case Const.GETSTATIC:
            case Const.PUTSTATIC: case Const.GETFIELD: case Const.PUTFIELD: case Const.INVOKEVIRTUAL:
            case Const.INVOKESPECIAL: case Const.INVOKESTATIC: case Const.NEW: case Const.ANEWARRAY:
            case Const.CHECKCAST: case Const.INSTANCEOF: case Const.IFNULL: case Const.IFNONNULL:
                index += 3;
                break;

            // Four byte instructions.
            case Const.MULTIANEWARRAY:
                index += 4;
                break;

            // Five byte instructions.
            case Const.INVOKEINTERFACE: case Const.INVOKEDYNAMIC: case Const.GOTO_W: case Const.JSR_W:
                index += 5;
                break;

            // TABLESWITCH - variable length.
            case Const.TABLESWITCH: {
                // Skip padding.
                int offset = index + 1; // skip the opcode
                offset += PAD[offset & 3];
                assert (offset & 3) == 0;
                // offset should now be positioned at the default value.
                // Extract min and max values.
                int low = extractInt(instructionList, offset + 4);
                int high = extractInt(instructionList, offset + 8);
                int tableSize = (high - low) + 1;
                if (DEBUG) {
                    System.out.println("tableswitch: low=" + low + ", high=" + high + ", tableSize=" + tableSize);
                }
                // Skip to next instruction: default + low + high + jump table.
                index = offset + 12 + (tableSize * 4);
            }
                break;

            // LOOKUPSWITCH - variable length.
            case Const.LOOKUPSWITCH: {
                // Skip padding.
                int offset = index + 1; // skip the opcode
                offset += PAD[offset & 3];
                assert (offset & 3) == 0;
                // offset should now be positioned at the default value.
                // Extract number of value/offset pairs.
                int numPairs = extractInt(instructionList, offset + 4);
                if (DEBUG) {
                    System.out.println("lookupswitch: numPairs=" + numPairs);
                }
                // Skip to next instruction: default + npairs + pair table.
                index = offset + 8 + (numPairs * 8);
            }
                break;

            // Wide prefix.
            case Const.WIDE:
                wide = true;
                ++index;
                break;

            default:
                throw new IllegalArgumentException("Bad opcode " + opcode + " at offset " + index);
            }

            // Guard against malformed bytecode driving the index negative (overflow).
            if (index < 0) {
                throw new IllegalStateException("index=" + index + ", opcode=" + opcode);
            }
        }
    }
}
public class ProvenanceChallenge2 {
    /**
     * Builds the provenance fragment for the SOFTMEAN step of the Provenance
     * Challenge workflow: one activity that consumes four image/header file
     * pairs and produces an averaged atlas image and header.
     *
     * @param imgfile1 .. hdrfile4 identifiers of the four input image/header pairs
     * @param activity identifier of the SOFTMEAN activity instance
     * @param imgatlas identifier of the generated atlas image entity
     * @param imglabel label for the atlas image (passed to {@code newFile})
     * @param hdratlas identifier of the generated atlas header entity
     * @param hdrlabel label for the atlas header (passed to {@code newFile})
     * @param workflow identifier of the enclosing workflow activity
     * @param agent identifier of the agent associated with the workflow
     * @return the PROV statements describing this step
     * @see org.openprovenance.prov.tutorial.tutorial5.Challenge#softmean
     */
    @Override
    public Collection<StatementOrBundle> softmean(String imgfile1, String hdrfile1,
            String imgfile2, String hdrfile2, String imgfile3, String hdrfile3,
            String imgfile4, String hdrfile4, String activity, String imgatlas,
            String imglabel, String hdratlas, String hdrlabel, String workflow,
            String agent) {
        Collection<StatementOrBundle> ll = new LinkedList<StatementOrBundle>();

        // The SOFTMEAN activity, typed with the PRIM ontology term.
        Activity a9 = pFactory.newActivity(pc(activity));
        pFactory.addType(a9,
                pFactory.newQualifiedName(PRIM_NS, SOFTMEAN, PRIM_PREFIX),
                name.PROV_QUALIFIED_NAME);

        // The eight input entities: four image/header pairs.
        Entity e15 = pFactory.newEntity(pc(imgfile1));
        Entity e16 = pFactory.newEntity(pc(hdrfile1));
        Entity e17 = pFactory.newEntity(pc(imgfile2));
        Entity e18 = pFactory.newEntity(pc(hdrfile2));
        Entity e19 = pFactory.newEntity(pc(imgfile3));
        Entity e20 = pFactory.newEntity(pc(hdrfile3));
        Entity e21 = pFactory.newEntity(pc(imgfile4));
        Entity e22 = pFactory.newEntity(pc(hdrfile4));

        // Usage relations: the activity used each input under a distinct role.
        ll.add(newUsed(a9, ROLE_I1, e15));
        ll.add(newUsed(a9, ROLE_H1, e16));
        ll.add(newUsed(a9, ROLE_I2, e17));
        ll.add(newUsed(a9, ROLE_H2, e18));
        ll.add(newUsed(a9, ROLE_I3, e19));
        ll.add(newUsed(a9, ROLE_H3, e20));
        ll.add(newUsed(a9, ROLE_I4, e21));
        ll.add(newUsed(a9, ROLE_H4, e22));

        // The two outputs: atlas image and atlas header.
        Entity e23 = newFile(pFactory, imgatlas, imglabel);
        Entity e24 = newFile(pFactory, hdratlas, hdrlabel);

        // Register all nodes before adding the remaining edges.
        ll.addAll(Arrays.asList(a9, e15, e16, e17, e18, e19, e20, e21, e22, e23, e24));

        ll.add(newWasGeneratedBy(e23, ROLE_IMG, a9));
        ll.add(newWasGeneratedBy(e24, ROLE_HDR, a9));

        // Each output is derived from every one of the eight inputs.
        ll.add(newWasDerivedFrom(e23, e15));
        ll.add(newWasDerivedFrom(e23, e16));
        ll.add(newWasDerivedFrom(e23, e17));
        ll.add(newWasDerivedFrom(e23, e18));
        ll.add(newWasDerivedFrom(e23, e19));
        ll.add(newWasDerivedFrom(e23, e20));
        ll.add(newWasDerivedFrom(e23, e21));
        ll.add(newWasDerivedFrom(e23, e22));
        ll.add(newWasDerivedFrom(e24, e15));
        ll.add(newWasDerivedFrom(e24, e16));
        ll.add(newWasDerivedFrom(e24, e17));
        ll.add(newWasDerivedFrom(e24, e18));
        ll.add(newWasDerivedFrom(e24, e19));
        ll.add(newWasDerivedFrom(e24, e20));
        ll.add(newWasDerivedFrom(e24, e21));
        ll.add(newWasDerivedFrom(e24, e22));

        // Workflow-level context: agent, workflow activity, and their links.
        ll.add(pFactory.newAgent(pc(agent)));
        ll.add(pFactory.newActivity(pc(workflow)));
        ll.add(pFactory.newWasAssociatedWith(null, pc(workflow), pc(agent)));
        ll.add(pFactory.newWasStartedBy(null, pc(activity), null, pc(workflow)));

        return ll;
    }
}
public class CpcSketch { /** * Returns a copy of this sketch * @ return a copy of this sketch */ CpcSketch copy ( ) { } }
final CpcSketch copy = new CpcSketch ( lgK , seed ) ; copy . numCoupons = numCoupons ; copy . mergeFlag = mergeFlag ; copy . fiCol = fiCol ; copy . windowOffset = windowOffset ; copy . slidingWindow = ( slidingWindow == null ) ? null : slidingWindow . clone ( ) ; copy . pairTable = ( pairTable == null ) ? null : pairTable . copy ( ) ; copy . kxp = kxp ; copy . hipEstAccum = hipEstAccum ; return copy ;
public class CmsSystemConfiguration { /** * Sets the mail settings . < p > * @ param mailSettings the mail settings to set . */ public void setMailSettings ( CmsMailSettings mailSettings ) { } }
m_mailSettings = mailSettings ; if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( Messages . get ( ) . getBundle ( ) . key ( Messages . LOG_MAIL_SETTINGS_1 , mailSettings ) ) ; }
public class DefaultBrokerCache {
    /**
     * Get an object for the specified factory, key, and broker at the scope selected by the
     * factory. {@link DefaultBrokerCache} guarantees that calling this method from brokers with
     * the same leaf scope will return the same object.
     *
     * @param factory the shared-resource factory that both names the cache entry and selects
     *                the auto scope
     * @param key     the resource key; also used when building the factory's config view
     * @param broker  the broker whose scope hierarchy is consulted
     * @return the shared object at the factory-selected scope
     * @throws ExecutionException if the scope computation inside the cache loader fails
     */
    @SuppressWarnings(value = "unchecked")
    <T, K extends SharedResourceKey> T getAutoScoped(final SharedResourceFactory<T, K, S> factory,
            final K key, final SharedResourcesBrokerImpl<S> broker) throws ExecutionException {
        // figure out auto scope
        // Cache key is (broker's own scope, factory name, resource key) so the (possibly
        // expensive) getAutoScope computation runs at most once per combination.
        RawJobBrokerKey autoscopeCacheKey =
                new RawJobBrokerKey(broker.getWrappedSelfScope(), factory.getName(), key);
        ScopeWrapper<S> selectedScope = this.autoScopeCache.get(autoscopeCacheKey,
                new Callable<ScopeWrapper<S>>() {
            @Override
            public ScopeWrapper<S> call() throws Exception {
                // Ask the factory which scope it wants, given its config view.
                return broker.getWrappedScope(
                        factory.getAutoScope(broker,
                                broker.getConfigView(null, key, factory.getName())));
            }
        });

        // get actual object at the scope the factory selected
        return getScoped(factory, key, selectedScope, broker);
    }
}
public class FileListOperations { /** * Optimized implementation to perform both a remove and an add * @ param toRemove * @ param toAdd */ public void removeAndAdd ( final String toRemove , final String toAdd ) { } }
writeLock . lock ( ) ; try { FileListCacheValue fileList = getFileList ( ) ; boolean done = fileList . addAndRemove ( toAdd , toRemove ) ; if ( done ) { updateFileList ( fileList ) ; if ( trace ) { log . trace ( "Updated file listing: added " + toAdd + " and removed " + toRemove ) ; } } } finally { writeLock . unlock ( ) ; }
public class Descriptives { /** * Calculates the modes ( more than one if found ) . * @ param flatDataCollection * @ return */ public static FlatDataCollection mode ( FlatDataCollection flatDataCollection ) { } }
AssociativeArray frequencies = frequencies ( flatDataCollection ) ; int maxCounter = 0 ; FlatDataList modeList = new FlatDataList ( ) ; for ( Map . Entry < Object , Object > entry : frequencies . entrySet ( ) ) { Object key = entry . getKey ( ) ; int count = ( ( Number ) entry . getValue ( ) ) . intValue ( ) ; if ( maxCounter == count ) { modeList . add ( key ) ; } else if ( maxCounter < count ) { maxCounter = count ; modeList . clear ( ) ; modeList . add ( key ) ; } } return modeList . toFlatDataCollection ( ) ;
public class AfplibFactoryImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public String convertTileSetColorCSPACEToString ( EDataType eDataType , Object instanceValue ) { } }
return instanceValue == null ? null : instanceValue . toString ( ) ;
public class Util { /** * Clears ( sets to null ) all blocks between off ( inclusive ) and off + len ( exclusive ) in the given * array . */ static void clear ( byte [ ] [ ] blocks , int off , int len ) { } }
// this is significantly faster than looping or Arrays . fill ( which loops ) , particularly when // the length of the slice to be cleared is < = to ARRAY _ LEN ( in that case , it ' s faster by a // factor of 2) int remaining = len ; while ( remaining > ARRAY_LEN ) { System . arraycopy ( NULL_ARRAY , 0 , blocks , off , ARRAY_LEN ) ; off += ARRAY_LEN ; remaining -= ARRAY_LEN ; } System . arraycopy ( NULL_ARRAY , 0 , blocks , off , remaining ) ;
public class MenuDrawer {
    /**
     * Attaches the MenuDrawer to the Activity.
     *
     * @param activity The activity the menu drawer will be attached to.
     * @param position Where to position the menu.
     * @return The created MenuDrawer instance.
     */
    public static MenuDrawer attach(Activity activity, Position position) {
        // Delegates to the full overload with the default drawer type (BEHIND).
        return attach(activity, Type.BEHIND, position);
    }
}
public class ARCoreAnchor { /** * Update the anchor based on arcore best knowledge of the world * @ param scale */ protected void update ( float scale ) { } }
// Updates only when the plane is in the scene GVRSceneObject owner = getOwnerObject ( ) ; if ( ( owner != null ) && isEnabled ( ) && owner . isEnabled ( ) ) { convertFromARtoVRSpace ( scale ) ; }
public class TextFormatter {
    /**
     * Similar to {@link #appendElapsedAndSize(StringBuilder, long, long, long)} except that
     * this method creates a new {@link StringBuilder}.
     *
     * @param startTimeNanos start timestamp in nanoseconds
     * @param endTimeNanos   end timestamp in nanoseconds
     * @param size           payload size to report
     * @return a fresh builder containing the formatted elapsed time and size
     */
    public static StringBuilder elapsedAndSize(long startTimeNanos, long endTimeNanos, long size) {
        // 16 chars is a reasonable initial capacity for the typical output.
        final StringBuilder buf = new StringBuilder(16);
        appendElapsedAndSize(buf, startTimeNanos, endTimeNanos, size);
        return buf;
    }
}
public class WebRiskServiceV1Beta1Client {
    /**
     * Gets the full hashes that match the requested hash prefix. This is used after a hash
     * prefix is looked up in a threatList and there is a match. The client side threatList
     * only holds partial hashes so the client must query this method to determine if there
     * is a full hash match of a threat.
     *
     * <p>Sample code:
     *
     * <pre><code>
     * try (WebRiskServiceV1Beta1Client webRiskServiceV1Beta1Client = WebRiskServiceV1Beta1Client.create()) {
     *   ByteString hashPrefix = ByteString.copyFromUtf8("");
     *   List&lt;ThreatType&gt; threatTypes = new ArrayList&lt;&gt;();
     *   SearchHashesResponse response = webRiskServiceV1Beta1Client.searchHashes(hashPrefix, threatTypes);
     * }
     * </code></pre>
     *
     * @param hashPrefix A hash prefix, consisting of the most significant 4-32 bytes of a
     *     SHA256 hash. For JSON requests, this field is base64-encoded.
     * @param threatTypes Required. The ThreatLists to search in.
     * @throws com.google.api.gax.rpc.ApiException if the remote call fails
     */
    public final SearchHashesResponse searchHashes(ByteString hashPrefix, List<ThreatType> threatTypes) {
        // Wrap the raw arguments in a request proto and delegate to the request overload.
        SearchHashesRequest request = SearchHashesRequest.newBuilder()
                .setHashPrefix(hashPrefix)
                .addAllThreatTypes(threatTypes)
                .build();
        return searchHashes(request);
    }
}
public class MessageProcessor {
    /**
     * Indicates that the WAS server is closing for E-business. This event is propagated to
     * all instances of mediation point, so the mediations can all be closed properly.
     * Part of the JsEngineComponent interface.
     */
    @Override
    public void serverStopping() {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(tc, "serverStopping");

        // Flip the open-for-business flag under the start/stop lock so concurrent
        // start/stop processing observes a consistent value.
        synchronized (_mpStartStopLock) {
            _isWASOpenForEBusiness = false;
        }

        // If the destination manager is null, then don't even attempt this.
        // There is a possibility that the destination manager instance is corrupt.
        if (_destinationManager != null)
            _destinationManager.announceWASClosedForEBusiness();

        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "serverStopping");
    }
}
public class Logger {
    /**
     * Log a message and a throwable with level {@link LogLevel#VERBOSE}.
     *
     * @param msg the message to log
     * @param tr  the throwable to log
     */
    public void v(String msg, Throwable tr) {
        // Delegates to the shared print path with the VERBOSE level.
        println(LogLevel.VERBOSE, msg, tr);
    }
}
public class PhoneNumberUtil {
    /**
     * Parse phone number.
     *
     * @param pphoneNumber phone number as string (with cursor position)
     * @param pcountryCode ISO code of country
     * @return PhoneNumberData with cursor position
     */
    public ValueWithPos<PhoneNumberData> parsePhoneNumber(final ValueWithPos<String> pphoneNumber,
            final String pcountryCode) {
        // Locale.ROOT keeps parsing locale-independent when no locale is supplied.
        return this.parsePhoneNumber(pphoneNumber, pcountryCode, Locale.ROOT);
    }
}
public class PubSubInputHandler {
    /**
     * Broadcasts a "flushed" message for the given stream to every downstream neighbour
     * and removes the stream's state from each output handler.
     *
     * This is only called from attemptFlush() as flushQuery's are processed by the
     * PubSubOutputHandler.
     *
     * @param ignore   unused; each output handler knows its own target
     * @param streamID the stream that has just been flushed
     * @throws SIResourceException if forwarding the flush fails
     * @see com.ibm.ws.sib.processor.impl.interfaces.DownstreamControl#sendFlushedMessage(com.ibm.ws.sib.utils.SIBUuid12)
     */
    @Override
    public void sendFlushedMessage(SIBUuid8 ignore, SIBUuid12 streamID) throws SIResourceException {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(tc, "sendFlushedMessage", new Object[] { streamID });

        // This flush should be broadcast to all downstream neighbors for this cell as it
        // is the result of a startFlush(). This is a bit of a kludge since we may be
        // sending flushes to cells which have no information about a stream.
        // NOTE: getAllPubSubOutputHandlers() locks the handlers; the finally block below
        // is responsible for releasing that lock.
        HashMap allPubSubOutputHandlers = _destination.getAllPubSubOutputHandlers();
        try {
            Iterator itr = allPubSubOutputHandlers.values().iterator();
            while (itr.hasNext()) {
                // Get the appropriate target cellule and forward the message.
                PubSubOutputHandler handler = (PubSubOutputHandler) itr.next();
                // Note that the null Cellule we pass in here is ignored as each
                // OutputHandler knows its targetCellule.
                handler.sendFlushedMessage(null, streamID);
                // Also, tell the handler to remove any information for this stream
                // since it's just been flushed.
                handler.removeStream(streamID);
            }
        } finally {
            // By calling the getAllPubSubOutputHandlers it will lock the handlers;
            // always release that lock, even if forwarding failed part-way.
            _destination.unlockPubsubOutputHandlers();
        }

        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "sendFlushedMessage");
    }
}
public class ObjectFactory {
    /**
     * Create an instance of {@link JAXBElement}{@code <}{@link ArrayAssociationType}{@code >}
     * for the GML {@code members} element.
     *
     * @param value Java instance representing the XML element's value.
     * @return the new instance of {@link JAXBElement}{@code <}{@link ArrayAssociationType}{@code >}
     */
    @XmlElementDecl(namespace = "http://www.opengis.net/gml", name = "members")
    public JAXBElement<ArrayAssociationType> createMembers(ArrayAssociationType value) {
        // null scope: the element is not bound to a particular parent class.
        return new JAXBElement<ArrayAssociationType>(_Members_QNAME, ArrayAssociationType.class, null, value);
    }
}
public class KafkaMsgConsumer { /** * Gets number of partitions of a topic . * @ param topicName * @ return topic ' s number of partitions , or { @ code 0 } if the topic does not * exist * @ since 1.2.0 */ public int getNumPartitions ( String topicName ) { } }
Map < String , List < PartitionInfo > > topicInfo = getTopicInfo ( ) ; List < PartitionInfo > partitionInfo = topicInfo != null ? topicInfo . get ( topicName ) : null ; return partitionInfo != null ? partitionInfo . size ( ) : 0 ;
public class ExtensionStdMenus {
    /**
     * Lazily creates the "exclude from proxy" popup menu item, assigning it the given
     * menu index on first creation. Subsequent calls return the cached instance and
     * ignore {@code menuIndex}.
     *
     * @param menuIndex position of the item within the popup menu (used only on first call)
     * @return the shared popup menu item
     */
    private PopupExcludeFromProxyMenu getPopupExcludeFromProxyMenu(int menuIndex) {
        if (popupExcludeFromProxyMenu == null) {
            popupExcludeFromProxyMenu = new PopupExcludeFromProxyMenu();
            popupExcludeFromProxyMenu.setMenuIndex(menuIndex);
        }
        return popupExcludeFromProxyMenu;
    }
}
public class StorIOSQLite {
    /**
     * Allows observing changes of the required table.
     *
     * Notice that {@link StorIOSQLite} knows only about changes that happened as a result of
     * Put or Delete Operations executed on this instance of {@link StorIOSQLite}. Emission
     * may happen on any thread that performed a Put or Delete operation, so it's recommended
     * to apply {@link Flowable#observeOn(io.reactivex.Scheduler)} if you need to receive
     * events on a special thread. Notice that the returned {@link Flowable} is a
     * "Hot Flowable": it never ends, so you should manually dispose from it to prevent a
     * memory leak, and it can cause backpressure problems.
     *
     * @param table table name to monitor; must be non-empty
     * @param backpressureStrategy strategy applied to the resulting {@link Flowable}
     * @return {@link io.reactivex.Flowable} of {@link Changes} subscribed to changes of the table
     */
    @NonNull
    public Flowable<Changes> observeChangesInTable(@NonNull String table,
            @NonNull BackpressureStrategy backpressureStrategy) {
        checkNotEmpty(table, "Table can not be null or empty");
        // Delegate to the multi-table variant with a singleton set.
        return observeChangesInTables(Collections.singleton(table), backpressureStrategy);
    }
}
public class HttpHelper {
    /**
     * Downloads the entire resource instead of part.
     *
     * @param uri  URI to retrieve
     * @param type expected text-like MIME type of that content
     * @return content as a {@code String}
     * @throws IOException if the content can't be retrieved because of a bad URI,
     *                     network problem, etc.
     * @see #downloadViaHttp(String, HttpHelper.ContentType, int)
     */
    public static CharSequence downloadViaHttp(String uri, ContentType type) throws IOException {
        // MAX_VALUE byte limit means "no limit": fetch the whole resource.
        return downloadViaHttp(uri, type, Integer.MAX_VALUE);
    }
}
public class MatrixFeatures_ZDRM { /** * Checks to see if any element in the matrix is NaN . * @ param m A matrix . Not modified . * @ return True if any element in the matrix is NaN . */ public static boolean hasNaN ( ZMatrixD1 m ) { } }
int length = m . getDataLength ( ) ; for ( int i = 0 ; i < length ; i ++ ) { if ( Double . isNaN ( m . data [ i ] ) ) return true ; } return false ;
public class AbstractGenerateSoyEscapingDirectiveCode { /** * Called reflectively when Ant sees { @ code < libdefined > } . */ public void addConfiguredLibdefined ( FunctionNamePredicate p ) { } }
final Pattern namePattern = p . namePattern ; if ( namePattern == null ) { throw new IllegalStateException ( "Please specify a pattern attribute for <libdefined>" ) ; } availableIdentifiers = availableIdentifiers . or ( identifierName -> namePattern . matcher ( identifierName ) . matches ( ) ) ;
public class MpRoSitePool {
    /**
     * Inform the pool that the work associated with the given txnID is complete.
     * The site is returned to the idle pool only if the catalog has not changed
     * since the site was created; otherwise it is shut down and discarded.
     *
     * @param txnId transaction whose site should be released
     */
    void completeWork(long txnId) {
        // During shutdown the pool no longer tracks sites; ignore late completions.
        if (m_shuttingDown) {
            return;
        }
        MpRoSiteContext site = m_busySites.remove(txnId);
        if (site == null) {
            throw new RuntimeException("No busy site for txnID: " + txnId + " found, shouldn't happen.");
        }
        // check the catalog versions, only push back onto idle if the catalog hasn't changed
        // otherwise, just let it get garbage collected and let doWork() construct new ones
        // for the pool with the updated catalog.
        if (site.getCatalogCRC() == m_catalogContext.getCatalogCRC()
                && site.getCatalogVersion() == m_catalogContext.catalogVersion) {
            m_idleSites.push(site);
        } else {
            site.shutdown();
            m_allSites.remove(site);
        }
    }
}
public class InteractiveElement {
    /**
     * Double-clicks this element via WebDriver {@code Actions}, focusing the driver
     * first. Synchronizes on the class so concurrent interactive-element actions do
     * not interleave their focus/click sequences.
     *
     * @throws WidgetException if the double click fails for any reason
     * @see qc.automation.framework.widget.IClickableElement#doubleClick()
     */
    @Override
    public void doubleClick() throws WidgetException {
        try {
            Actions builder = new Actions(getGUIDriver().getWrappedDriver());
            // Focus and click as one unit so another thread can't steal focus between them.
            synchronized (InteractiveElement.class) {
                getGUIDriver().focus();
                builder.doubleClick(getWebElement()).build().perform();
            }
        } catch (Exception e) {
            // Wrap any driver failure with the element's locator for diagnosis.
            throw new WidgetException("Error while double clicking element", getByLocator(), e);
        }
    }
}
public class ForkJoinTask {
    /**
     * Returns the result of the computation when it {@link #isDone is done}.
     * This method differs from {@link #get()} in that abnormal completion results
     * in {@code RuntimeException} or {@code Error}, not {@code ExecutionException},
     * and that interrupts of the calling thread do <em>not</em> cause the method to
     * abruptly return by throwing {@code InterruptedException}.
     *
     * @return the computed result
     */
    public final V join() {
        int s;
        // Mask the completion status down to its terminal bits; anything other
        // than NORMAL (cancellation/exception) is rethrown to the caller.
        if ((s = doJoin() & DONE_MASK) != NORMAL)
            reportException(s);
        return getRawResult();
    }
}
public class DisambiguatedAlchemyEntity { /** * Set link to OpenCyc . Note : Provided only for entities that exist in this * that exist in this linked data - set . * @ param opencyc link to OpenCyc */ public void setOpencyc ( String opencyc ) { } }
if ( opencyc != null ) { opencyc = opencyc . trim ( ) ; } this . opencyc = opencyc ;
public class CatalogEntryMarshaller {
    /**
     * Marshall the given parameter object's database and table names into the
     * protocol representation.
     *
     * @param catalogEntry        the object to marshall; must not be {@code null}
     * @param protocolMarshaller  destination marshaller
     * @throws SdkClientException if the argument is {@code null} or marshalling fails
     */
    public void marshall(CatalogEntry catalogEntry, ProtocolMarshaller protocolMarshaller) {
        if (catalogEntry == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(catalogEntry.getDatabaseName(), DATABASENAME_BINDING);
            protocolMarshaller.marshall(catalogEntry.getTableName(), TABLENAME_BINDING);
        } catch (Exception e) {
            // Re-wrap with context, preserving the original cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class AbstractRegistry {
    /**
     * Helper used to get or create an instance of a core meter type. This is mostly used
     * internally to this implementation, but may be useful in rare cases for creating
     * customizations based on a core type in a sub-class.
     *
     * @param id      Identifier used to lookup this meter in the registry.
     * @param cls     Type of the meter.
     * @param dflt    Default value used if there is a failure during the lookup and it is
     *                not configured to propagate.
     * @param factory Function for creating a new instance of the meter type if one is not
     *                already available in the registry.
     * @return Instance of the meter.
     */
    @SuppressWarnings("unchecked")
    protected <T extends Meter> T getOrCreate(Id id, Class<T> cls, T dflt, Function<Id, T> factory) {
        try {
            Preconditions.checkNotNull(id, "id");
            // Atomically fetch-or-create the meter for this id.
            Meter m = Utils.computeIfAbsent(meters, id, i -> compute(factory.apply(i), dflt));
            // Same id registered with a different meter type: log it and fall back to
            // the default rather than casting incorrectly.
            if (!cls.isAssignableFrom(m.getClass())) {
                logTypeError(id, cls, m.getClass());
                m = dflt;
            }
            return (T) m;
        } catch (Exception e) {
            // propagate() rethrows only when configured to; otherwise we degrade to dflt.
            propagate(e);
            return dflt;
        }
    }
}
public class ReedSolomonDecoder {
    /**
     * Decode the inputs provided and write the recovered block to the output.
     *
     * @param inputs              array of input streams
     * @param erasedLocations     indexes in the inputs which are known to be erased
     * @param erasedLocationToFix index in the inputs which needs to be fixed
     * @param limit               maximum number of bytes to be written
     * @param out                 the output
     * @param reporter            progress reporter for the read phase
     * @param parallelReader      reader used to fill the read buffers
     * @param crc                 optional checksum accumulator for the recovered bytes
     * @return size of recovered bytes
     * @throws IOException on read failure or if decoding is interrupted
     */
    long writeFixedBlock(FSDataInputStream[] inputs, int[] erasedLocations,
            int erasedLocationToFix, long limit, OutputStream out, Progressable reporter,
            ParallelStreamReader parallelReader, CRC32 crc) throws IOException {
        LOG.info("Need to write " + limit + " bytes for erased location index " + erasedLocationToFix);
        if (crc != null) {
            crc.reset();
        }
        // NOTE(review): tmp appears unused in this method — candidate for removal.
        int[] tmp = new int[inputs.length];
        int[] decoded = new int[erasedLocations.length];
        // Loop while the number of written bytes is less than the max.
        long written;
        for (written = 0; written < limit; ) {
            // Reading may discover additional erasures, so the erased set can grow.
            erasedLocations = readFromInputs(inputs, erasedLocations, limit, reporter, parallelReader);
            if (decoded.length != erasedLocations.length) {
                decoded = new int[erasedLocations.length];
            }
            int toWrite = (int) Math.min((long) bufSize, limit - written);
            // Partition the buffer so each decoder thread handles one slice.
            int partSize = (int) Math.ceil(bufSize * 1.0 / parallelism);
            try {
                long startTime = System.currentTimeMillis();
                for (int i = 0; i < parallelism; i++) {
                    decodeOps.acquire(1);
                    int start = i * partSize;
                    int count = Math.min(bufSize - start, partSize);
                    parallelDecoder.execute(new DecodeOp(readBufs, writeBufs, start, count,
                            erasedLocations, reedSolomonCode[i]));
                }
                // Acquire-then-release of all permits acts as a barrier: every DecodeOp
                // must have released its permit (i.e. finished) before we continue.
                decodeOps.acquire(parallelism);
                decodeOps.release(parallelism);
                decodeTime += (System.currentTimeMillis() - startTime);
            } catch (InterruptedException e) {
                // NOTE(review): the interrupt status is not restored here before
                // converting to IOException — confirm whether callers rely on it.
                throw new IOException("Interrupted while waiting for read result");
            }
            // Emit only the buffer belonging to the location being repaired.
            for (int i = 0; i < erasedLocations.length; i++) {
                if (erasedLocations[i] == erasedLocationToFix) {
                    out.write(writeBufs[i], 0, toWrite);
                    if (crc != null) {
                        crc.update(writeBufs[i], 0, toWrite);
                    }
                    written += toWrite;
                    break;
                }
            }
        }
        return written;
    }
}
public class URI { /** * Get the path for this URI ( optionally with the query string and * fragment ) . * @ param p _ includeQueryString if true ( and query string is not null ) , * then a " ? " followed by the query string * will be appended * @ param p _ includeFragment if true ( and fragment is not null ) , * then a " # " followed by the fragment * will be appended * @ return the path for this URI possibly including the query string * and fragment */ public String getPath ( boolean p_includeQueryString , boolean p_includeFragment ) { } }
StringBuffer pathString = new StringBuffer ( m_path ) ; if ( p_includeQueryString && m_queryString != null ) { pathString . append ( '?' ) ; pathString . append ( m_queryString ) ; } if ( p_includeFragment && m_fragment != null ) { pathString . append ( '#' ) ; pathString . append ( m_fragment ) ; } return pathString . toString ( ) ;
public class Nodes {
    /**
     * Creates a new Node (of the same type as the original node) that is similar to the
     * original but doesn't contain any empty text or CDATA nodes and where all textual
     * content including attribute values or comments are trimmed and normalized.
     *
     * <p>"normalized" in this context means all whitespace characters are replaced by
     * space characters and consecutive whitespace characters are collapsed.</p>
     *
     * @param original the node to normalize; it is not modified
     * @return a normalized deep copy of {@code original}
     */
    public static Node normalizeWhitespace(Node original) {
        // Deep-clone so the original tree is left untouched.
        Node cloned = original.cloneNode(true);
        // DOM-level normalize merges adjacent text nodes first.
        cloned.normalize();
        handleWsRec(cloned, true);
        return cloned;
    }
}
public class RegionOperationClient {
    /**
     * Retrieves the specified region-specific Operations resource.
     *
     * <p>Sample code:
     *
     * <pre><code>
     * try (RegionOperationClient regionOperationClient = RegionOperationClient.create()) {
     *   ProjectRegionOperationName operation = ProjectRegionOperationName.of("[PROJECT]", "[REGION]", "[OPERATION]");
     *   Operation response = regionOperationClient.getRegionOperation(operation.toString());
     * }
     * </code></pre>
     *
     * @param operation Name of the Operations resource to return.
     * @throws com.google.api.gax.rpc.ApiException if the remote call fails
     */
    @BetaApi
    public final Operation getRegionOperation(String operation) {
        // Wrap the name in a request object and delegate to the request overload.
        GetRegionOperationHttpRequest request =
                GetRegionOperationHttpRequest.newBuilder().setOperation(operation).build();
        return getRegionOperation(request);
    }
}
public class HadoopArchives { /** * this assumes that there are two types of files file / dir * @ param fs the input filesystem * @ param p the top level path * @ param out the list of paths output of recursive ls * @ throws IOException */ private void recursivels ( FileSystem fs , Path p , List < FileStatus > out ) throws IOException { } }
FileStatus fstatus = fs . getFileStatus ( p ) ; if ( ! fstatus . isDir ( ) ) { out . add ( fstatus ) ; return ; } else { out . add ( fstatus ) ; FileStatus [ ] listStatus = fs . listStatus ( p ) ; for ( FileStatus stat : listStatus ) { recursivels ( fs , stat . getPath ( ) , out ) ; } }
public class WhiteboxImpl {
    /**
     * Copy state: for every field of {@code context} (static fields when the context is a
     * class, instance fields otherwise), set the matching field of {@code object} to the
     * context's value.
     *
     * @param object   the object receiving the state; must not be {@code null}
     * @param context  the object or class supplying the state; must not be {@code null}
     * @param strategy the field matching strategy; under {@code STRICT}, a field present in
     *                 the context but absent in the target is an error
     */
    static void copyState(Object object, Object context, FieldMatchingStrategy strategy) {
        if (object == null) {
            throw new IllegalArgumentException("object to set state cannot be null");
        } else if (context == null) {
            throw new IllegalArgumentException("context cannot be null");
        } else if (strategy == null) {
            throw new IllegalArgumentException("strategy cannot be null");
        }

        // A class context copies static fields; an instance context copies instance fields.
        Set<Field> allFields =
                isClass(context) ? getAllStaticFields(getType(context)) : getAllInstanceFields(context);
        for (Field field : allFields) {
            try {
                final boolean isStaticField = Modifier.isStatic(field.getModifiers());
                // Static fields are written on the target's class, instance fields on the object.
                setInternalState(isStaticField ? getType(object) : object, field.getType(),
                        field.get(context));
            } catch (FieldNotFoundException e) {
                // Non-strict matching silently skips fields missing from the target.
                if (strategy == FieldMatchingStrategy.STRICT) {
                    throw e;
                }
            } catch (IllegalAccessException e) {
                // Should never happen
                throw new RuntimeException(
                        "Internal Error: Failed to get the field value in method setInternalStateFromContext.",
                        e);
            }
        }
    }
}
public class PcapPktHdr {
    /**
     * Create a new PcapPktHdr instance.
     *
     * @param caplen capture length.
     * @param len    length.
     * @param tvSec  tv_sec.
     * @param tvUsec tv_usec.
     * @return returns PcapPktHdr.
     */
    public static PcapPktHdr newInstance(final int caplen, final int len, final int tvSec,
            final long tvUsec) {
        return new PcapPktHdr(caplen, len, tvSec, tvUsec);
    }
}
public class Reflection { /** * Serialize a ( { @ linkplain Serializable serializable } ) lambda . * @ param lambda the ( { @ linkplain Serializable serializable } ) lambda to serialize . * @ return the serialized form of the given lambda . */ private static SerializedLambda serializedLambda ( Serializable lambda ) { } }
try { Method replaceMethod = lambda . getClass ( ) . getDeclaredMethod ( "writeReplace" ) ; replaceMethod . setAccessible ( true ) ; return ( SerializedLambda ) replaceMethod . invoke ( lambda ) ; } catch ( Exception e ) { throw new IllegalStateException ( "Reflection failed." ) ; }
public class CommsByteBuffer {
    /**
     * Puts an SIDestinationAddress into the byte buffer.
     * Wire layout: uuid length (short), uuid bytes, destination name, bus name, and —
     * from FAP version 9 onwards — a localOnly boolean byte.
     *
     * @param destAddr the address to serialise; may be {@code null}, in which case empty
     *                 name/uuid fields are written
     * @param fapLevel the FAP level of this connection. Used to decide what information
     *                 to flow down the wire.
     */
    public synchronized void putSIDestinationAddress(SIDestinationAddress destAddr, short fapLevel) {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(this, tc, "putSIDestinationAddress",
                    new Object[] { destAddr, Short.valueOf(fapLevel) });
        checkValid();

        String destName = null;
        String busName = null;
        byte[] uuid = new byte[0];
        boolean localOnly = false;

        if (destAddr != null) {
            destName = destAddr.getDestinationName();
            busName = destAddr.getBusName();

            // If the user has passed in something that we do not know how to serialize,
            // do not even try and do anything with the other parts of it.
            if (destAddr instanceof JsDestinationAddress) {
                JsDestinationAddress jsDestAddr = (JsDestinationAddress) destAddr;

                // If the isMediation() flag has been set, ensure we propagate this as a
                // special UUId. We can do this because a mediation destination only carries
                // a name and the UUId field is actually redundant.
                // lohith liberty change
                /*
                 * if (jsDestAddr.isFromMediation())
                 *   uuid = new byte[1]; uuid[0] = CommsConstants.DESTADDR_ISFROMMEDIATION;
                 * else
                 */
                {
                    if (jsDestAddr.getME() != null)
                        uuid = jsDestAddr.getME().toByteArray();
                    localOnly = jsDestAddr.isLocalOnly();
                }
            }
        }

        // uuid is length-prefixed; a zero length means "no uuid".
        putShort((short) uuid.length);
        if (uuid.length != 0)
            put(uuid);
        putString(destName);
        putString(busName);

        // Only send localOnly field if fapLevel >= 9 so we don't break
        // down-level servers/clients.
        if (fapLevel >= JFapChannelConstants.FAP_VERSION_9) {
            put(localOnly ? CommsConstants.TRUE_BYTE : CommsConstants.FALSE_BYTE);
        }

        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(this, tc, "putSIDestinationAddress");
    }
}
public class Configuration {
    /**
     * Set the array of string values for the <code>name</code> property as
     * comma delimited values.
     *
     * @param name   property name.
     * @param values the values to join with commas.
     */
    public void setStrings(String name, String... values) {
        set(name, StringUtils.join(values, ","));
    }
}
public class JSDocInfoBuilder { /** * Records that the { @ link JSDocInfo } being built should have its * { @ link JSDocInfo # isFinal ( ) } flag set to { @ code true } . * @ return { @ code true } if the finality was recorded and { @ code false } if it was already defined */ public boolean recordFinality ( ) { } }
if ( ! currentInfo . isFinal ( ) ) { currentInfo . setFinal ( true ) ; populated = true ; return true ; } else { return false ; }
public class BlockManagement {
    /**
     * Copies the specified half-open interval [start, end) of characters from
     * the array into a new String.
     *
     * @param array the source character array.
     * @param start the inclusive start index.
     * @param end   the exclusive end index.
     * @return the characters in [start, end) as a String.
     */
    private String copy(final char[] array, final int start, final int end) {
        // Use the String(char[], offset, count) constructor instead of an
        // append-per-character StringBuilder loop: one array copy, no resizing.
        // (Invalid indices now raise StringIndexOutOfBoundsException instead of
        // ArrayIndexOutOfBoundsException; both are IndexOutOfBoundsException.)
        return new String(array, start, end - start);
    }
}
public class Strands {
    /**
     * Awaits the termination of a given strand, at most for the timeout duration specified.
     * This method blocks until this strand terminates or the timeout elapses.
     *
     * @param strand  the strand to join. May be an object of type {@code Strand}, {@code Fiber} or {@code Thread}.
     * @param timeout the maximum duration to wait for the strand to terminate in the time unit specified by {@code unit}.
     * @param unit    the time unit of {@code timeout}.
     * @throws TimeoutException     if this strand did not terminate by the time the timeout has elapsed.
     * @throws ExecutionException   if this strand has terminated as a result of an uncaught exception
     *                              (which will be the {@link Throwable#getCause() cause} of the thrown {@code ExecutionException}).
     * @throws InterruptedException if the current thread is interrupted while waiting.
     */
    public static void join(Object strand, long timeout, TimeUnit unit) throws ExecutionException, InterruptedException, TimeoutException {
        // Thin convenience wrapper; Strand.join presumably dispatches on the
        // runtime type of 'strand' (Strand/Fiber/Thread per the javadoc) -- see Strand.join.
        Strand.join(strand, timeout, unit);
    }
}
public class Logger { /** * Log a message at the WARN level . * @ param marker The marker specific to this log statement * @ param message the message string to be logged * @ param o1 the first argument * @ param o2 the second argument * @ since 1.0.0 */ public void warn ( final Marker marker , final String message , final Object o1 , final Object o2 ) { } }
log . warn ( marker , sanitize ( message ) , o1 , o1 ) ;
public class PossibleMemoryBloat {
    /**
     * implements the visitor to look for methods that empty a bloatable field;
     * if found, remove these fields from the current list.
     *
     * Tracks collection-valued fields across method invocations, stores and
     * returns, using OpcodeStack user values to follow a field through
     * iterator() / subset-view calls. The try/finally shape is the standard
     * fb-contrib pattern: the stack must be advanced exactly once per opcode.
     *
     * @param seen the opcode of the currently parsed instruction
     */
    @Override
    public void sawOpcode(int seen) {
        XField userValue = null;
        try {
            stack.precomputation(this);
            if ((seen == Const.INVOKEVIRTUAL) || (seen == Const.INVOKEINTERFACE) || (seen == Const.INVOKEDYNAMIC)) {
                String sig = getSigConstantOperand();
                int argCount = SignatureUtils.getNumParameters(sig);
                if (stack.getStackDepth() > argCount) {
                    // The receiver sits below the arguments on the stack.
                    OpcodeStack.Item itm = stack.getStackItem(argCount);
                    XField field = itm.getXField();
                    if ((field != null) && bloatableCandidates.containsKey(field)) {
                        checkMethodAsDecreasingOrIncreasing(field);
                    }
                    String calledMethod = getNameConstantOperand();
                    if ("iterator".equals(calledMethod)) {
                        // Propagate the source field through the returned iterator.
                        userValue = (XField) itm.getUserValue();
                        if (userValue == null) {
                            userValue = field;
                        }
                    } else {
                        if (field == null) {
                            // Receiver may itself be a tracked view of a field.
                            field = (XField) itm.getUserValue();
                        }
                        if (field != null) {
                            if (mapSubsets.contains(calledMethod)) {
                                // Subset views (keySet/values/...) keep tracking the field.
                                userValue = field;
                            } else if ("remove".equals(calledMethod) && "java/util/Iterator".equals(getClassConstantOperand())) {
                                // Iterator.remove empties the backing field: no longer bloatable.
                                bloatableCandidates.remove(field);
                                bloatableFields.remove(field);
                            }
                        }
                    }
                    // Any argument register passed to a call is no longer a pristine JAXBContext.
                    for (int i = 0; i < argCount; i++) {
                        itm = stack.getStackItem(i);
                        jaxbContextRegs.remove(itm.getRegisterNumber());
                    }
                }
            } else if (seen == Const.PUTFIELD) {
                if (stack.getStackDepth() > 0) {
                    OpcodeStack.Item item = stack.getStackItem(0);
                    jaxbContextRegs.remove(item.getRegisterNumber());
                }
            } else if (seen == Const.PUTSTATIC) {
                if (stack.getStackDepth() > 0) {
                    OpcodeStack.Item item = stack.getStackItem(0);
                    if (nonBloatableSigs.contains(item.getSignature())) {
                        XField field = item.getXField();
                        bloatableFields.remove(field);
                    }
                    jaxbContextRegs.remove(item.getRegisterNumber());
                }
            }
            // Should not include private methods
            else if (seen == Const.ARETURN) {
                // Fields escaping via return are not considered bloatable here.
                removeFieldsThatGetReturned();
            } else if (OpcodeUtils.isALoad(seen)) {
                // Restore any field association recorded for this local register.
                userValue = userValues.get(RegisterUtils.getALoadReg(this, seen));
            } else if (OpcodeUtils.isAStore(seen)) {
                if (stack.getStackDepth() > 0) {
                    OpcodeStack.Item itm = stack.getStackItem(0);
                    userValues.put(RegisterUtils.getAStoreReg(this, seen), (XField) itm.getUserValue());
                    XMethod xm = itm.getReturnValueOf();
                    if (xm != null) {
                        // Remember registers holding a fresh JAXBContext.newInstance() result.
                        FQMethod calledMethod = new FQMethod(xm.getClassName().replace('.', '/'), xm.getName(), xm.getSignature());
                        if (jaxbNewInstance.equals(calledMethod)) {
                            jaxbContextRegs.put(RegisterUtils.getAStoreReg(this, seen), getPC());
                        }
                    }
                }
            }
        } finally {
            // Always advance the opcode stack, then tag the produced top-of-stack
            // item with the field it represents (if any).
            stack.sawOpcode(this, seen);
            if ((userValue != null) && (stack.getStackDepth() > 0)) {
                OpcodeStack.Item itm = stack.getStackItem(0);
                itm.setUserValue(userValue);
            }
        }
    }
}
public class GameOfLife {
    /**
     * Zeroes the top-left {@code maxX} x {@code maxY} region of the matrix.
     *
     * @param matrix the board to clear; must have at least maxX rows of at least maxY columns.
     * @param maxX   number of rows to clear.
     * @param maxY   number of columns to clear in each row.
     */
    public static void emptyMatrix(byte[][] matrix, int maxX, int maxY) {
        // Arrays.fill replaces the hand-rolled inner loop: clearer and lets the
        // JDK use an optimized fill per row.
        for (int i = 0; i < maxX; i++) {
            java.util.Arrays.fill(matrix[i], 0, maxY, (byte) 0);
        }
    }
}
public class ProcedureExtensions { /** * Curries a procedure that takes two arguments . * @ param procedure * the original procedure . May not be < code > null < / code > . * @ param argument * the fixed first argument of { @ code procedure } . * @ return a procedure that takes one argument . Never < code > null < / code > . */ @ Pure public static < P1 , P2 > Procedure1 < P2 > curry ( final Procedure2 < ? super P1 , ? super P2 > procedure , final P1 argument ) { } }
if ( procedure == null ) throw new NullPointerException ( "procedure" ) ; return new Procedure1 < P2 > ( ) { @ Override public void apply ( P2 p ) { procedure . apply ( argument , p ) ; } } ;
public class SignalHandler { /** * Register some signal handlers . * @ param LOG The slf4j logger */ public static void register ( final Logger LOG ) { } }
synchronized ( SignalHandler . class ) { if ( registered ) { return ; } registered = true ; final String [ ] SIGNALS = OperatingSystem . isWindows ( ) ? new String [ ] { "TERM" , "INT" } : new String [ ] { "TERM" , "HUP" , "INT" } ; StringBuilder bld = new StringBuilder ( ) ; bld . append ( "Registered UNIX signal handlers for [" ) ; String separator = "" ; for ( String signalName : SIGNALS ) { try { new Handler ( signalName , LOG ) ; bld . append ( separator ) ; bld . append ( signalName ) ; separator = ", " ; } catch ( Exception e ) { LOG . info ( "Error while registering signal handler" , e ) ; } } bld . append ( "]" ) ; LOG . info ( bld . toString ( ) ) ; }
public class Quaterniond {
    /**
     * Add the quaternion <code>(x, y, z, w)</code> to this quaternion.
     *
     * @param x the x component of the vector part
     * @param y the y component of the vector part
     * @param z the z component of the vector part
     * @param w the real/scalar component
     * @return this
     */
    public Quaterniond add(double x, double y, double z, double w) {
        // Delegate to the destination-taking overload, using this quaternion
        // as both the source operand and the destination (in-place add).
        return add(x, y, z, w, this);
    }
}
public class ItemUtils { /** * Converts a map of string to simple objects into the low - level * representation ; or null if the input is null . */ public static Map < String , AttributeValue > fromSimpleMap ( Map < String , Object > map ) { } }
if ( map == null ) return null ; // row with multiple attributes Map < String , AttributeValue > result = new LinkedHashMap < String , AttributeValue > ( ) ; for ( Map . Entry < String , Object > entry : map . entrySet ( ) ) result . put ( entry . getKey ( ) , toAttributeValue ( entry . getValue ( ) ) ) ; return result ;
public class AbstractSearchStructure { /** * Returns a { @ link LatLng } object with the geolocation of the vector with the given internal id or null * if the internal id does not exist . Accesses the BDB store ! * @ param iid * The internal id of the vector * @ return The geolocation mapped to the given internal id or null if the internal id does not exist */ public LatLng getGeolocation ( int iid ) { } }
if ( iid < 0 || iid > loadCounter ) { System . out . println ( "Internal id " + iid + " is out of range!" ) ; return null ; } DatabaseEntry key = new DatabaseEntry ( ) ; IntegerBinding . intToEntry ( iid , key ) ; DatabaseEntry data = new DatabaseEntry ( ) ; if ( ( iidToGeolocationDB . get ( null , key , data , null ) == OperationStatus . SUCCESS ) ) { TupleInput input = TupleBinding . entryToInput ( data ) ; double latitude = input . readDouble ( ) ; double longitude = input . readDouble ( ) ; LatLng geolocation = new LatLng ( latitude , longitude ) ; return geolocation ; } else { System . out . println ( "Internal id " + iid + " is in range but gelocation was not found." ) ; return null ; }
public class SipServletRequestImpl {
    /**
     * Restores this request from its externalized form.
     * The read order below must mirror writeExternal exactly; do not reorder.
     *
     * @see java.io.Externalizable#readExternal(java.io.ObjectInput)
     */
    public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
        if (logger.isDebugEnabled()) {
            logger.debug("readExternal");
        }
        super.readExternal(in);
        // The raw SIP message was serialized as its string form; re-parse it.
        String messageString = in.readUTF();
        try {
            message = SipFactoryImpl.messageFactory.createRequest(messageString);
        } catch (ParseException e) {
            throw new IllegalArgumentException("Message " + messageString + " previously serialized could not be reparsed", e);
        }
        // Optional linked (B2BUA) request, guarded by a presence flag.
        boolean isLinkedRequestSerialized = in.readBoolean();
        if (isLinkedRequestSerialized) {
            linkedRequest = (SipServletRequestImpl) in.readObject();
        }
        createDialog = in.readBoolean();
        // Enum fields are serialized by name; an empty string means "not set".
        String routingDirectiveString = in.readUTF();
        if (!routingDirectiveString.equals("")) {
            routingDirective = SipApplicationRoutingDirective.valueOf(routingDirectiveString);
        }
        String routingStateString = in.readUTF();
        if (!routingStateString.equals("")) {
            routingState = RoutingState.valueOf(routingStateString);
        }
        // Optional routing region, again guarded by a presence flag.
        boolean isRoutingRegionSet = in.readBoolean();
        if (isRoutingRegionSet) {
            routingRegion = (SipApplicationRoutingRegion) in.readObject();
        }
        isInitial = in.readBoolean();
        if (logger.isDebugEnabled()) {
            logger.debug("readExternal - isInitial=" + isInitial);
        }
        isFinalResponseGenerated = in.readBoolean();
        is1xxResponseGenerated = in.readBoolean();
    }
}
public class JsHdrsImpl {
    /**
     * Clear the Guaranteed Delivery Remote Browse information in the message.
     * Javadoc description supplied by JsMessage interface.
     */
    public void clearGuaranteedRemoteBrowse() {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.entry(this, tc, "clearGuaranteedRemoteBrowse");
        // Switching the choice field to the EMPTY variant discards whatever
        // remote-browse data is currently held in the JsHdr2 schema part.
        getHdr2().setChoiceField(JsHdr2Access.GUARANTEEDREMOTEBROWSE, JsHdr2Access.IS_GUARANTEEDREMOTEBROWSE_EMPTY);
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.exit(this, tc, "clearGuaranteedRemoteBrowse");
    }
}
public class LatentDirichletAllocation {
    /**
     * Utility method that increases the map value by 1, treating an absent
     * key as 0.
     *
     * @param <K> key type
     * @param map the count map to update
     * @param key the key whose count is incremented
     */
    private <K> void increase(Map<K, Integer> map, K key) {
        final Integer current = map.getOrDefault(key, 0);
        map.put(key, current + 1);
    }
}
public class DRL6Expressions {
    /**
     * $ANTLR start synpred20_DRL6Expressions
     *
     * ANTLR-generated syntactic-predicate fragment: succeeds when the input
     * starts with LEFT_PAREN followed by a primitive type (a primitive cast).
     * Generated from DRL6Expressions.g -- do not hand-edit.
     */
    public final void synpred20_DRL6Expressions_fragment() throws RecognitionException {
        // src/main/resources/org/drools/compiler/lang/DRL6Expressions.g:540:8: ( LEFT_PAREN primitiveType )
        // src/main/resources/org/drools/compiler/lang/DRL6Expressions.g:540:9: LEFT_PAREN primitiveType
        {
            match(input, LEFT_PAREN, FOLLOW_LEFT_PAREN_in_synpred20_DRL6Expressions2720);
            // Backtracking failure: return immediately without consuming more input.
            if (state.failed) return;
            pushFollow(FOLLOW_primitiveType_in_synpred20_DRL6Expressions2722);
            primitiveType();
            state._fsp--;
            if (state.failed) return;
        }
    }
}
public class MultiUserChat {
    /**
     * Returns a list of <code>Affiliate</code> with the room outcasts.
     *
     * @return a list of <code>Affiliate</code> with the room outcasts.
     * @throws XMPPErrorException if you don't have enough privileges to get this information.
     * @throws NoResponseException if there was no response from the server.
     * @throws NotConnectedException if the connection is not available.
     * @throws InterruptedException if the calling thread was interrupted.
     */
    public List<Affiliate> getOutcasts() throws NoResponseException, XMPPErrorException, NotConnectedException, InterruptedException {
        // The outcast list is an admin-level affiliation query (XEP-0045).
        return getAffiliatesByAdmin(MUCAffiliation.outcast);
    }
}
public class PublicKeyRegistryByAliasImpl { /** * Returns the selected public key or null if not found . * @ param keyStoreChooser the keystore chooser * @ param publicKeyChooserByAlias the public key chooser by alias * @ return the selected public key or null if not found */ public PublicKey get ( KeyStoreChooser keyStoreChooser , PublicKeyChooserByAlias publicKeyChooserByAlias ) { } }
CacheKey cacheKey = new CacheKey ( keyStoreChooser . getKeyStoreName ( ) , publicKeyChooserByAlias . getAlias ( ) ) ; PublicKey retrievedPublicKey = cache . get ( cacheKey ) ; if ( retrievedPublicKey != null ) { return retrievedPublicKey ; } KeyStore keyStore = keyStoreRegistry . get ( keyStoreChooser ) ; if ( keyStore != null ) { PublicKeyFactoryBean factory = new PublicKeyFactoryBean ( ) ; factory . setKeystore ( keyStore ) ; factory . setAlias ( publicKeyChooserByAlias . getAlias ( ) ) ; try { factory . afterPropertiesSet ( ) ; PublicKey publicKey = ( PublicKey ) factory . getObject ( ) ; if ( publicKey != null ) { cache . put ( cacheKey , publicKey ) ; } return publicKey ; } catch ( Exception e ) { throw new PublicKeyException ( "error initializing the public key factory bean" , e ) ; } } return null ;
public class DefaultImageFormatChecker { /** * Checks if first headerSize bytes of imageHeaderBytes constitute a valid header for a HEIF * image . Details on HEIF header can be found at : < a * href = " http : / / nokiatech . github . io / heif / technical . html " > < / a > * @ param imageHeaderBytes * @ param headerSize * @ return true if imageHeaderBytes is a valid header for a HEIF image */ private static boolean isHeifHeader ( final byte [ ] imageHeaderBytes , final int headerSize ) { } }
if ( headerSize < HEIF_HEADER_LENGTH ) { return false ; } final byte boxLength = imageHeaderBytes [ 3 ] ; if ( boxLength < 8 ) { return false ; } for ( final String heifFtype : HEIF_HEADER_SUFFIXES ) { final int indexOfHeaderPattern = ImageFormatCheckerUtils . indexOfPattern ( imageHeaderBytes , imageHeaderBytes . length , ImageFormatCheckerUtils . asciiBytes ( HEIF_HEADER_PREFIX + heifFtype ) , HEIF_HEADER_LENGTH ) ; if ( indexOfHeaderPattern > - 1 ) { return true ; } } return false ;
public class OrdinalValue { /** * Compares this value and another according to the defined order , or * checks this number value for membership in a value list . * @ param o a { @ link Value } . * @ return If the provided value is an { @ link OrdinalValue } , returns * { @ link ValueComparator # GREATER _ THAN } , * { @ link ValueComparator # LESS _ THAN } or { @ link ValueComparator # EQUAL _ TO } * depending on whether this value is numerically greater than , * less than or equal to the value provided as argument . If the provided * value is a { @ link ValueList } , returns * { @ link ValueComparator # IN } if this object is a member of the list , or * { @ link ValueComparator # NOT _ IN } if not . Otherwise , returns * { @ link ValueComparator # UNKNOWN } . */ @ Override public ValueComparator compare ( Value o ) { } }
if ( o == null ) { return ValueComparator . NOT_EQUAL_TO ; } switch ( o . getType ( ) ) { case ORDINALVALUE : OrdinalValue other = ( OrdinalValue ) o ; if ( val == null || other . val == null ) { return ValueComparator . UNKNOWN ; } int c = this . index - other . index ; if ( c == 0 ) { return ValueComparator . EQUAL_TO ; } else if ( c > 0 ) { return ValueComparator . GREATER_THAN ; } else { return ValueComparator . LESS_THAN ; } case VALUELIST : ValueList < ? > vl = ( ValueList < ? > ) o ; return equals ( vl ) ? ValueComparator . EQUAL_TO : ValueComparator . NOT_EQUAL_TO ; default : return ValueComparator . NOT_EQUAL_TO ; }
public class DatabaseStoreImpl {
    /**
     * Automatic table creation.
     * The DDL must run outside any caller transaction, so the current local
     * transaction containment (and, failing that, the global transaction) is
     * suspended first and resumed in the finally block -- do not reorder.
     *
     * @param persistenceServiceUnit persistence service unit
     * @throws Exception if an error occurs creating tables.
     */
    private void createTables(PersistenceServiceUnit persistenceServiceUnit) throws Exception {
        // Run under a new transaction and commit right away
        LocalTransactionCurrent localTranCurrent = this.localTranCurrent;
        LocalTransactionCoordinator suspendedLTC = localTranCurrent.suspend();
        EmbeddableWebSphereTransactionManager tranMgr = this.tranMgr;
        // Only suspend the global transaction when there was no LTC to suspend.
        Transaction suspendedTran = suspendedLTC == null ? tranMgr.suspend() : null;
        boolean psuIsPUSI = (persistenceServiceUnit instanceof PersistenceServiceUnitImpl) ? true : false;
        // Serialize concurrent createTables calls against the same unit.
        synchronized (persistenceServiceUnit) {
            try {
                if (psuIsPUSI) {
                    // Hand the unit our transaction manager for the duration of the DDL.
                    ((PersistenceServiceUnitImpl) persistenceServiceUnit).setTransactionManager(tranMgr);
                }
                persistenceServiceUnit.createTables();
            } finally {
                // resume
                if (psuIsPUSI) {
                    ((PersistenceServiceUnitImpl) persistenceServiceUnit).setTransactionManager(null);
                }
                if (suspendedTran != null)
                    tranMgr.resume(suspendedTran);
                else if (suspendedLTC != null)
                    localTranCurrent.resume(suspendedLTC);
            }
        }
    }
}
public class BigMoney { /** * Returns a copy of this monetary value with a collection of monetary amounts subtracted . * This subtracts the specified amounts from this monetary amount , returning a new object . * The amounts are subtracted one by one as though using { @ link # minus ( BigMoneyProvider ) } . * The amounts must be in the same currency . * This instance is immutable and unaffected by this method . * @ param moniesToSubtract the monetary values to subtract , no null elements , not null * @ return the new instance with the input amounts subtracted , never null * @ throws CurrencyMismatchException if the currencies differ */ public BigMoney minus ( Iterable < ? extends BigMoneyProvider > moniesToSubtract ) { } }
BigDecimal total = amount ; for ( BigMoneyProvider moneyProvider : moniesToSubtract ) { BigMoney money = checkCurrencyEqual ( moneyProvider ) ; total = total . subtract ( money . amount ) ; } return with ( total ) ;
public class CDocumentCut {
    /**
     * Tests whether {@code n} is {@code ref} itself or a descendant of it,
     * by depth-first search over {@code ref}'s subtree.
     *
     * @param n   node to test
     * @param ref reference node
     * @return true if n is ref or a descendant of ref
     */
    private static boolean isDescendant(final Node n, final Node ref) {
        if (ref == null) {
            return false;
        }
        if (n == ref) {
            return true;
        }
        final NodeList children = ref.getChildNodes();
        final int count = children.getLength();
        for (int i = 0; i < count; i++) {
            if (isDescendant(n, children.item(i))) {
                return true;
            }
        }
        return false;
    }
}
public class CoreOAuthProviderSupport { /** * Loads the significant parameters ( name - to - value map ) that are to be used to calculate the signature base string . * The parameters will be encoded , per the spec section 9.1. * @ param request The request . * @ return The significan parameters . */ protected SortedMap < String , SortedSet < String > > loadSignificantParametersForSignatureBaseString ( HttpServletRequest request ) { } }
// first collect the relevant parameters . . . SortedMap < String , SortedSet < String > > significantParameters = new TreeMap < String , SortedSet < String > > ( ) ; // first pull from the request . . . Enumeration parameterNames = request . getParameterNames ( ) ; while ( parameterNames . hasMoreElements ( ) ) { String parameterName = ( String ) parameterNames . nextElement ( ) ; String [ ] values = request . getParameterValues ( parameterName ) ; if ( values == null ) { values = new String [ ] { "" } ; } parameterName = oauthEncode ( parameterName ) ; for ( String parameterValue : values ) { if ( parameterValue == null ) { parameterValue = "" ; } parameterValue = oauthEncode ( parameterValue ) ; SortedSet < String > significantValues = significantParameters . get ( parameterName ) ; if ( significantValues == null ) { significantValues = new TreeSet < String > ( ) ; significantParameters . put ( parameterName , significantValues ) ; } significantValues . add ( parameterValue ) ; } } // then take into account the header parameter values . . . Map < String , String > oauthParams = parseParameters ( request ) ; oauthParams . remove ( "realm" ) ; // remove the realm Set < String > parsedParams = oauthParams . keySet ( ) ; for ( String parameterName : parsedParams ) { String parameterValue = oauthParams . get ( parameterName ) ; if ( parameterValue == null ) { parameterValue = "" ; } parameterName = oauthEncode ( parameterName ) ; parameterValue = oauthEncode ( parameterValue ) ; SortedSet < String > significantValues = significantParameters . get ( parameterName ) ; if ( significantValues == null ) { significantValues = new TreeSet < String > ( ) ; significantParameters . put ( parameterName , significantValues ) ; } significantValues . add ( parameterValue ) ; } // remove the oauth signature parameter value . significantParameters . remove ( OAuthConsumerParameter . oauth_signature . toString ( ) ) ; return significantParameters ;
public class MemoryFileManager { /** * Returns a { @ linkplain JavaFileObject file object } for output * representing the specified class of the specified kind in the * given location . * < p > Optionally , this file manager might consider the sibling as * a hint for where to place the output . The exact semantics of * this hint is unspecified . The JDK compiler , javac , for * example , will place class files in the same directories as * originating source files unless a class file output directory * is provided . To facilitate this behavior , javac might provide * the originating source file as sibling when calling this * method . * @ param location a location * @ param className the name of a class * @ param kind the kind of file , must be one of { @ link * JavaFileObject . Kind # SOURCE SOURCE } or { @ link * JavaFileObject . Kind # CLASS CLASS } * @ param sibling a file object to be used as hint for placement ; * might be { @ code null } * @ return a file object for output * @ throws IllegalArgumentException if sibling is not known to * this file manager , or if the location is not known to this file * manager and the file manager does not support unknown * locations , or if the kind is not valid * @ throws IOException if an I / O error occurred , or if { @ link * # close } has been called and this file manager cannot be * reopened * @ throws IllegalStateException { @ link # close } has been called * and this file manager cannot be reopened */ @ Override public JavaFileObject getJavaFileForOutput ( JavaFileManager . Location location , String className , Kind kind , FileObject sibling ) throws IOException { } }
OutputMemoryJavaFileObject fo ; fo = new OutputMemoryJavaFileObject ( className , kind ) ; classObjects . put ( className , fo ) ; proc . debug ( DBG_FMGR , "Set out file: %s = %s\n" , className , fo ) ; if ( classListener != null ) { classListener . newClassFile ( fo , location , className , kind , sibling ) ; } return fo ;
public class TopoGraph { /** * Returns a user index value for the chain . */ int getChainUserIndex ( int chain , int index ) { } }
int i = getChainIndex_ ( chain ) ; AttributeStreamOfInt32 stream = m_chainIndices . get ( index ) ; if ( stream . size ( ) <= i ) return - 1 ; return stream . read ( i ) ;
public class ThreadSet { /** * Get threads blocked by any of current threads . */ public @ Nonnull SetType getBlockedThreads ( ) { } }
Set < ThreadLock > acquired = new HashSet < ThreadLock > ( ) ; for ( ThreadType thread : threads ) { acquired . addAll ( thread . getAcquiredLocks ( ) ) ; } Set < ThreadType > blocked = new HashSet < ThreadType > ( ) ; for ( ThreadType thread : runtime . getThreads ( ) ) { if ( acquired . contains ( thread . getWaitingToLock ( ) ) ) { blocked . add ( thread ) ; } } return runtime . getThreadSet ( blocked ) ;
public class XmlInOut { /** * Enable or disable all the behaviors . * @ param record The target record . * @ param bEnableRecordBehaviors Enable / disable all the record behaviors . * @ param bEnableFieldBehaviors Enable / disable all the field behaviors . */ public static void enableAllBehaviors ( Record record , boolean bEnableRecordBehaviors , boolean bEnableFieldBehaviors ) { } }
if ( record == null ) return ; record . setEnableListeners ( bEnableRecordBehaviors ) ; // Disable all file behaviors for ( int iFieldSeq = 0 ; iFieldSeq < record . getFieldCount ( ) ; iFieldSeq ++ ) { BaseField field = record . getField ( iFieldSeq ) ; field . setEnableListeners ( bEnableFieldBehaviors ) ; }
public class SeaGlassLookAndFeel {
    /**
     * Returns true if the Style should be updated in response to the specified
     * PropertyChangeEvent. This forwards to
     * <code>shouldUpdateStyleOnAncestorChanged</code> as necessary.
     *
     * Note: the original implementation compared property names with reference
     * equality ({@code ==}), which silently fails for non-interned names; this
     * version uses value equality via a String switch.
     *
     * @param event the property change event.
     * @return {@code true} if the style should be updated as a result of this
     *         property change, {@code false} otherwise.
     */
    public static boolean shouldUpdateStyle(PropertyChangeEvent event) {
        String eName = event.getPropertyName();
        if (eName == null) {
            return false;
        }
        switch (eName) {
            case "name":
            case "componentOrientation":
                // Always update on a name or component-orientation change.
                return true;
            case "ancestor":
                // Only update on an ancestor change when getting a valid parent
                // and the LookAndFeel wants this.
                if (event.getNewValue() == null) {
                    return false;
                }
                LookAndFeel laf = UIManager.getLookAndFeel();
                return laf instanceof SynthLookAndFeel
                        && ((SynthLookAndFeel) laf).shouldUpdateStyleOnAncestorChanged();
            /*
             * The SeaGlass overrides below would ideally live in the SeaGlass LAF
             * itself, but SynthLookAndFeel exposes no hook for subclass rules.
             */
            case "SeaGlass.Overrides":
            case "SeaGlass.Overrides.InheritDefaults":
            case "JComponent.sizeVariant":
                // Always update when these client properties change.
                return true;
            default:
                // Always update when an Apple-style variant client property changes.
                return eName.startsWith("JButton.") || eName.startsWith("JTextField.");
        }
    }
}
public class JobExecutionsInner {
    /**
     * Lists all executions in a job agent.
     *
     * @param resourceGroupName The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal.
     * @param serverName The name of the server.
     * @param jobAgentName The name of the job agent.
     * @param createTimeMin If specified, only job executions created at or after the specified time are included.
     * @param createTimeMax If specified, only job executions created before the specified time are included.
     * @param endTimeMin If specified, only job executions completed at or after the specified time are included.
     * @param endTimeMax If specified, only job executions completed before the specified time are included.
     * @param isActive If specified, only active or only completed job executions are included.
     * @param skip The number of elements in the collection to skip.
     * @param top The number of elements to return from the collection.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the PagedList&lt;JobExecutionInner&gt; object
     */
    public Observable<ServiceResponse<Page<JobExecutionInner>>> listByAgentWithServiceResponseAsync(final String resourceGroupName, final String serverName, final String jobAgentName, final DateTime createTimeMin, final DateTime createTimeMax, final DateTime endTimeMin, final DateTime endTimeMax, final Boolean isActive, final Integer skip, final Integer top) {
        // Fetch the first page, then recursively concatenate subsequent pages
        // by following nextPageLink until it is null.
        return listByAgentSinglePageAsync(resourceGroupName, serverName, jobAgentName, createTimeMin, createTimeMax, endTimeMin, endTimeMax, isActive, skip, top)
            .concatMap(new Func1<ServiceResponse<Page<JobExecutionInner>>, Observable<ServiceResponse<Page<JobExecutionInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<JobExecutionInner>>> call(ServiceResponse<Page<JobExecutionInner>> page) {
                    String nextPageLink = page.body().nextPageLink();
                    if (nextPageLink == null) {
                        // Last page: terminate the stream with this page.
                        return Observable.just(page);
                    }
                    // Emit this page, then the (lazily fetched) remaining pages.
                    return Observable.just(page).concatWith(listByAgentNextWithServiceResponseAsync(nextPageLink));
                }
            });
    }
}
public class pqbinding { /** * Use this API to fetch all the pqbinding resources that are configured on netscaler . * This uses pqbinding _ args which is a way to provide additional arguments while fetching the resources . */ public static pqbinding [ ] get ( nitro_service service , pqbinding_args args ) throws Exception { } }
pqbinding obj = new pqbinding ( ) ; options option = new options ( ) ; option . set_args ( nitro_util . object_to_string_withoutquotes ( args ) ) ; pqbinding [ ] response = ( pqbinding [ ] ) obj . get_resources ( service , option ) ; return response ;