signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class HAProxyMessage { /** * Validate an address ( IPv4 , IPv6 , Unix Socket ) * @ param address human - readable address * @ param addrFamily the { @ link AddressFamily } to check the address against * @ throws HAProxyProtocolException if the address is invalid */ private static void checkAddress ( String address , AddressFamily addrFamily ) { } }
if ( addrFamily == null ) { throw new NullPointerException ( "addrFamily" ) ; } switch ( addrFamily ) { case AF_UNSPEC : if ( address != null ) { throw new HAProxyProtocolException ( "unable to validate an AF_UNSPEC address: " + address ) ; } return ; case AF_UNIX : return ; } if ( address == null ) { throw new NullPointerException ( "address" ) ; } switch ( addrFamily ) { case AF_IPv4 : if ( ! NetUtil . isValidIpV4Address ( address ) ) { throw new HAProxyProtocolException ( "invalid IPv4 address: " + address ) ; } break ; case AF_IPv6 : if ( ! NetUtil . isValidIpV6Address ( address ) ) { throw new HAProxyProtocolException ( "invalid IPv6 address: " + address ) ; } break ; default : throw new Error ( ) ; }
public class JpaDeploymentManagement {

    /**
     * Assigns the {@link DistributionSet} to all {@link Target}s given by their IDs, with a
     * specific {@link ActionType} and forced time. For offline assignments (i.e. not managed
     * by hawkBit) the strategy differs: pending targets are ignored, completed actions are
     * created, both installed and assigned DS are set and the target switches to
     * {@code TargetUpdateStatus.IN_SYNC}, and no {@code TargetAssignDistributionSetEvent}
     * is sent.
     *
     * @param dsID                  the ID of the distribution set to assign
     * @param targetsWithActionType all targets together with their action type
     * @param actionMessage         optional message written into the action status
     * @param assignmentStrategy    the assignment strategy (online/offline)
     * @return the assignment result
     * @throws IncompleteDistributionSetException if mandatory {@link SoftwareModuleType}s
     *         are not assigned as defined by the {@link DistributionSetType}
     */
    private DistributionSetAssignmentResult assignDistributionSetToTargets(final Long dsID,
            final Collection<TargetWithActionType> targetsWithActionType, final String actionMessage,
            final AbstractDsAssignmentStrategy assignmentStrategy) {
        final JpaDistributionSet distributionSetEntity = getAndValidateDsById(dsID);
        final List<String> controllerIDs = getControllerIdsForAssignmentAndCheckQuota(targetsWithActionType,
                distributionSetEntity);
        final List<JpaTarget> targetEntities = assignmentStrategy.findTargetsForAssignment(controllerIDs,
                distributionSetEntity.getId());
        if (targetEntities.isEmpty()) {
            // detaching as it is not necessary to persist the set itself
            entityManager.detach(distributionSetEntity);
            // return with nothing as all targets had the DS already assigned
            return new DistributionSetAssignmentResult(Collections.emptyList(), 0, targetsWithActionType.size(),
                    Collections.emptyList(), targetManagement);
        }
        // split tIDs length into max entries in-statement because many databases
        // have a constraint on max entries in in-statements, e.g. Oracle with a
        // maximum of 1000 elements, so we need to split the entries here and
        // execute multiple statements
        final List<List<Long>> targetEntitiesIdsChunks = Lists.partition(
                targetEntities.stream().map(Target::getId).collect(Collectors.toList()),
                Constants.MAX_ENTRIES_IN_STATEMENT);
        // override all active actions and set them into canceling state; we
        // need to remember which ones we switched to canceling state, because
        // for targets we changed to canceling we don't want to publish the
        // new action update event.
        final Set<Long> cancelingTargetEntitiesIds = closeOrCancelActiveActions(assignmentStrategy,
                targetEntitiesIdsChunks);
        // cancel all scheduled actions which are in-active; these actions were
        // not active before and the manual assignment which has been done
        // cancels them
        targetEntitiesIdsChunks.forEach(this::cancelInactiveScheduledActionsForTargets);
        setAssignedDistributionSetAndTargetUpdateStatus(assignmentStrategy, distributionSetEntity,
                targetEntitiesIdsChunks);
        final Map<String, JpaAction> controllerIdsToActions = createActions(targetsWithActionType, targetEntities,
                assignmentStrategy, distributionSetEntity);
        // create initial action status when the action is created so we remember
        // the initial running status, because we will change the status
        // of the action itself; with this action status we have a nicer
        // action history.
        createActionsStatus(controllerIdsToActions.values(), assignmentStrategy, actionMessage);
        detachEntitiesAndSendAssignmentEvents(distributionSetEntity, targetEntities, assignmentStrategy,
                cancelingTargetEntitiesIds, controllerIdsToActions);
        // result counts: assigned = newly assigned targets, already-assigned = requested minus assigned
        return new DistributionSetAssignmentResult(
                targetEntities.stream().map(Target::getControllerId).collect(Collectors.toList()),
                targetEntities.size(), controllerIDs.size() - targetEntities.size(),
                Lists.newArrayList(controllerIdsToActions.values()), targetManagement);
    }
}
public class ServiceTaskBase { /** * Handle all exception occurred while doing the task . * @ param e the exception to handle */ private void handleException ( final Throwable e ) { } }
if ( e instanceof ServiceException ) { final ServiceException se = ( ServiceException ) e ; // Only log with warn level to let the application continue its workflow even in developer mode LOGGER . log ( SERVICE_TASK_EXCEPTION , se , se . getExplanation ( ) , getServiceHandlerName ( ) ) ; } else { // In developer mode it will stop the application throwing another exception LOGGER . log ( SERVICE_TASK_ERROR , e , getServiceHandlerName ( ) ) ; } this . wave . status ( Status . Failed ) ; final Class < ? extends Throwable > managedException = e instanceof ServiceException ? e . getCause ( ) . getClass ( ) : e . getClass ( ) ; // Get the exact exception type final Wave exceptionHandlerWave = this . wave . waveType ( ) . waveExceptionHandler ( ) . get ( managedException ) ; if ( exceptionHandlerWave != null ) { // Fill the wave with useful data exceptionHandlerWave . fromClass ( this . service . getClass ( ) ) . add ( JRebirthItems . exceptionItem , e ) . add ( JRebirthItems . waveItem , this . wave ) . relatedWave ( this . wave ) ; LOGGER . log ( SERVICE_TASK_HANDLE_EXCEPTION , e , e . getClass ( ) . getSimpleName ( ) , getServiceHandlerName ( ) ) ; // Send the exception wave to interested components this . service . sendWave ( exceptionHandlerWave ) ; } else { LOGGER . log ( SERVICE_TASK_NOT_MANAGED_EXCEPTION , e , e . getClass ( ) . getSimpleName ( ) , getServiceHandlerName ( ) ) ; }
public class TorqueModelDef {

    /**
     * Adds (or completes) the indirection table for the given m:n collection descriptor,
     * creating its columns from the primary keys of both ends and, where configured,
     * database foreign keys back to the owner and element tables.
     *
     * @param collDef The collection descriptor
     */
    private void addIndirectionTable(CollectionDescriptorDef collDef) {
        String tableName = collDef.getProperty(PropertyHelper.OJB_PROPERTY_INDIRECTION_TABLE);
        TableDef tableDef = getTable(tableName);

        // Reuse the table if another collection already declared it.
        if (tableDef == null) {
            tableDef = new TableDef(tableName);
            addTable(tableDef);
        }
        if (collDef.hasProperty(PropertyHelper.OJB_PROPERTY_INDIRECTION_TABLE_DOCUMENTATION)) {
            tableDef.setProperty(PropertyHelper.OJB_PROPERTY_TABLE_DOCUMENTATION,
                    collDef.getProperty(PropertyHelper.OJB_PROPERTY_INDIRECTION_TABLE_DOCUMENTATION));
        }

        // we add columns for every primarykey in this and the element type
        //   collection.foreignkeys <-> ownerclass.primarykeys
        //   collection.remote-foreignkeys <-> elementclass.primarykeys
        // we also add foreignkeys to the table
        //   name is empty (default foreignkey)
        //   remote table = table of ownerclass/elementclass
        //   local columns = columns in indirection table
        //   remote columns = columns of corresponding primarykeys in ownerclass/elementclass
        ClassDescriptorDef ownerClassDef = (ClassDescriptorDef) collDef.getOwner();
        ModelDef modelDef = (ModelDef) ownerClassDef.getOwner();
        String elementClassName = collDef.getProperty(PropertyHelper.OJB_PROPERTY_ELEMENT_CLASS_REF);
        ClassDescriptorDef elementClassDef = modelDef.getClass(elementClassName);
        ArrayList localPrimFields = ownerClassDef.getPrimaryKeys();
        ArrayList remotePrimFields = elementClassDef.getPrimaryKeys();
        String localKeyList = collDef.getProperty(PropertyHelper.OJB_PROPERTY_FOREIGNKEY);
        String remoteKeyList = collDef.getProperty(PropertyHelper.OJB_PROPERTY_REMOTE_FOREIGNKEY);
        String ownerTable = getTargetTable(ownerClassDef, tableName, localKeyList);
        String elementTable = getTargetTable(elementClassDef, tableName, remoteKeyList);
        CommaListIterator localKeys = new CommaListIterator(localKeyList);
        CommaListIterator localKeyDocs =
                new CommaListIterator(collDef.getProperty(PropertyHelper.OJB_PROPERTY_FOREIGNKEY_DOCUMENTATION));
        CommaListIterator remoteKeys = new CommaListIterator(remoteKeyList);
        CommaListIterator remoteKeyDocs =
                new CommaListIterator(collDef.getProperty(PropertyHelper.OJB_PROPERTY_REMOTE_FOREIGNKEY_DOCUMENTATION));
        ArrayList localColumns = new ArrayList();
        ArrayList remoteColumns = new ArrayList();
        boolean asPrimarykeys =
                collDef.getBooleanProperty(PropertyHelper.OJB_PROPERTY_INDIRECTION_TABLE_PRIMARYKEYS, false);
        FieldDescriptorDef fieldDef;
        ColumnDef columnDef;
        String relationName;
        String name;
        int idx;

        // First pass: columns referencing the owner class' primary keys.
        // NOTE(review): assumes the foreignkey list and the owner's primary-key list are
        // index-aligned and of equal length — an out-of-sync descriptor would throw here.
        for (idx = 0; localKeys.hasNext(); idx++) {
            fieldDef = (FieldDescriptorDef) localPrimFields.get(idx);
            name = localKeys.getNext();
            columnDef = tableDef.getColumn(name);
            if (columnDef == null) {
                columnDef = new ColumnDef(name);
                tableDef.addColumn(columnDef);
            }
            columnDef.setProperty(PropertyHelper.TORQUE_PROPERTY_TYPE,
                    fieldDef.getProperty(PropertyHelper.OJB_PROPERTY_JDBC_TYPE));
            columnDef.setProperty(PropertyHelper.TORQUE_PROPERTY_SIZE, fieldDef.getSizeConstraint());
            if (asPrimarykeys) {
                columnDef.setProperty(PropertyHelper.TORQUE_PROPERTY_PRIMARYKEY, "true");
            }
            if (localKeyDocs.hasNext()) {
                columnDef.setProperty(PropertyHelper.OJB_PROPERTY_COLUMN_DOCUMENTATION, localKeyDocs.getNext());
            }
            localColumns.add(name);
            remoteColumns.add(fieldDef.getProperty(PropertyHelper.OJB_PROPERTY_COLUMN));
        }
        if (collDef.getBooleanProperty(PropertyHelper.OJB_PROPERTY_DATABASE_FOREIGNKEY, true)) {
            relationName = collDef.getProperty(PropertyHelper.TORQUE_PROPERTY_RELATION_NAME);
            if ((relationName != null) && (ownerTable != null)) {
                tableDef.addForeignkey(relationName, ownerTable, localColumns, remoteColumns);
            }
        }

        // Second pass: columns referencing the element class' primary keys.
        localColumns.clear();
        remoteColumns.clear();
        for (idx = 0; remoteKeys.hasNext(); idx++) {
            fieldDef = (FieldDescriptorDef) remotePrimFields.get(idx);
            name = remoteKeys.getNext();
            columnDef = tableDef.getColumn(name);
            if (columnDef == null) {
                columnDef = new ColumnDef(name);
                tableDef.addColumn(columnDef);
            }
            columnDef.setProperty(PropertyHelper.TORQUE_PROPERTY_TYPE,
                    fieldDef.getProperty(PropertyHelper.OJB_PROPERTY_JDBC_TYPE));
            columnDef.setProperty(PropertyHelper.TORQUE_PROPERTY_SIZE, fieldDef.getSizeConstraint());
            if (asPrimarykeys) {
                columnDef.setProperty(PropertyHelper.TORQUE_PROPERTY_PRIMARYKEY, "true");
            }
            if (remoteKeyDocs.hasNext()) {
                columnDef.setProperty(PropertyHelper.OJB_PROPERTY_COLUMN_DOCUMENTATION, remoteKeyDocs.getNext());
            }
            localColumns.add(name);
            remoteColumns.add(fieldDef.getProperty(PropertyHelper.OJB_PROPERTY_COLUMN));
        }

        // The inverse foreign key: honor the remote collection's setting if one exists,
        // otherwise fall back to this collection's own setting.
        CollectionDescriptorDef elementCollDef = collDef.getRemoteCollection();
        if (((elementCollDef != null)
                && elementCollDef.getBooleanProperty(PropertyHelper.OJB_PROPERTY_DATABASE_FOREIGNKEY, true))
                || ((elementCollDef == null)
                && collDef.getBooleanProperty(PropertyHelper.OJB_PROPERTY_DATABASE_FOREIGNKEY, true))) {
            relationName = collDef.getProperty(PropertyHelper.TORQUE_PROPERTY_INV_RELATION_NAME);
            if ((relationName != null) && (elementTable != null)) {
                tableDef.addForeignkey(relationName, elementTable, localColumns, remoteColumns);
            }
        }
    }
}
public class DropCachesRpc { /** * Drops in memory caches . */ private void dropCaches ( final TSDB tsdb , final Channel chan ) { } }
LOG . warn ( chan + " Dropping all in-memory caches." ) ; tsdb . dropCaches ( ) ;
public class GapFunction { /** * Returns whether two abstract parameters have a gap that satisfies * the constraints of the gap function . If either provided parameter is * < code > null < / code > , this method returns < code > false < / code > . * @ param lhs the first { @ link AbstractParameter } . * @ param rhs the second { @ link AbstractParameter } . * @ return < code > true < / code > or < code > false < / code > . */ public final boolean execute ( TemporalProposition lhs , TemporalProposition rhs ) { } }
if ( lhs == null || rhs == null ) { return false ; } else { return execute ( lhs . getInterval ( ) , rhs . getInterval ( ) ) ; }
public class TransactionTopologyBuilder { /** * Build spout to provide the compatibility with Storm ' s ack mechanism * @ param id spout Id * @ param spout * @ return */ public SpoutDeclarer setSpoutWithAck ( String id , IRichSpout spout , Number parallelismHint ) { } }
return setSpout ( id , new AckTransactionSpout ( spout ) , parallelismHint ) ;
public class DefaultBeanContext {

    /**
     * Executes the creation of a bean. The returned value can be null if a
     * factory method returned null.
     *
     * @param resolutionContext The {@link BeanResolutionContext}
     * @param beanDefinition    The {@link BeanDefinition}
     * @param qualifier         The {@link Qualifier}
     * @param isSingleton       Whether the bean is a singleton
     * @param argumentValues    Any argument values passed to create the bean
     * @param <T>               The bean generic type
     * @return The created bean, possibly {@code null} for factory beans
     */
    protected @Nullable <T> T doCreateBean(@Nullable BeanResolutionContext resolutionContext,
            @Nonnull BeanDefinition<T> beanDefinition, @Nullable Qualifier<T> qualifier, boolean isSingleton,
            @Nullable Map<String, Object> argumentValues) {
        Qualifier declaredQualifier = resolveDeclaredQualifier(beanDefinition);
        T bean;
        Class<T> beanType = beanDefinition.getBeanType();

        // Fast path: an existing singleton registration satisfies the request — first
        // under the declared qualifier, then (if qualified) under the unqualified key.
        if (isSingleton) {
            BeanRegistration<T> beanRegistration = singletonObjects.get(new BeanKey(beanDefinition, declaredQualifier));
            if (beanRegistration != null) {
                if (qualifier == null) {
                    return beanRegistration.bean;
                } else if (qualifier.reduce(beanType, Stream.of(beanRegistration.beanDefinition)).findFirst().isPresent()) {
                    return beanRegistration.bean;
                }
            } else if (qualifier != null) {
                beanRegistration = singletonObjects.get(new BeanKey(beanDefinition, null));
                if (beanRegistration != null) {
                    if (qualifier.reduce(beanType, Stream.of(beanRegistration.beanDefinition)).findFirst().isPresent()) {
                        return beanRegistration.bean;
                    }
                }
            }
        }

        if (resolutionContext == null) {
            resolutionContext = new DefaultBeanResolutionContext(this, beanDefinition);
        }

        if (beanDefinition instanceof BeanFactory) {
            BeanFactory<T> beanFactory = (BeanFactory<T>) beanDefinition;
            try {
                if (beanFactory instanceof ParametrizedBeanFactory) {
                    // Parametrized factories require caller-supplied argument values,
                    // converted to the declared argument types before building.
                    ParametrizedBeanFactory<T> parametrizedBeanFactory = (ParametrizedBeanFactory<T>) beanFactory;
                    Argument<?>[] requiredArguments = parametrizedBeanFactory.getRequiredArguments();
                    if (argumentValues == null) {
                        throw new BeanInstantiationException(resolutionContext, "Missing bean arguments for type: "
                                + beanType.getName() + ". Requires arguments: " + ArrayUtils.toString(requiredArguments));
                    }
                    Map<String, Object> convertedValues = new LinkedHashMap<>(argumentValues);
                    for (Argument<?> requiredArgument : requiredArguments) {
                        Object val = argumentValues.get(requiredArgument.getName());
                        // A missing value is only acceptable for @Nullable arguments.
                        if (val == null && !requiredArgument.getAnnotationMetadata().hasDeclaredAnnotation(Nullable.class)) {
                            throw new BeanInstantiationException(resolutionContext,
                                    "Missing bean argument [" + requiredArgument + "].");
                        }
                        // effectively-final copy for the lambda below
                        BeanResolutionContext finalResolutionContext = resolutionContext;
                        Object convertedValue = null;
                        if (val != null) {
                            convertedValue = ConversionService.SHARED.convert(val, requiredArgument).orElseThrow(() ->
                                    new BeanInstantiationException(finalResolutionContext, "Invalid bean argument ["
                                            + requiredArgument + "]. Cannot convert object [" + val
                                            + "] to required type: " + requiredArgument.getType())
                            );
                        }
                        convertedValues.put(requiredArgument.getName(), convertedValue);
                    }
                    bean = parametrizedBeanFactory.build(resolutionContext, this, beanDefinition, convertedValues);
                } else {
                    bean = beanFactory.build(resolutionContext, this, beanDefinition);
                    if (bean == null) {
                        // Iterable/factory definitions may legitimately yield null.
                        if (!(beanDefinition.isIterable()
                                || beanDefinition.getAnnotationMetadata().hasAnnotation(Factory.class))) {
                            throw new BeanInstantiationException(resolutionContext,
                                    "Bean Factory [" + beanFactory + "] returned null");
                        }
                    } else {
                        if (bean instanceof Qualified) {
                            ((Qualified) bean).$withBeanQualifier(declaredQualifier);
                        }
                    }
                }
            } catch (Throwable e) {
                // Rethrow known DI exceptions unchanged; wrap everything else with
                // whichever context carries the most useful path information.
                if (e instanceof DependencyInjectionException) {
                    throw e;
                }
                if (e instanceof BeanInstantiationException) {
                    throw e;
                } else {
                    if (!resolutionContext.getPath().isEmpty()) {
                        throw new BeanInstantiationException(resolutionContext, e);
                    } else {
                        throw new BeanInstantiationException(beanDefinition, e);
                    }
                }
            }
        } else {
            // No factory: reflectively invoke the constructor, resolving each
            // constructor argument from the context, then perform field/method injection.
            ConstructorInjectionPoint<T> constructor = beanDefinition.getConstructor();
            Argument[] requiredConstructorArguments = constructor.getArguments();
            if (requiredConstructorArguments.length == 0) {
                bean = constructor.invoke();
            } else {
                Object[] constructorArgs = new Object[requiredConstructorArguments.length];
                for (int i = 0; i < requiredConstructorArguments.length; i++) {
                    Class argument = requiredConstructorArguments[i].getType();
                    constructorArgs[i] = getBean(resolutionContext, argument);
                }
                bean = constructor.invoke(constructorArgs);
            }
            inject(resolutionContext, null, bean);
        }

        if (bean != null) {
            // Notify creation listeners (skipping listeners themselves to avoid recursion);
            // a listener may replace the bean but must not return null.
            if (!BeanCreatedEventListener.class.isInstance(bean)) {
                if (CollectionUtils.isNotEmpty(beanCreationEventListeners)) {
                    BeanKey beanKey = new BeanKey(beanDefinition, qualifier);
                    for (BeanRegistration<BeanCreatedEventListener> registration : beanCreationEventListeners) {
                        BeanDefinition<BeanCreatedEventListener> definition = registration.getBeanDefinition();
                        List<Argument<?>> typeArguments = definition.getTypeArguments(BeanCreatedEventListener.class);
                        if (CollectionUtils.isEmpty(typeArguments)
                                || typeArguments.get(0).getType().isAssignableFrom(beanType)) {
                            BeanCreatedEventListener listener = registration.getBean();
                            bean = (T) listener.onCreated(new BeanCreatedEvent(this, beanDefinition, beanKey, bean));
                            if (bean == null) {
                                throw new BeanInstantiationException(resolutionContext,
                                        "Listener [" + listener + "] returned null from onCreated event");
                            }
                        }
                    }
                }
            }
            if (beanDefinition instanceof ValidatedBeanDefinition) {
                bean = ((ValidatedBeanDefinition<T>) beanDefinition).validate(resolutionContext, bean);
            }
            if (LOG_LIFECYCLE.isDebugEnabled()) {
                LOG_LIFECYCLE.debug("Created bean [{}] from definition [{}] with qualifier [{}]", bean, beanDefinition,
                        qualifier);
            }
        }
        return bean;
    }
}
public class AbstractLifeCycle { /** * First changes the state from a current value to a target * value . Second , publishes the change of state to all the * listeners through { @ link # getStateListeners ( ) } . * @ param from the current state * @ param to the new state * @ throws Exception if the state cannot be changed or some * { @ link StateListener } failed to apply the same * change . */ protected void changeState ( State from , State to ) throws Exception { } }
if ( ! this . state . compareAndSet ( from , to ) ) { throw new Exception ( "Cannot change state from " + from + " to " + to + " for " + this ) ; } publishState ( from , to ) ;
public class KeyIgnoringCRAMOutputFormat { /** * < code > setSAMHeader < / code > or < code > readSAMHeaderFrom < / code > must have * been called first . */ @ Override public RecordWriter < K , SAMRecordWritable > getRecordWriter ( TaskAttemptContext ctx ) throws IOException { } }
return getRecordWriter ( ctx , getDefaultWorkFile ( ctx , "" ) ) ;
public class ResourceLoader { /** * Load a resource via the thread context classloader . If security permissions don ' t allow * this fallback to loading via current classloader . * @ param name a resource name * @ return an { @ link InputStream } or null if resource is not found */ public static InputStream getResourceAsStream ( String name ) { } }
InputStream stream = null ; try { stream = Thread . currentThread ( ) . getContextClassLoader ( ) . getResourceAsStream ( name ) ; } catch ( SecurityException e ) { LOG . info ( "Unable to access context classloader, using default. " + e . getMessage ( ) ) ; } if ( stream == null ) { stream = ResourceLoader . class . getResourceAsStream ( "/" + name ) ; } return stream ;
public class SetRiskConfigurationRequestMarshaller {

    /**
     * Marshalls the given request object field by field into the protocol marshaller.
     * The order of the marshall calls determines the serialized field order.
     *
     * @param setRiskConfigurationRequest the request to marshall; must not be null
     * @param protocolMarshaller          the target marshaller
     * @throws SdkClientException if the request is null or any field fails to marshall
     */
    public void marshall(SetRiskConfigurationRequest setRiskConfigurationRequest, ProtocolMarshaller protocolMarshaller) {
        if (setRiskConfigurationRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(setRiskConfigurationRequest.getUserPoolId(), USERPOOLID_BINDING);
            protocolMarshaller.marshall(setRiskConfigurationRequest.getClientId(), CLIENTID_BINDING);
            protocolMarshaller.marshall(setRiskConfigurationRequest.getCompromisedCredentialsRiskConfiguration(),
                    COMPROMISEDCREDENTIALSRISKCONFIGURATION_BINDING);
            protocolMarshaller.marshall(setRiskConfigurationRequest.getAccountTakeoverRiskConfiguration(),
                    ACCOUNTTAKEOVERRISKCONFIGURATION_BINDING);
            protocolMarshaller.marshall(setRiskConfigurationRequest.getRiskExceptionConfiguration(),
                    RISKEXCEPTIONCONFIGURATION_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure, preserving the original as the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class OperationDefinition { /** * syntactic sugar */ public OperationDefinitionContactComponent addContact ( ) { } }
OperationDefinitionContactComponent t = new OperationDefinitionContactComponent ( ) ; if ( this . contact == null ) this . contact = new ArrayList < OperationDefinitionContactComponent > ( ) ; this . contact . add ( t ) ; return t ;
public class ServletHttpResponse {

    /**
     * Sets an integer response header, delegating to the wrapped HTTP response.
     * If the response has already been committed, the resulting
     * {@code IllegalStateException} is deliberately swallowed (logged at ignore level),
     * matching servlet semantics where late header changes are no-ops.
     *
     * @param name  the header name
     * @param value the integer header value
     */
    public void setIntHeader(String name, int value) {
        try {
            _httpResponse.setIntField(name, value);
        } catch (IllegalStateException e) {
            // Intentionally ignored: headers cannot change after commit.
            LogSupport.ignore(log, e);
        }
    }
}
public class JobStreamsInner {

    /**
     * Retrieves a list of job streams identified by job id, following pagination
     * links until exhausted: each emitted page is concatenated with an observable
     * that fetches the next page, terminating when no next-page link remains.
     *
     * @param resourceGroupName     Name of an Azure Resource group.
     * @param automationAccountName The name of the automation account.
     * @param jobId                 The job Id.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the PagedList&lt;JobStreamInner&gt; object
     */
    public Observable<ServiceResponse<Page<JobStreamInner>>> listByJobWithServiceResponseAsync(
            final String resourceGroupName, final String automationAccountName, final String jobId) {
        return listByJobSinglePageAsync(resourceGroupName, automationAccountName, jobId)
                .concatMap(new Func1<ServiceResponse<Page<JobStreamInner>>, Observable<ServiceResponse<Page<JobStreamInner>>>>() {
                    @Override
                    public Observable<ServiceResponse<Page<JobStreamInner>>> call(ServiceResponse<Page<JobStreamInner>> page) {
                        String nextPageLink = page.body().nextPageLink();
                        // No further pages: emit this page and complete.
                        if (nextPageLink == null) {
                            return Observable.just(page);
                        }
                        // Emit this page, then recursively fetch the rest.
                        return Observable.just(page).concatWith(listByJobNextWithServiceResponseAsync(nextPageLink));
                    }
                });
    }
}
public class MediaReader { /** * Add the requested query string arguments to the Request . * @ param request Request to add query string arguments to */ private void addQueryParams ( final Request request ) { } }
if ( absoluteDateCreated != null ) { request . addQueryParam ( "DateCreated" , absoluteDateCreated . toString ( Request . QUERY_STRING_DATE_TIME_FORMAT ) ) ; } else if ( rangeDateCreated != null ) { request . addQueryDateTimeRange ( "DateCreated" , rangeDateCreated ) ; } if ( getPageSize ( ) != null ) { request . addQueryParam ( "PageSize" , Integer . toString ( getPageSize ( ) ) ) ; }
public class GeometryExpression { /** * Returns a geometric object that represents the * Point set symmetric difference of this geometric object with anotherGeometry . * @ param geometry other geometry * @ return symmetric difference between this and the geometry */ public GeometryExpression < Geometry > symDifference ( Expression < ? extends Geometry > geometry ) { } }
return GeometryExpressions . geometryOperation ( SpatialOps . SYMDIFFERENCE , mixin , geometry ) ;
public class HCHead { /** * Append some JavaScript code at the specified index * @ param nIndex * The index where the JS should be added ( counting only JS elements ) * @ param aJS * The JS to be added . May not be < code > null < / code > . * @ return this */ @ Nonnull public final HCHead addJSAt ( @ Nonnegative final int nIndex , @ Nonnull final IHCNode aJS ) { } }
ValueEnforcer . notNull ( aJS , "JS" ) ; if ( ! HCJSNodeDetector . isJSNode ( aJS ) ) throw new IllegalArgumentException ( aJS + " is not a valid JS node!" ) ; m_aJS . add ( nIndex , aJS ) ; return this ;
public class TypeDef { /** * Creates a { @ link ClassRef } for the current definition with the specified arguments . * @ param arguments The arguments to be passed to the reference . */ public ClassRef toReference ( TypeRef ... arguments ) { } }
List < TypeRef > actualArguments = new ArrayList < TypeRef > ( ) ; for ( int i = 0 ; i < parameters . size ( ) ; i ++ ) { if ( i < arguments . length ) { actualArguments . add ( arguments [ i ] ) ; } else { actualArguments . add ( new WildcardRef ( ) ) ; } } return new ClassRefBuilder ( ) . withDefinition ( this ) . withArguments ( actualArguments ) . withAttributes ( getAttributes ( ) ) . build ( ) ;
public class ChronoHistory {

    /**
     * Yields the variant of this historic calendar as a parseable text key,
     * e.g. {@code historic-SINGLE_CUTOVER_DATE:cutover=...:ancient-julian-leap-years=[...]:...}.
     * Proleptic variants carry only a {@code :no-cutover} suffix; cutover variants
     * additionally encode the cutover date and then deliberately fall through to the
     * default section, which appends leap-year pattern, new-year strategy and era
     * preference.
     *
     * @return text describing the internal state
     * @see #from(String)
     * @since 3.11/4.8
     */
    @Override
    public String getVariant() {
        StringBuilder sb = new StringBuilder(64);
        sb.append("historic-");
        sb.append(this.variant.name());
        switch (this.variant) {
            case PROLEPTIC_GREGORIAN:
            case PROLEPTIC_JULIAN:
            case PROLEPTIC_BYZANTINE:
                // Proleptic calendars have no cutover and no further attributes.
                sb.append(":no-cutover");
                break;
            case INTRODUCTION_ON_1582_10_15:
            case SINGLE_CUTOVER_DATE:
                sb.append(":cutover=");
                sb.append(this.getGregorianCutOverDate());
                // fall-through (intentional): cutover variants also get the default attributes
            default:
                sb.append(":ancient-julian-leap-years=");
                if (this.ajly != null) {
                    int[] pattern = this.ajly.getPattern();
                    sb.append('[');
                    sb.append(pattern[0]);
                    for (int i = 1; i < pattern.length; i++) {
                        sb.append(',');
                        sb.append(pattern[i]);
                    }
                    sb.append(']');
                } else {
                    sb.append("[]");
                }
                sb.append(":new-year-strategy=");
                sb.append(this.getNewYearStrategy());
                sb.append(":era-preference=");
                sb.append(this.getEraPreference());
        }
        return sb.toString();
    }
}
public class ReflectorLoader { /** * Loads a class per name . Unlike a normal loadClass this version * behaves different during a class definition . In that case it * checks if the class we want to load is Reflector and returns * class if the check is successful . If it is not during a class * definition it just calls the super class version of loadClass . * @ param name of the class to load * @ param resolve is true if the class should be resolved * @ see Reflector * @ see ClassLoader # loadClass ( String , boolean ) */ protected synchronized Class loadClass ( String name , boolean resolve ) throws ClassNotFoundException { } }
if ( inDefine ) { if ( name . equals ( REFLECTOR ) ) return Reflector . class ; } return super . loadClass ( name , resolve ) ;
public class LoggingConfigDeploymentProcessor { /** * Finds the configuration file to be used and returns the first one found . * Preference is for { @ literal logging . properties } or { @ literal jboss - logging . properties } . * @ param resourceRoot the resource to check . * @ return the configuration file if found , otherwise { @ code null } . * @ throws DeploymentUnitProcessingException if an error occurs . */ private VirtualFile findConfigFile ( ResourceRoot resourceRoot ) throws DeploymentUnitProcessingException { } }
final VirtualFile root = resourceRoot . getRoot ( ) ; // First check META - INF VirtualFile file = root . getChild ( "META-INF" ) ; VirtualFile result = findConfigFile ( file ) ; if ( result == null ) { file = root . getChild ( "WEB-INF/classes" ) ; result = findConfigFile ( file ) ; } return result ;
public class ParameterParser { /** * Returns the value of a given parameter . Null if the parameter * does not present . */ public String get ( String name ) { } }
Parameter param = parameters . get ( name ) ; if ( param != null ) return param . value ; else return null ;
public class HttpSession { /** * Checks if a particular cookie has the same value as one of the token values in the HTTP * session . If the { @ literal cookie } parameter is null , the session matches the token if it does * not have a value for the corresponding token . * @ param tokenName the token name * @ param cookie the cookie * @ return true , if true */ public boolean matchesToken ( String tokenName , HttpCookie cookie ) { } }
// Check if the cookie is null if ( cookie == null ) { return tokenValues . containsKey ( tokenName ) ? false : true ; } // Check the value of the token from the cookie String tokenValue = getTokenValue ( tokenName ) ; if ( tokenValue != null && tokenValue . equals ( cookie . getValue ( ) ) ) { return true ; } return false ;
public class JsonParseUtil {

    /**
     * Parses the current token as a character: a JSON null yields {@code null},
     * a one-character JSON string yields that character.
     *
     * NOTE(review): the original doc declared {@code @throws JsonFormatException},
     * but the body actually throws {@link IllegalStateException} for both a
     * wrong-length string and an unexpected token type — confirm which contract
     * callers rely on before changing either.
     *
     * @param parser the pull parser positioned on the value token
     * @return the parsed {@link Character}, or {@code null} for a JSON null
     * @throws IOException         propagated from the parser
     * @throws JsonFormatException declared for API compatibility (see note above)
     * @author vvakame
     */
    public static Character parserCharacter(JsonPullParser parser) throws IOException, JsonFormatException {
        State eventType = parser.getEventType();
        if (eventType == State.VALUE_NULL) {
            return null;
        } else if (eventType == State.VALUE_STRING) {
            String str = parser.getValueString();
            // A character value must be encoded as a string of exactly one char.
            if (str.length() != 1) {
                throw new IllegalStateException("unexpected value. expecte string size is 1. but get=" + str);
            }
            return str.charAt(0);
        } else {
            throw new IllegalStateException("unexpected state. expected=VALUE_STRING, but get=" + eventType.toString());
        }
    }
}
public class PasswordEditText { /** * Adds all helper texts , which are contained by a specific array . The helper texts are added in * the given order . * @ param resourceIds * An array , which contains the resource IDs of the helper texts , which should be added , * as an array of the type { @ link CharSequence } , or an empty array , if no helper texts * should be added */ public final void addAllHelperTextIds ( @ NonNull final int ... resourceIds ) { } }
Condition . INSTANCE . ensureNotNull ( resourceIds , "The array may not be null" ) ; for ( int resourceId : resourceIds ) { addHelperTextId ( resourceId ) ; }
public class CmsChoiceMenuEntryBean { /** * Gets the complete path of this entry , which is a list of attribute ids . < p > * @ return the path of this entry */ public List < String > getPath ( ) { } }
List < String > result = new ArrayList < String > ( ) ; CmsChoiceMenuEntryBean entry = this ; while ( entry != null ) { String pathComponent = entry . getPathComponent ( ) ; if ( pathComponent != null ) { // pathComponent may be null for a dummy root entry result . add ( entry . getPathComponent ( ) ) ; } entry = entry . m_parent ; } Collections . reverse ( result ) ; return result ;
public class WebJsJmsMessageEncoderImpl {

    /**
     * Encode an object message body.
     *
     * <p>Appends the serialized object payload, if any, to {@code result} as a
     * {@code '~'} marker followed by the payload bytes rendered in hex. A
     * message with no serialized object contributes nothing to the output.</p>
     *
     * @param result the buffer the encoded body is appended to
     * @param msg the JMS object message whose serialized payload is encoded
     * @throws MessageEncodeFailedException if the payload cannot be serialized
     *         (wrapped {@code ObjectFailedToSerializeException})
     */
    private void encodeObjectBody(StringBuffer result, JsJmsObjectMessage msg) throws MessageEncodeFailedException {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(tc, "encodeObjectBody");
        try {
            byte[] body = msg.getSerializedObject();
            if (body != null) {
                // '~' prefixes the hex-encoded payload in the encoded form.
                result.append('~');
                HexString.binToHex(body, 0, body.length, result);
            }
        } catch (ObjectFailedToSerializeException ofse) {
            // This should not be possible, as updateDataFields will have been called
            // earlier so any unserializable object will already have been dealt with.
            FFDCFilter.processException(ofse, "com.ibm.ws.sib.mfp.impl.WebJsJmsMessageEncoderImpl.encodeObjectBody", "225");
            throw new MessageEncodeFailedException(ofse);
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "encodeObjectBody");
    }
}
public class StandardRoadConnection { /** * Set the temporary buffer of the position of the road connection . * @ param position a position . * @ since 4.0 */ void setPosition ( Point2D < ? , ? > position ) { } }
this . location = position == null ? null : new SoftReference < > ( Point2d . convert ( position ) ) ;
public class ClickCallback { /** * Adds the message area for the confirmation popup to the given row and returns the row to * insert next . */ protected int addConfirmPopupMessage ( SmartTable contents , int row ) { } }
if ( _confirmHTML ) { contents . setHTML ( row , 0 , _confirmMessage , 2 , "Message" ) ; } else { contents . setText ( row , 0 , _confirmMessage , 2 , "Message" ) ; } return row + 1 ;
public class ElasticsearchClientV6 { /** * For Elasticsearch 6 , we need to make sure we are running at least Elasticsearch 6.4 * @ throws IOException when something is wrong while asking the version of the node . */ @ Override public void checkVersion ( ) throws IOException { } }
ESVersion esVersion = getVersion ( ) ; if ( esVersion . major != compatibleVersion ( ) ) { throw new RuntimeException ( "The Elasticsearch client version [" + compatibleVersion ( ) + "] is not compatible with the Elasticsearch cluster version [" + esVersion . toString ( ) + "]." ) ; } if ( esVersion . minor < 4 ) { throw new RuntimeException ( "This version of FSCrawler is not compatible with " + "Elasticsearch version [" + esVersion . toString ( ) + "]. Please upgrade Elasticsearch to at least a 6.4.x version." ) ; }
public class BusinessUtils {

    /**
     * Returns the Guice key qualified with the default qualifier configured for
     * the specified class.
     *
     * <p>Reads the qualifier name from {@code classConfiguration[property]}. If
     * the name resolves to an annotation class, that class is used as the
     * binding qualifier; if it resolves to a non-annotation class a
     * {@code CLASS_IS_NOT_AN_ANNOTATION} business exception is thrown; if the
     * class cannot be loaded at all, the name is used as a Guice
     * {@code @Named} string qualifier instead.</p>
     *
     * <p>Returns empty when no qualifier is configured, or when a binding for
     * the (unqualified) type literal already exists in {@code bindings}.</p>
     */
    @SuppressWarnings("unchecked")
    public static <T> Optional<Key<T>> resolveDefaultQualifier(Map<Key<?>, Class<?>> bindings,
            ClassConfiguration<?> classConfiguration, String property, Class<?> qualifiedClass,
            TypeLiteral<T> genericInterface) {
        Key<T> key = null;
        if (classConfiguration != null && !classConfiguration.isEmpty()) {
            String qualifierName = classConfiguration.get(property);
            if (qualifierName != null && !"".equals(qualifierName)) {
                try {
                    ClassLoader classLoader = ClassLoaders.findMostCompleteClassLoader(BusinessUtils.class);
                    Class<?> qualifierClass = classLoader.loadClass(qualifierName);
                    if (Annotation.class.isAssignableFrom(qualifierClass)) {
                        // Configured name is an annotation type: use it as the qualifier.
                        key = Key.get(genericInterface, (Class<? extends Annotation>) qualifierClass);
                    } else {
                        // A loadable class that is not an annotation is a configuration error.
                        throw BusinessException.createNew(BusinessErrorCode.CLASS_IS_NOT_AN_ANNOTATION)
                                .put("class", qualifiedClass)
                                .put("qualifier", qualifierName);
                    }
                } catch (ClassNotFoundException e) {
                    // Not a class at all: fall back to a string-named qualifier.
                    key = Key.get(genericInterface, Names.named(qualifierName));
                }
            }
        }
        // No qualifier configured, or the raw type is already bound: nothing to resolve.
        if (key == null || bindings.containsKey(Key.get(key.getTypeLiteral()))) {
            return Optional.empty();
        } else {
            return Optional.of(key);
        }
    }
}
public class X509CertSelector {

    /**
     * Sets the policy constraint. The {@code X509Certificate} must
     * include at least one of the specified policies in its certificate
     * policies extension. If {@code certPolicySet} is empty, then the
     * {@code X509Certificate} must include at least some specified policy
     * in its certificate policies extension. If {@code certPolicySet} is
     * {@code null}, no policy check will be performed.
     * Note that the {@code Set} is cloned to protect against
     * subsequent modifications.
     *
     * @param certPolicySet a {@code Set} of certificate policy OIDs in
     *                      string format (or {@code null}). Each OID is
     *                      represented by a set of nonnegative integers
     *                      separated by periods.
     * @throws IOException if a parsing error occurs on the OID such as
     * the first component is not 0, 1 or 2 or the second component is
     * greater than 39.
     * @see #getPolicy
     */
    public void setPolicy(Set<String> certPolicySet) throws IOException {
        if (certPolicySet == null) {
            policySet = null;
            policy = null;
        } else {
            // Snapshot set and parse it. The unmodifiable copy protects against
            // callers mutating the set after this method returns.
            Set<String> tempSet = Collections.unmodifiableSet(new HashSet<String>(certPolicySet));
            /* Convert to Vector of ObjectIdentifiers */
            Iterator<String> i = tempSet.iterator();
            Vector<CertificatePolicyId> polIdVector = new Vector<CertificatePolicyId>();
            while (i.hasNext()) {
                Object o = i.next();
                // Defensive runtime check; an ObjectIdentifier parse error in the
                // loop below also surfaces as IOException.
                if (!(o instanceof String)) {
                    throw new IOException("non String in certPolicySet");
                }
                polIdVector.add(new CertificatePolicyId(new ObjectIdentifier((String) o)));
            }
            // If everything went OK, make the changes. Fields are only assigned
            // after all OIDs parsed, so a parse failure leaves the old state intact.
            policySet = tempSet;
            policy = new CertificatePolicySet(polIdVector);
        }
    }
}
public class CommsByteBuffer {

    /**
     * Reads a String from the current position in the byte buffer.
     *
     * <p>Wire format: a 2-byte length prefix followed by that many bytes of
     * character data in {@code stringEncoding}. A single zero byte (length 1,
     * value 0x00) is the on-wire encoding of a {@code null} String.</p>
     *
     * @return the decoded String, or {@code null} if the wire value was the
     *         null marker
     * @throws SIErrorException if {@code stringEncoding} is not a supported
     *         charset (wrapped {@code UnsupportedEncodingException})
     */
    public synchronized String getString() {
        checkReleased();
        String returningString = null;
        // Read the length in
        short stringLength = receivedBuffer.getShort();
        // Allocate the right amount of space for it
        byte[] stringBytes = new byte[stringLength];
        // And copy the data in
        receivedBuffer.get(stringBytes);
        // If the length is 1, and the byte is 0x00, then this is null - so do nothing
        if (stringLength == 1 && stringBytes[0] == 0) {
            // String is null...
        } else {
            try {
                returningString = new String(stringBytes, stringEncoding);
            } catch (UnsupportedEncodingException e) {
                // Record the failure for serviceability, then surface it as an
                // SIErrorException carrying the NLS-formatted message.
                FFDCFilter.processException(e, CLASS_NAME + ".getString", CommsConstants.COMMSBYTEBUFFER_GETSTRING_01, this);
                if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                    SibTr.debug(tc, "Unable to encode String: ", e);
                    SibTr.exception(tc, e);
                }
                SibTr.error(tc, "UNSUPPORTED_STRING_ENCODING_SICO8005", new Object[] { stringEncoding, e });
                throw new SIErrorException(TraceNLS.getFormattedMessage(CommsConstants.MSG_BUNDLE,
                        "UNSUPPORTED_STRING_ENCODING_SICO8005", new Object[] { stringEncoding, e }, null));
            }
        }
        return returningString;
    }
}
public class AtomixAgent { /** * Parses the command line arguments , returning an argparse4j namespace . * @ param args the arguments to parse * @ return the namespace */ static Namespace parseArgs ( String [ ] args , List < String > unknown ) { } }
ArgumentParser parser = createParser ( ) ; Namespace namespace = null ; try { namespace = parser . parseKnownArgs ( args , unknown ) ; } catch ( ArgumentParserException e ) { parser . handleError ( e ) ; System . exit ( 1 ) ; } return namespace ;
public class ProjectManager { /** * Load the list of projects available to the user , which is represented by the API access key . * Redmine ignores " get trackers info " parameter for " get projects " request . see bug * http : / / www . redmine . org / issues / 8545 * The field is already accessible for a specific project for a long time ( GET / projects / : id ) * but in the projects list ( GET / projects ) it ' s only on the svn trunk for now ( Sep 8 , 2014 ) . * It will be included in Redmine 2.6.0 which isn ' t out yet . * @ return list of Project objects * @ throws RedmineAuthenticationException invalid or no API access key is used with the server , which * requires authorization . Check the constructor arguments . * @ throws RedmineException */ public List < Project > getProjects ( ) throws RedmineException { } }
try { return transport . getObjectsList ( Project . class , new BasicNameValuePair ( "include" , "trackers" ) ) ; } catch ( NotFoundException e ) { throw new RedmineInternalError ( "NotFoundException received, which should never happen in this request" ) ; }
public class MavenJDOMWriter {

    /**
     * Method iterateContributor.
     *
     * <p>Reconciles the {@code childTag} elements under {@code parentTag} with
     * the given collection of {@code Contributor}s: existing child elements are
     * updated in order, new elements are created for surplus contributors, and
     * leftover child elements are removed. The {@code parentTag} element itself
     * is created or removed depending on whether the list is non-empty.</p>
     *
     * @param counter position counter for the parent element
     * @param parent the element to write under
     * @param list the contributors to serialize (may be null/empty)
     * @param parentTag tag name of the wrapping element
     * @param childTag tag name of each contributor element
     */
    protected void iterateContributor(Counter counter, Element parent, java.util.Collection list,
            java.lang.String parentTag, java.lang.String childTag) {
        boolean shouldExist = (list != null) && (list.size() > 0);
        Element element = updateElement(counter, parent, parentTag, shouldExist);
        if (shouldExist) {
            Iterator it = list.iterator();
            // Iterator over the existing child elements; nulled out once exhausted
            // so later code knows whether to reuse or create elements.
            Iterator elIt = element.getChildren(childTag, element.getNamespace()).iterator();
            if (!elIt.hasNext()) {
                elIt = null;
            }
            Counter innerCount = new Counter(counter.getDepth() + 1);
            while (it.hasNext()) {
                Contributor value = (Contributor) it.next();
                Element el;
                if ((elIt != null) && elIt.hasNext()) {
                    // Reuse the next existing element.
                    el = (Element) elIt.next();
                    if (!elIt.hasNext()) {
                        elIt = null;
                    }
                } else {
                    // No more existing elements: create a fresh one in place.
                    el = factory.element(childTag, element.getNamespace());
                    insertAtPreferredLocation(element, el, innerCount);
                }
                updateContributor(value, childTag, innerCount, el);
                innerCount.increaseCount();
            }
            // Remove any existing elements beyond the contributor count.
            if (elIt != null) {
                while (elIt.hasNext()) {
                    elIt.next();
                    elIt.remove();
                }
            }
        }
    }
}
public class StandardFieldsDialog { /** * Sets the given value to the given field . * The edits are discarded after setting the value , if the field is a { @ link ZapTextField } or { @ link ZapTextArea } . * @ param field the field to set the value . * @ param value the value to set . */ private static void setTextAndDiscardEdits ( JTextComponent field , String value ) { } }
if ( value == null ) { return ; } field . setText ( value ) ; if ( field instanceof ZapTextField ) { ( ( ZapTextField ) field ) . discardAllEdits ( ) ; } else if ( field instanceof ZapTextArea ) { ( ( ZapTextArea ) field ) . discardAllEdits ( ) ; }
public class CustomDomainConfigTypeMarshaller { /** * Marshall the given parameter object . */ public void marshall ( CustomDomainConfigType customDomainConfigType , ProtocolMarshaller protocolMarshaller ) { } }
if ( customDomainConfigType == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( customDomainConfigType . getCertificateArn ( ) , CERTIFICATEARN_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class Webcam {

    /**
     * Capture image from webcam and return it. Will return image object or null if webcam is closed
     * or has been already disposed by JVM.<br>
     * <br>
     * <b>IMPORTANT NOTE!!!</b><br>
     * <br>
     * There are two possible behaviors of what webcam should do when you try to get image and
     * webcam is actually closed. Normally it will return null, but there is a special flag which
     * can be statically set to switch all webcams to auto open mode. In this mode, webcam will be
     * automatically open, when you try to get image from closed webcam. Please be aware of some
     * side effects! In case of multi-threaded applications, there is no guarantee that one thread
     * will not try to open webcam even if it was manually closed in different thread.
     *
     * @return Captured image or null if webcam is closed or disposed by JVM
     */
    public BufferedImage getImage() {
        if (!isReady()) {
            return null;
        }
        long t1 = 0;
        long t2 = 0;
        if (asynchronous) {
            // Asynchronous mode: the updater thread owns capture; just hand back
            // its most recent frame.
            return updater.getImage();
        } else {
            // get image, timing the capture so FPS can be estimated below
            t1 = System.currentTimeMillis();
            BufferedImage image = transform(new WebcamGetImageTask(driver, device).getImage());
            t2 = System.currentTimeMillis();
            if (image == null) {
                return null;
            }
            // get FPS
            if (device instanceof WebcamDevice.FPSSource) {
                // Device reports its own FPS; trust it.
                fps = ((WebcamDevice.FPSSource) device).getFPS();
            } else {
                // Exponential moving average of measured frame rate;
                // +1 to avoid division by zero
                fps = (4 * fps + 1000 / (t2 - t1 + 1)) / 5;
            }
            // notify webcam listeners about new image available
            notifyWebcamImageAcquired(image);
            return image;
        }
    }
}
public class Container { /** * Find whether any instance of a particular component is assigned to the container * @ return an optional including the InstancePlan if found */ private Optional < PackingPlan . InstancePlan > getAnyInstanceOfComponent ( String componentName ) { } }
for ( PackingPlan . InstancePlan instancePlan : this . instances ) { if ( instancePlan . getComponentName ( ) . equals ( componentName ) ) { return Optional . of ( instancePlan ) ; } } return Optional . absent ( ) ;
public class GossipMonger {

    /**
     * Shutdown GossipMonger and associated Threads.
     *
     * <p>Clears the singleton reference first so no new callers obtain this
     * instance, flips the shutdown flag, stops the thread pool, and finally
     * clears the ThreadLocal reference held by this class.</p>
     */
    public void shutdown() {
        singleton = null;
        log.info("Shuting down GossipMonger");
        isShutdown.set(true);
        threadPool.shutdown(); // Disable new tasks from being submitted
        // TODO: Wait for messages to end processing
        shutdownAndAwaitTermination(threadPool);
        // Clean ThreadLocal
        ref.remove();
    }
}
public class TwoPhaseCoreAnnotationProcessor {

    /**
     * Implements CoreAnnotationProcessor.process() as two phases, "check" and
     * "generate". "generate" will not be called if "check" emitted any errors
     * (via printError()), or if the processor is running in the IDE's
     * "reconcile" phase.
     */
    public void process() {
        try {
            check();
            boolean isReconcilePhase = false;
            // In the Eclipse IDE's integration between JDT and APT, annotation processors
            // run in two phases -- reconcile and build. These translate into the
            // check and generate phases for a TwoPhaseAnnotationProcessor. In order to
            // optimize for hosting in this environment (and other IDE-centric AP environments)
            // the generate phase can be cut out when performing only the check phase.
            // Custom AP environments that wish to control this should set the "phase" flag of the
            // annotation processor to "RECONCILE".
            try {
                String phase = (String) CompilerUtils.isReconcilePhase(getAnnotationProcessorEnvironment());
                isReconcilePhase = "RECONCILE".equals(phase);
            } catch (FatalCompileTimeException e) {
                // Phase lookup failed: report it but continue as a build phase.
                e.printDiagnostic(this);
            }
            // Do not call generate if check resulted in errors or if the AP is running in
            // a phase called "reconcile"
            if (!isReconcilePhase && !hasErrors()) {
                generate();
            }
        } catch (FatalCompileTimeException e) {
            e.printDiagnostic(this);
        }
    }
}
public class Slice { /** * Gets a 64 - bit long integer at the specified absolute { @ code index } in * this buffer . * @ throws IndexOutOfBoundsException if the specified { @ code index } is less than { @ code 0 } or * { @ code index + 8 } is greater than { @ code this . capacity } */ public long getLong ( int index ) { } }
checkPositionIndexes ( index , index + SIZE_OF_LONG , this . length ) ; index += offset ; return ( ( long ) data [ index ] & 0xff ) | ( ( long ) data [ index + 1 ] & 0xff ) << 8 | ( ( long ) data [ index + 2 ] & 0xff ) << 16 | ( ( long ) data [ index + 3 ] & 0xff ) << 24 | ( ( long ) data [ index + 4 ] & 0xff ) << 32 | ( ( long ) data [ index + 5 ] & 0xff ) << 40 | ( ( long ) data [ index + 6 ] & 0xff ) << 48 | ( ( long ) data [ index + 7 ] & 0xff ) << 56 ;
public class EsStorage { /** * Indexes an entity . * @ param type * @ param id * @ param sourceEntity * @ param refresh true if the operation should wait for a refresh before it returns * @ throws StorageException */ @ SuppressWarnings ( "nls" ) private void indexEntity ( String type , String id , XContentBuilder sourceEntity , boolean refresh ) throws StorageException { } }
try { String json = sourceEntity . string ( ) ; JestResult response = esClient . execute ( new Index . Builder ( json ) . refresh ( refresh ) . index ( getIndexName ( ) ) . setParameter ( Parameters . OP_TYPE , "create" ) . type ( type ) . id ( id ) . build ( ) ) ; if ( ! response . isSucceeded ( ) ) { throw new StorageException ( "Failed to index document " + id + " of type " + type + ": " + response . getErrorMessage ( ) ) ; } } catch ( StorageException e ) { throw e ; } catch ( Exception e ) { throw new StorageException ( e ) ; }
public class BatchObjectUpdater { /** * has no value for its sharding - field , leave the map empty for that object ID . */ private Map < String , Integer > getShardNumbers ( String tableName , Set < String > targObjIDs ) { } }
TableDefinition tableDef = m_tableDef . getAppDef ( ) . getTableDef ( tableName ) ; FieldDefinition shardField = tableDef . getShardingField ( ) ; Map < String , String > shardFieldMap = SpiderService . instance ( ) . getObjectScalar ( tableDef , targObjIDs , shardField . getName ( ) ) ; Map < String , Integer > shardNoMap = new HashMap < > ( ) ; for ( String objID : shardFieldMap . keySet ( ) ) { Date shardingFieldDate = Utils . dateFromString ( shardFieldMap . get ( objID ) ) ; int shardNo = tableDef . computeShardNumber ( shardingFieldDate ) ; shardNoMap . put ( objID , shardNo ) ; } return shardNoMap ;
public class GodHandEpilogue { /** * Handle too many SQL executions . * @ param runtime The runtime meta of action execute . ( NotNull ) * @ param sqlCounter The counter object for SQL executions . ( NotNull ) * @ param sqlExecutionCountLimit The limit of SQL execution count for the action execute . ( NotMinus : already checked here ) */ protected void handleTooManySqlExecution ( ActionRuntime runtime , ExecutedSqlCounter sqlCounter , int sqlExecutionCountLimit ) { } }
final int totalCountOfSql = sqlCounter . getTotalCountOfSql ( ) ; final String actionDisp = buildActionDisp ( runtime ) ; logger . warn ( "*Too many SQL executions: {}/{} in {}" , totalCountOfSql , sqlExecutionCountLimit , actionDisp ) ;
public class StreamConduit { /** * Set the < code > OutputStream < / code > by means of which * input can be sent to the process . * @ param os the < code > OutputStream < / code > . */ private void setProcessInputStream ( OutputStream os ) { } }
if ( input != null ) { inputThread = createPump ( input , os , true ) ; } else { AntFileUtils . close ( os ) ; }
public class RGroupList { /** * Validates the occurrence value . * < UL > * < LI > n : exactly n ; < / LI > * < LI > n - m : n through m ; < / LI > * < LI > & # 62 ; n : greater than n ; < / LI > * < LI > & # 60 ; n : fewer than n ; < / LI > * < LI > default ( blank ) is & gt ; 0 ; < / LI > * < / UL > * Any combination of the preceding values is also * allowed ; for example " 1 , 3-7 , 9 , & gt ; 11 " . * @ param occ String to validate . * @ return true if valid String provided . */ public static boolean isValidOccurrenceSyntax ( String occ ) { } }
StringTokenizer st = new StringTokenizer ( occ , "," ) ; while ( st . hasMoreTokens ( ) ) { String cond = st . nextToken ( ) . trim ( ) . replaceAll ( " " , "" ) ; do { // Number : " n " if ( match ( "^\\d+$" , cond ) ) { if ( Integer . valueOf ( cond ) < 0 ) // not allowed return false ; break ; } // Range : " n - m " if ( match ( "^\\d+-\\d+$" , cond ) ) { int from = Integer . valueOf ( cond . substring ( 0 , cond . indexOf ( '-' ) ) ) ; int to = Integer . valueOf ( cond . substring ( cond . indexOf ( '-' ) + 1 , cond . length ( ) ) ) ; if ( from < 0 || to < 0 || to < from ) // not allowed return false ; break ; } // Smaller than : " < n " if ( match ( "^<\\d+$" , cond ) ) { int n = Integer . valueOf ( cond . substring ( cond . indexOf ( '<' ) + 1 , cond . length ( ) ) ) ; if ( n == 0 ) // not allowed return false ; break ; } // Greater than : " > n " if ( match ( "^>\\d+$" , cond ) ) { break ; } return false ; } while ( 1 == 0 ) ; } return true ;
public class DateTimeUtil { /** * 获取日期所在周的结束时间 ( 约定为周六 ) * @ param date 时间 ( { @ link Date } ) * @ return 时间 ( { @ link java . util . Date } ) */ public static Date getEndOfWeek ( Date date ) { } }
if ( date == null ) return null ; Calendar calendar = Calendar . getInstance ( ) ; calendar . setTime ( date ) ; calendar . setFirstDayOfWeek ( FIRST_DAY_OF_WEEK ) ; calendar . set ( Calendar . DAY_OF_WEEK , Calendar . SATURDAY ) ; return calendar . getTime ( ) ;
public class ArrayFunctions {

    /**
     * Returned expression results in the first position of value within the
     * array, or -1. Array position is zero-based, i.e. the first position is 0.
     *
     * <p>Convenience overload: wraps the raw string via {@code x(...)} and
     * delegates to the {@code Expression}-based variant.</p>
     */
    public static Expression arrayPosition(String expression, Expression value) {
        return arrayPosition(x(expression), value);
    }
}
public class AbstractLinear {

    /**
     * Returns the track image with the given values.
     *
     * <p>Delegates to the extended overload, passing {@code null} for its
     * final parameter — presumably "no pre-existing image to reuse"; confirm
     * against the overload's signature.</p>
     *
     * @param WIDTH
     * @param HEIGHT
     * @param MIN_VALUE
     * @param MAX_VALUE
     * @param TRACK_START
     * @param TRACK_SECTION
     * @param TRACK_STOP
     * @param TRACK_START_COLOR
     * @param TRACK_SECTION_COLOR
     * @param TRACK_STOP_COLOR
     * @return a buffered image of the track colored with the given values
     */
    protected BufferedImage create_TRACK_Image(final int WIDTH, final int HEIGHT, final double MIN_VALUE,
            final double MAX_VALUE, final double TRACK_START, final double TRACK_SECTION, final double TRACK_STOP,
            final Color TRACK_START_COLOR, final Color TRACK_SECTION_COLOR, final Color TRACK_STOP_COLOR) {
        return create_TRACK_Image(WIDTH, HEIGHT, MIN_VALUE, MAX_VALUE, TRACK_START, TRACK_SECTION, TRACK_STOP,
                TRACK_START_COLOR, TRACK_SECTION_COLOR, TRACK_STOP_COLOR, null);
    }
}
public class MPGImpl {

    /**
     * <!-- begin-user-doc -->
     * Sets the value of the feature identified by {@code featureID}: for the
     * RG feature the existing list is cleared and replaced with the given
     * collection; any other feature is delegated to the superclass.
     * NOTE: EMF-generated code — regenerate rather than hand-edit.
     * <!-- end-user-doc -->
     * @generated
     */
    @SuppressWarnings("unchecked")
    @Override
    public void eSet(int featureID, Object newValue) {
        switch (featureID) {
            case AfplibPackage.MPG__RG:
                getRG().clear();
                getRG().addAll((Collection<? extends MPGRG>) newValue);
                return;
        }
        super.eSet(featureID, newValue);
    }
}
public class CustomUserRegistryWrapper {

    /**
     * {@inheritDoc}
     *
     * <p>Delegates to the wrapped custom registry and translates its exception
     * types: the custom registry's {@code EntryNotFoundException} becomes this
     * API's {@code EntryNotFoundException}; any other failure becomes a
     * {@code RegistryException}. The original exception is preserved as the
     * cause in both cases.</p>
     */
    @Override
    @FFDCIgnore(com.ibm.websphere.security.EntryNotFoundException.class)
    public String getGroupSecurityName(String uniqueGroupId) throws EntryNotFoundException, RegistryException {
        try {
            return customUserRegistry.getGroupSecurityName(uniqueGroupId);
        } catch (com.ibm.websphere.security.EntryNotFoundException e) {
            // Narrow catch must come first; it would otherwise be swallowed by
            // the broad Exception handler below.
            throw new EntryNotFoundException(e.getMessage(), e);
        } catch (Exception e) {
            throw new RegistryException(e.getMessage(), e);
        }
    }
}
public class PP20 {

    /**
     * If successful, allocates a new buffer containing the uncompressed data
     * and returns the uncompressed length. Else, returns 0.
     *
     * <p>PowerPacker data is decompressed backwards: the last dword of the
     * source holds the uncompressed length (bits 31-8) and the number of
     * unused bits in the first data dword (bits 7-0). Output is written from
     * the end of the destination buffer toward the start.</p>
     *
     * @return the uncompressed length, or 0 on failure
     */
    public int /* udword_ppt */ decompress(final short[] /* const void* */ source,
            int /* udword_ppt */ size, final Decompressed decomp) {
        this.source = source;
        globalError = false; // assume no error
        readPtr = 0;
        if (!isCompressed(source, size)) {
            return 0;
        }
        // Uncompressed size is stored at end of source file.
        // Backwards decompression.
        readPtr += (size - 4);
        int /* udword_ppt */ lastDword = readBEdword(source, readPtr);
        // Uncompressed length in bits 31-8 of last dword.
        int /* udword_ppt */ outputLen = lastDword >> 8;
        // Allocate memory for output data.
        dest = new short /* ubyte_ppt */ [outputLen];
        // Lowest dest. address for range-checks.
        // Put destptr to end of uncompressed data.
        writePtr = outputLen;
        // Read number of unused bits in 1st data dword
        // from lowest bits 7-0 of last dword.
        bits = 32 - (lastDword & 0xFF);
        // Main decompression loop.
        bytesTOdword();
        if (bits != 32)
            current >>= (32 - bits);
        do {
            // A 0 control bit means a run of literal bytes precedes the
            // back-reference sequence.
            if (readBits(1) == 0)
                bytes();
            if (writePtr > 0)
                sequence();
            if (globalError) {
                // statusString already set.
                outputLen = 0; // unsuccessful decompression
                break;
            }
        } while (writePtr > 0);
        // Finished.
        if (outputLen > 0) // successful
        {
            decomp.destBufRef = new short[dest.length];
            // Free any previously existing destination buffer.
            System.arraycopy(dest, 0, decomp.destBufRef, 0, dest.length);
        }
        return outputLen;
    }
}
public class XMLUtils { /** * Serializes a DOM document * @ param doc * @ throws CitrusRuntimeException * @ return serialized XML string */ public static String serialize ( Document doc ) { } }
LSSerializer serializer = configurer . createLSSerializer ( ) ; LSOutput output = configurer . createLSOutput ( ) ; String charset = getTargetCharset ( doc ) . displayName ( ) ; output . setEncoding ( charset ) ; StringWriter writer = new StringWriter ( ) ; output . setCharacterStream ( writer ) ; serializer . write ( doc , output ) ; return writer . toString ( ) ;
public class Util { /** * Works like { @ link String # indexOf ( int ) } but ' not found ' is returned as s . length ( ) , not - 1. * This enables more straight - forward comparison . */ private static int _indexOf ( @ Nonnull String s , char ch ) { } }
int idx = s . indexOf ( ch ) ; if ( idx < 0 ) return s . length ( ) ; return idx ;
public class CmsDynamicFunctionBean {

    /**
     * Finds the correct format for a given container type and width.<p>
     *
     * <p>Builds a formatter bean for each format (main format first), lets the
     * formatter configuration pick the default formatter for the type/width,
     * then maps the chosen formatter back to its originating format via an
     * identity map.</p>
     *
     * @param cms the current CMS context
     * @param type the container type
     * @param width the container width
     * @return the format for the given container type and width, or
     *         {@code null} if no formatter matches
     */
    public Format getFormatForContainer(CmsObject cms, String type, int width) {
        // relate formatters to formats so we can pick the corresponding format after a formatter has been selected
        // (IdentityHashMap: formatter beans are correlated by instance, not equals)
        IdentityHashMap<CmsFormatterBean, Format> formatsByFormatter = new IdentityHashMap<CmsFormatterBean, Format>();
        CmsFormatterBean mainFormatter = createFormatterBean(m_mainFormat, true);
        formatsByFormatter.put(mainFormatter, m_mainFormat);
        List<I_CmsFormatterBean> formatters = new ArrayList<I_CmsFormatterBean>();
        for (Format format : m_otherFormats) {
            CmsFormatterBean formatter = createFormatterBean(format, false);
            formatsByFormatter.put(formatter, format);
            formatters.add(formatter);
        }
        // Main formatter goes first so it takes precedence in the configuration.
        formatters.add(0, mainFormatter);
        CmsFormatterConfiguration formatterConfiguration = CmsFormatterConfiguration.create(cms, formatters);
        I_CmsFormatterBean matchingFormatter = formatterConfiguration.getDefaultFormatter(type, width);
        if (matchingFormatter == null) {
            return null;
        }
        return formatsByFormatter.get(matchingFormatter);
    }
}
public class SrcGen4J { /** * Returns a file filter that combines all filters for all incremental parsers . It should be used for selecting the appropriate files . * @ return File filter . */ @ NotNull public FileFilter getFileFilter ( ) { } }
if ( fileFilter == null ) { final List < IOFileFilter > filters = new ArrayList < > ( ) ; final Parsers parsers = config . getParsers ( ) ; if ( parsers != null ) { final List < ParserConfig > parserConfigs = parsers . getList ( ) ; if ( parserConfigs != null ) { for ( final ParserConfig pc : parserConfigs ) { final Parser < Object > pars = pc . getParser ( ) ; if ( pars instanceof IncrementalParser ) { final IncrementalParser < ? > parser = ( IncrementalParser < ? > ) pars ; filters . add ( parser . getFileFilter ( ) ) ; } } } } fileFilter = new OrFileFilter ( filters ) ; } return fileFilter ;
public class NonUniformMutation { /** * Perform the mutation operation * @ param probability Mutation setProbability * @ param solution The solution to mutate */ public void doMutation ( double probability , DoubleSolution solution ) { } }
for ( int i = 0 ; i < solution . getNumberOfVariables ( ) ; i ++ ) { if ( randomGenenerator . getRandomValue ( ) < probability ) { double rand = randomGenenerator . getRandomValue ( ) ; double tmp ; if ( rand <= 0.5 ) { tmp = delta ( solution . getUpperBound ( i ) - solution . getVariableValue ( i ) , perturbation ) ; tmp += solution . getVariableValue ( i ) ; } else { tmp = delta ( solution . getLowerBound ( i ) - solution . getVariableValue ( i ) , perturbation ) ; tmp += solution . getVariableValue ( i ) ; } if ( tmp < solution . getLowerBound ( i ) ) { tmp = solution . getLowerBound ( i ) ; } else if ( tmp > solution . getUpperBound ( i ) ) { tmp = solution . getUpperBound ( i ) ; } solution . setVariableValue ( i , tmp ) ; } }
public class F0 { /** * Returns a composed function that when applied , try to apply this function first , in case * a { @ link java . lang . RuntimeException } is captured apply to the fallback function specified . This * method helps to implement partial function * @ param fallback * the function to applied if this function doesn ' t apply in the current situation * @ return the final result */ public F0 < R > orElse ( final Func0 < ? extends R > fallback ) { } }
final F0 < R > me = this ; return new F0 < R > ( ) { @ Override public R apply ( ) { try { return me . apply ( ) ; } catch ( RuntimeException e ) { return fallback . apply ( ) ; } } } ;
public class HBaseReader { /** * ( non - Javadoc ) * @ see * com . impetus . client . hbase . Reader # loadAll ( org . apache . hadoop . hbase . client * . HTable , org . apache . hadoop . hbase . filter . Filter , byte [ ] , byte [ ] ) */ @ Override public List < HBaseData > loadAll ( HTableInterface hTable , Filter filter , byte [ ] startRow , byte [ ] endRow , String columnFamily , String qualifier , String [ ] columns ) throws IOException { } }
List < HBaseData > results = null ; if ( scanner == null ) { Scan s = null ; if ( startRow != null && endRow != null && startRow . equals ( endRow ) ) { Get g = new Get ( startRow ) ; s = new Scan ( g ) ; } else if ( startRow != null && endRow != null ) { s = new Scan ( startRow , endRow ) ; } else if ( startRow != null ) { s = new Scan ( startRow ) ; } else if ( endRow != null ) { s = new Scan ( ) ; s . setStopRow ( endRow ) ; } else { s = new Scan ( ) ; } setScanCriteria ( filter , columnFamily , qualifier , s , columns ) ; scanner = hTable . getScanner ( s ) ; resultsIter = scanner . iterator ( ) ; } return scanResults ( null , results ) ;
public class UIComponentBase { /** * < p class = " changed _ added _ 2_0 " > This is a default implementation of * { @ link javax . faces . component . behavior . ClientBehaviorHolder # addClientBehavior } . * < code > UIComponent < / code > does not implement the * { @ link javax . faces . component . behavior . ClientBehaviorHolder } interface , * but provides default implementations for the methods defined by * { @ link javax . faces . component . behavior . ClientBehaviorHolder } to simplify * subclass implementations . Subclasses that wish to support the * { @ link javax . faces . component . behavior . ClientBehaviorHolder } contract must * declare that the subclass implements * { @ link javax . faces . component . behavior . ClientBehaviorHolder } , and must provide * an implementation of * { @ link javax . faces . component . behavior . ClientBehaviorHolder # getEventNames } . < / p > * @ param eventName the logical name of the client - side event to attach * the behavior to . * @ param behavior the { @ link javax . faces . component . behavior . Behavior } * instance to attach for the specified event name . * @ since 2.0 */ public void addClientBehavior ( String eventName , ClientBehavior behavior ) { } }
assertClientBehaviorHolder ( ) ; // First , make sure that the event is supported . We don ' t want // to bother attaching behaviors for unsupported events . Collection < String > eventNames = getEventNames ( ) ; // getClientEventNames ( ) is spec ' ed to require a non - null Set . // If getClientEventNames ( ) returns null , throw an exception // to indicate that the API in not being used properly . if ( eventNames == null ) { throw new IllegalStateException ( "Attempting to add a Behavior to a component " + "that does not support any event types. " + "getEventTypes() must return a non-null Set." ) ; } if ( eventNames . contains ( eventName ) ) { if ( initialStateMarked ( ) ) { // a Behavior has been added dynamically . Update existing // Behaviors , if any , to save their full state . if ( behaviors != null ) { for ( Entry < String , List < ClientBehavior > > entry : behaviors . entrySet ( ) ) { for ( ClientBehavior b : entry . getValue ( ) ) { if ( b instanceof PartialStateHolder ) { ( ( PartialStateHolder ) behavior ) . clearInitialState ( ) ; } } } } } // We ' ve got an event that we support , create our Map // if necessary if ( null == behaviors ) { // Typically we only have a small number of behaviors for // any component - in most cases only 1 . Using a very small // initial capacity so that we keep the footprint to a minimum . Map < String , List < ClientBehavior > > modifiableMap = new HashMap < String , List < ClientBehavior > > ( 5 , 1.0f ) ; behaviors = new BehaviorsMap ( modifiableMap ) ; } List < ClientBehavior > eventBehaviours = behaviors . get ( eventName ) ; if ( null == eventBehaviours ) { // Again using small initial capacity - we typically // only have 1 Behavior per event type . eventBehaviours = new ArrayList < ClientBehavior > ( 3 ) ; behaviors . getModifiableMap ( ) . put ( eventName , eventBehaviours ) ; } eventBehaviours . add ( behavior ) ; }
public class JmsDestinationImpl { /** * Get the reply reliability to use for reply mesages on a replyTo destination * @ return the reply reliability */ protected Reliability getReplyReliability ( ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( this , tc , "getReplyReliability" ) ; Reliability result = null ; if ( replyReliabilityByte != - 1 ) { result = Reliability . getReliability ( replyReliabilityByte ) ; } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( this , tc , "getReplyReliability" , result ) ; return result ;
public class CmsExtendedWorkflowManager { /** * Sends the notification for released resources . < p > * @ param userCms the user ' s CMS context * @ param recipient the OpenCms user to whom the notification should be sent * @ param workflowProject the workflow project which * @ param resources the resources which have been affected by a workflow action */ protected void sendNotification ( CmsObject userCms , CmsUser recipient , CmsProject workflowProject , List < CmsResource > resources ) { } }
try { String linkHref = OpenCms . getLinkManager ( ) . getServerLink ( userCms , "/system/workplace/commons/publish.jsp?" + CmsPublishService . PARAM_PUBLISH_PROJECT_ID + "=" + workflowProject . getUuid ( ) + "&" + CmsPublishService . PARAM_CONFIRM + "=true" ) ; CmsWorkflowNotification notification = new CmsWorkflowNotification ( m_adminCms , userCms , recipient , getNotificationResource ( m_adminCms ) , workflowProject , resources , linkHref ) ; notification . send ( ) ; } catch ( Throwable e ) { LOG . error ( e . getLocalizedMessage ( ) , e ) ; }
public class ApiOvhTelephony { /** * Alter this object properties * REST : PUT / telephony / { billingAccount } / number / { serviceName } * @ param body [ required ] New object properties * @ param billingAccount [ required ] The name of your billingAccount * @ param serviceName [ required ] Name of the service */ public void billingAccount_number_serviceName_PUT ( String billingAccount , String serviceName , OvhNumber body ) throws IOException { } }
String qPath = "/telephony/{billingAccount}/number/{serviceName}" ; StringBuilder sb = path ( qPath , billingAccount , serviceName ) ; exec ( qPath , "PUT" , sb . toString ( ) , body ) ;
public class JsHdrsImpl { /** * Get the JsMsgPart which contains the JMF Message described by the * JsApi schema if it is already fluffed up ( i . e . cached ) . * If the part is not already fluffed and cached , just return null as the caller * does NOT want to fluff it up . * @ return JsMsgPart The message part described by the JsApi schema , or null */ synchronized final JsMsgPart getApiIfFluffed ( ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { if ( api == null ) SibTr . debug ( this , tc , "getApiIfFluffed returning null" ) ; } return api ;
public class CoreServiceImpl { /** * DS - driven de - activation */ @ Deactivate protected void deactivate ( int reason ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEventEnabled ( ) ) { Tr . event ( tc , "File monitor service deactivated" , "reason=" + reason , fileMonitors ) ; } for ( Map . Entry < ServiceReference < FileMonitor > , MonitorHolder > entry : fileMonitors . entrySet ( ) ) { entry . getValue ( ) . destroy ( ) ; } fileMonitors . clear ( ) ; this . cContext = null ;
public class RouteBuilder { /** * Specifies that this route is mapped to HTTP GET method . * @ return instance of { @ link RouteBuilder } . */ public RouteBuilder get ( ) { } }
if ( ! methods . contains ( HttpMethod . GET ) ) { methods . add ( HttpMethod . GET ) ; } return this ;
public class NodeTypes { /** * Determine if the named node type or any of the mixin types subtypes the ' mode : unorderedCollection ' type . * @ param nodeTypeName the node type name ; may be null * @ param mixinTypes the mixin type names ; may be null or empty * @ return true if any of the named node type is an unordered collection , or false otherwise */ public boolean isUnorderedCollection ( Name nodeTypeName , Collection < Name > mixinTypes ) { } }
if ( nodeTypeName != null && nodeTypeNamesThatAreUnorderableCollections . contains ( nodeTypeName ) ) { return true ; } if ( mixinTypes != null && ! mixinTypes . isEmpty ( ) ) { for ( Name mixin : mixinTypes ) { if ( nodeTypeNamesThatAreUnorderableCollections . contains ( mixin ) ) { return true ; } } } return false ;
public class KeyAffinityServiceFactory { /** * Same as { @ link # newKeyAffinityService ( org . infinispan . Cache , java . util . concurrent . Executor , KeyGenerator , int , * boolean ) } with start = = true ; */ public static < K , V > KeyAffinityService < K > newKeyAffinityService ( Cache < K , V > cache , Executor ex , KeyGenerator < K > keyGenerator , int keyBufferSize ) { } }
return newKeyAffinityService ( cache , ex , keyGenerator , keyBufferSize , true ) ;
public class HotSpotVirtualMachine { /** * Load agent library * If isAbsolute is true then the agent library is the absolute path * to the library and thus will not be expanded in the target VM . * if isAbsolute is false then the agent library is just a library * name and it will be expended in the target VM . */ private void loadAgentLibrary ( String agentLibrary , boolean isAbsolute , String options ) throws AgentLoadException , AgentInitializationException , IOException { } }
InputStream in = execute ( "load" , agentLibrary , isAbsolute ? "true" : "false" , options ) ; try { int result = readInt ( in ) ; if ( result != 0 ) { throw new AgentInitializationException ( "Agent_OnAttach failed" , result ) ; } } finally { in . close ( ) ; }
public class HttpServletRequestInjectionProxy { /** * ( non - Javadoc ) * @ see javax . servlet . ServletRequestWrapper # isWrapperFor ( java . lang . Class ) */ @ Override public boolean isWrapperFor ( @ SuppressWarnings ( "rawtypes" ) Class wrappedType ) { } }
if ( ! ServletRequest . class . isAssignableFrom ( wrappedType ) ) { throw new IllegalArgumentException ( "Given class " + wrappedType . getName ( ) + " not a subinterface of " + ServletRequest . class . getName ( ) ) ; } final ServletRequest request = getHttpServletRequest ( ) ; @ SuppressWarnings ( "unchecked" ) final Class < ? extends ServletRequest > wrappedServletType = wrappedType ; if ( wrappedServletType . isAssignableFrom ( request . getClass ( ) ) ) { return true ; } else if ( request instanceof ServletRequestWrapper ) { return ( ( ServletRequestWrapper ) request ) . isWrapperFor ( wrappedType ) ; } else { return false ; }
public class Builder { /** * Declares a script to be run as part of the RPM pre - uninstallation . The * script will be run using the interpreter declared with the * { @ link # setPreUninstallProgram ( String ) } method . * @ param script Script contents to run ( i . e . shell commands ) */ public void setPreUninstallScript ( final String script ) { } }
setPreUninstallProgram ( readProgram ( script ) ) ; if ( script != null ) format . getHeader ( ) . createEntry ( PREUNSCRIPT , script ) ;
public class GeneratedDOAuth2UserDaoImpl { /** * find - by method for unique field email * @ param email the unique attribute * @ return the unique DOAuth2User for the specified email */ public DOAuth2User findByEmail ( java . lang . String email ) { } }
return queryUniqueByField ( null , DOAuth2UserMapper . Field . EMAIL . getFieldName ( ) , email ) ;
public class DynamoDBService { /** * Set the region or endpoint in m _ ddbClient */ private void setRegionOrEndPoint ( ) { } }
String regionName = getParamString ( "ddb_region" ) ; if ( regionName != null ) { Regions regionEnum = Regions . fromName ( regionName ) ; Utils . require ( regionEnum != null , "Unknown 'ddb_region': " + regionName ) ; m_logger . info ( "Using region: {}" , regionName ) ; m_ddbClient . setRegion ( Region . getRegion ( regionEnum ) ) ; } else { String ddbEndpoint = getParamString ( "ddb_endpoint" ) ; Utils . require ( ddbEndpoint != null , "Either 'ddb_region' or 'ddb_endpoint' must be defined for tenant: " + m_tenant . getName ( ) ) ; m_logger . info ( "Using endpoint: {}" , ddbEndpoint ) ; m_ddbClient . setEndpoint ( ddbEndpoint ) ; }
public class CmsUserTable { /** * Returns status help message . * @ param user CmsUser * @ param disabled boolean * @ param newUser boolean * @ return String */ String getStatusHelp ( CmsUser user , boolean disabled , boolean newUser ) { } }
if ( disabled ) { return CmsVaadinUtils . getMessageText ( Messages . GUI_USERMANAGEMENT_USER_DISABLED_HELP_0 ) ; } if ( newUser ) { return CmsVaadinUtils . getMessageText ( Messages . GUI_USERMANAGEMENT_USER_INACTIVE_HELP_0 ) ; } if ( isUserPasswordReset ( user ) ) { return CmsVaadinUtils . getMessageText ( Messages . GUI_USERMANAGEMENT_USER_PASSWORT_RESET_HELP_0 ) ; } long lastLogin = user . getLastlogin ( ) ; return CmsVaadinUtils . getMessageText ( Messages . GUI_USERMANAGEMENT_USER_ACTIVE_HELP_1 , CmsDateUtil . getDateTime ( new Date ( lastLogin ) , DateFormat . SHORT , A_CmsUI . get ( ) . getLocale ( ) ) ) ;
public class BaseUniqueIDGenerator { /** * { @ inheritDoc } */ @ Override public synchronized byte [ ] generate ( ) throws GeneratorException { } }
long now = System . currentTimeMillis ( ) ; if ( now == previousTimestamp ) { sequence ++ ; } else { sequence = 0 ; } if ( sequence > Blueprint . MAX_SEQUENCE_COUNTER ) { try { TimeUnit . MILLISECONDS . sleep ( 1 ) ; return generate ( ) ; } catch ( InterruptedException e ) { Thread . currentThread ( ) . interrupt ( ) ; } } previousTimestamp = now ; Blueprint blueprint = new Blueprint ( now , sequence , generatorIdentityHolder . getGeneratorId ( ) , generatorIdentityHolder . getClusterId ( ) , mode ) ; return IDBuilder . build ( blueprint ) ;
public class SSESpecificationMarshaller { /** * Marshall the given parameter object . */ public void marshall ( SSESpecification sSESpecification , ProtocolMarshaller protocolMarshaller ) { } }
if ( sSESpecification == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( sSESpecification . getEnabled ( ) , ENABLED_BINDING ) ; protocolMarshaller . marshall ( sSESpecification . getSSEType ( ) , SSETYPE_BINDING ) ; protocolMarshaller . marshall ( sSESpecification . getKMSMasterKeyId ( ) , KMSMASTERKEYID_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class Primitives { /** * Returns the primitive type of the given class . * The passed class can be any class : < code > boolean . class < / code > , < code > Integer . class < / code > * in witch case this method will return < code > boolean . class < / code > , even < code > SomeObject . class < / code > * in which case < code > null < / code > will be returned . * @ param clazz The class from which primitive type has to be retrieved * @ param < T > The type * @ return The primitive type if relevant , otherwise < code > null < / code > */ public static < T > Class < T > primitiveTypeOf ( Class < T > clazz ) { } }
if ( clazz . isPrimitive ( ) ) { return clazz ; } return ( Class < T > ) PRIMITIVE_TYPES . get ( clazz ) ;
public class InternalPartitionServiceImpl { /** * Sets the { @ code partitionState } if the node is started and the state is sent by the master known by this node . * @ param partitionState the new partition state * @ return { @ code true } if the partition state was applied */ public boolean processPartitionRuntimeState ( final PartitionRuntimeState partitionState ) { } }
Address sender = partitionState . getMaster ( ) ; if ( ! node . getNodeExtension ( ) . isStartCompleted ( ) ) { logger . warning ( "Ignoring received partition table, startup is not completed yet. Sender: " + sender ) ; return false ; } if ( ! validateSenderIsMaster ( sender , "partition table update" ) ) { return false ; } return applyNewPartitionTable ( partitionState . getPartitionTable ( ) , partitionState . getVersion ( ) , partitionState . getCompletedMigrations ( ) , sender ) ;
public class Matrix4x3d { /** * / * ( non - Javadoc ) * @ see org . joml . Matrix4x3dc # sub ( org . joml . Matrix4x3dc , org . joml . Matrix4x3d ) */ public Matrix4x3d sub ( Matrix4x3dc subtrahend , Matrix4x3d dest ) { } }
dest . m00 = m00 - subtrahend . m00 ( ) ; dest . m01 = m01 - subtrahend . m01 ( ) ; dest . m02 = m02 - subtrahend . m02 ( ) ; dest . m10 = m10 - subtrahend . m10 ( ) ; dest . m11 = m11 - subtrahend . m11 ( ) ; dest . m12 = m12 - subtrahend . m12 ( ) ; dest . m20 = m20 - subtrahend . m20 ( ) ; dest . m21 = m21 - subtrahend . m21 ( ) ; dest . m22 = m22 - subtrahend . m22 ( ) ; dest . m30 = m30 - subtrahend . m30 ( ) ; dest . m31 = m31 - subtrahend . m31 ( ) ; dest . m32 = m32 - subtrahend . m32 ( ) ; dest . properties = 0 ; return dest ;
public class PlanNode { /** * Apply the operation to all ancestor nodes below a node of the given type . * @ param stopType the type of node that should not be included in the results ; may not be null * @ param operation the operation to apply to each of the ancestor nodes below the given type ; may not be null */ public void applyToAncestorsUpTo ( Type stopType , Operation operation ) { } }
PlanNode ancestor = getParent ( ) ; while ( ancestor != null ) { if ( ancestor . getType ( ) == stopType ) return ; operation . apply ( ancestor ) ; ancestor = ancestor . getParent ( ) ; }
public class TrainingFormHandler { /** * Generates the InputValue for the form input by inspecting the current * value of the corresponding WebElement on the DOM . * @ return The current InputValue for the element on the DOM . */ private InputValue getInputValue ( FormInput input ) { } }
/* Get the DOM element from Selenium . */ WebElement inputElement = browser . getWebElement ( input . getIdentification ( ) ) ; switch ( input . getType ( ) ) { case TEXT : case PASSWORD : case HIDDEN : case SELECT : case TEXTAREA : return new InputValue ( inputElement . getAttribute ( "value" ) ) ; case RADIO : case CHECKBOX : default : String value = inputElement . getAttribute ( "value" ) ; Boolean checked = inputElement . isSelected ( ) ; return new InputValue ( value , checked ) ; }
public class AbstractSequentialList { /** * Replaces the element at the specified position in this list with the * specified element ( optional operation ) . * < p > This implementation first gets a list iterator pointing to the * indexed element ( with < tt > listIterator ( index ) < / tt > ) . Then , it gets * the current element using < tt > ListIterator . next < / tt > and replaces it * with < tt > ListIterator . set < / tt > . * < p > Note that this implementation will throw an * < tt > UnsupportedOperationException < / tt > if the list iterator does not * implement the < tt > set < / tt > operation . * @ throws UnsupportedOperationException { @ inheritDoc } * @ throws ClassCastException { @ inheritDoc } * @ throws NullPointerException { @ inheritDoc } * @ throws IllegalArgumentException { @ inheritDoc } * @ throws IndexOutOfBoundsException { @ inheritDoc } */ public E set ( int index , E element ) { } }
try { ListIterator < E > e = listIterator ( index ) ; E oldVal = e . next ( ) ; e . set ( element ) ; return oldVal ; } catch ( NoSuchElementException exc ) { throw new IndexOutOfBoundsException ( "Index: " + index ) ; }
public class SarlAssertExpressionImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ Override public void setCondition ( XExpression newCondition ) { } }
if ( newCondition != condition ) { NotificationChain msgs = null ; if ( condition != null ) msgs = ( ( InternalEObject ) condition ) . eInverseRemove ( this , EOPPOSITE_FEATURE_BASE - SarlPackage . SARL_ASSERT_EXPRESSION__CONDITION , null , msgs ) ; if ( newCondition != null ) msgs = ( ( InternalEObject ) newCondition ) . eInverseAdd ( this , EOPPOSITE_FEATURE_BASE - SarlPackage . SARL_ASSERT_EXPRESSION__CONDITION , null , msgs ) ; msgs = basicSetCondition ( newCondition , msgs ) ; if ( msgs != null ) msgs . dispatch ( ) ; } else if ( eNotificationRequired ( ) ) eNotify ( new ENotificationImpl ( this , Notification . SET , SarlPackage . SARL_ASSERT_EXPRESSION__CONDITION , newCondition , newCondition ) ) ;
public class BuildsInner { /** * Gets all the builds for a registry . * @ param resourceGroupName The name of the resource group to which the container registry belongs . * @ param registryName The name of the container registry . * @ param filter The builds filter to apply on the operation . * @ param top $ top is supported for get list of builds , which limits the maximum number of builds to return . * @ param skipToken $ skipToken is supported on get list of builds , which provides the next page in the list of builds . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the PagedList & lt ; BuildInner & gt ; object */ public Observable < Page < BuildInner > > listAsync ( final String resourceGroupName , final String registryName , final String filter , final Integer top , final String skipToken ) { } }
return listWithServiceResponseAsync ( resourceGroupName , registryName , filter , top , skipToken ) . map ( new Func1 < ServiceResponse < Page < BuildInner > > , Page < BuildInner > > ( ) { @ Override public Page < BuildInner > call ( ServiceResponse < Page < BuildInner > > response ) { return response . body ( ) ; } } ) ;
public class HilOut { /** * innerScan function calculates new upper and lower bounds and inserts the * points of the neighborhood the bounds are based on in the NN Set * @ param i position in pf of the feature for which the bounds should be * calculated * @ param maxcount maximal size of the neighborhood */ private void innerScan ( HilbertFeatures hf , final int i , final int maxcount ) { } }
final O p = hf . relation . get ( hf . pf [ i ] . id ) ; // Get only once for performance int a = i , b = i ; int level = h , levela = h , levelb = h ; // Explore up to " maxcount " neighbors in this pass for ( int count = 0 ; count < maxcount ; count ++ ) { final int c ; // Neighbor to explore if ( a == 0 ) { // At left end , explore right // assert ( b < capital _ n - 1 ) ; levelb = Math . min ( levelb , hf . pf [ b ] . level ) ; b ++ ; c = b ; } else if ( b >= capital_n - 1 ) { // At right end , explore left // assert ( a > 0 ) ; a -- ; levela = Math . min ( levela , hf . pf [ a ] . level ) ; c = a ; } else if ( hf . pf [ a - 1 ] . level >= hf . pf [ b ] . level ) { // Prefer higher level a -- ; levela = Math . min ( levela , hf . pf [ a ] . level ) ; c = a ; } else { // assert ( b < capital _ n - 1 ) ; levelb = Math . min ( levelb , hf . pf [ b ] . level ) ; b ++ ; c = b ; } if ( ! hf . pf [ i ] . nn_keys . contains ( hf . pf [ c ] . id ) ) { // hf . distcomp + + ; hf . pf [ i ] . insert ( hf . pf [ c ] . id , distq . distance ( p , hf . pf [ c ] . id ) , k ) ; if ( hf . pf [ i ] . nn . size ( ) == k ) { if ( hf . pf [ i ] . sum_nn < omega_star ) { break ; // stop = true } final int mlevel = Math . max ( levela , levelb ) ; if ( mlevel < level ) { level = mlevel ; final double delta = hf . minDistLevel ( hf . pf [ i ] . id , level ) ; if ( delta >= hf . pf [ i ] . nn . peek ( ) . doubleValue ( ) ) { break ; // stop = true } } } } } double br = hf . boxRadius ( i , a - 1 , b + 1 ) ; double newlb = 0.0 ; double newub = 0.0 ; for ( ObjectHeap . UnsortedIter < DoubleDBIDPair > iter = hf . pf [ i ] . nn . unsortedIter ( ) ; iter . valid ( ) ; iter . advance ( ) ) { DoubleDBIDPair entry = iter . get ( ) ; newub += entry . doubleValue ( ) ; if ( entry . doubleValue ( ) <= br ) { newlb += entry . doubleValue ( ) ; } } if ( newlb > hf . pf [ i ] . lbound ) { hf . pf [ i ] . lbound = newlb ; } if ( newub < hf . pf [ i ] . ubound ) { hf . pf [ i ] . ubound = newub ; }
public class TreeBarrier { /** * Waits for all threads to reach this barrier . * @ param ID the id of the thread attempting to reach the barrier . * @ throws InterruptedException if one of the threads was interrupted * while waiting on the barrier */ public void await ( int ID ) throws InterruptedException { } }
if ( parties == 1 ) // what are you doing ? ! return ; final boolean startCondition = competitionCondition ; int competingFor = ( locks . length * 2 - 1 - ID ) / 2 ; while ( competingFor >= 0 ) { final Lock node = locks [ competingFor ] ; if ( node . tryLock ( ) ) // we lose , must wait { synchronized ( node ) // ignore warning , its correct . We are using the lock both for competition AND to do an internal wait { while ( competitionCondition == startCondition ) node . wait ( ) ; } node . unlock ( ) ; wakeUpTarget ( competingFor * 2 + 1 ) ; wakeUpTarget ( competingFor * 2 + 2 ) ; return ; } else // we win , comete for another round ! { if ( competingFor == 0 ) break ; // we have won the last round ! competingFor = ( competingFor - 1 ) / 2 ; } } // We won ! Inform the losers competitionCondition = ! competitionCondition ; wakeUpTarget ( 0 ) ; // biggest loser
public class AfplibPackageImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public EEnum getPageOverlayConditionalProcessingPgOvType ( ) { } }
if ( pageOverlayConditionalProcessingPgOvTypeEEnum == null ) { pageOverlayConditionalProcessingPgOvTypeEEnum = ( EEnum ) EPackage . Registry . INSTANCE . getEPackage ( AfplibPackage . eNS_URI ) . getEClassifiers ( ) . get ( 191 ) ; } return pageOverlayConditionalProcessingPgOvTypeEEnum ;
public class FCWsByteBufferImpl { /** * @ see com . ibm . ws . bytebuffer . internal . WsByteBufferImpl # get ( byte [ ] , int , int ) */ @ Override public WsByteBuffer get ( byte [ ] dst , int offset , int length ) { } }
convertBufferIfNeeded ( ) ; return super . get ( dst , offset , length ) ;
public class CapsuleLauncher { /** * Sets the Java homes that will be used by the capsules created by { @ code newCapsule } . * @ param javaHomes a map from Java version strings to their respective JVM installation paths * @ return { @ code this } */ public CapsuleLauncher setJavaHomes ( Map < String , List < Path > > javaHomes ) { } }
final Field homes = getCapsuleField ( "JAVA_HOMES" ) ; if ( homes != null ) set ( null , homes , javaHomes ) ; return this ;
public class PartitionQueryRequest { /** * Use { @ link # getParamTypesMap ( ) } instead . */ @ java . lang . Deprecated public java . util . Map < java . lang . String , com . google . spanner . v1 . Type > getParamTypes ( ) { } }
return getParamTypesMap ( ) ;
public class CSNodeSorter { /** * Finds the initial entry for the unordered map . * @ param map The unordered map . * @ return The initial entry to start sorting the map from . */ private static < T extends Node > Map . Entry < CSNodeWrapper , T > findLastEntry ( Map < CSNodeWrapper , T > map ) { } }
Map . Entry < CSNodeWrapper , T > nodeEntry = null ; // Find the initial entry for ( final Map . Entry < CSNodeWrapper , T > entry : map . entrySet ( ) ) { if ( entry . getKey ( ) . getNextNode ( ) == null ) { nodeEntry = entry ; break ; } } return nodeEntry ;
public class ProfileIndexFrameWriter { /** * { @ inheritDoc } */ protected void addProfilesList ( Profiles profiles , String text , String tableSummary , Content body ) { } }
Content heading = HtmlTree . HEADING ( HtmlConstants . PROFILE_HEADING , true , profilesLabel ) ; Content div = HtmlTree . DIV ( HtmlStyle . indexContainer , heading ) ; HtmlTree ul = new HtmlTree ( HtmlTag . UL ) ; ul . setTitle ( profilesLabel ) ; String profileName ; for ( int i = 1 ; i < profiles . getProfileCount ( ) ; i ++ ) { profileName = ( Profile . lookup ( i ) ) . name ; // If the profile has valid packages to be documented , add it to the // left - frame generated for profile index . if ( configuration . shouldDocumentProfile ( profileName ) ) ul . addContent ( getProfile ( profileName ) ) ; } div . addContent ( ul ) ; body . addContent ( div ) ;
public class CmsSecurityManager { /** * Creates a new resource of the given resource type with the provided content and properties . < p > * If the provided content is null and the resource is not a folder , the content will be set to an empty byte array . < p > * @ param context the current request context * @ param resourcename the name of the resource to create ( full path ) * @ param type the type of the resource to create * @ param content the content for the new resource * @ param properties the properties for the new resource * @ return the created resource * @ throws CmsException if something goes wrong * @ see org . opencms . file . types . I _ CmsResourceType # createResource ( CmsObject , CmsSecurityManager , String , byte [ ] , List ) */ public synchronized CmsResource createResource ( CmsRequestContext context , String resourcename , int type , byte [ ] content , List < CmsProperty > properties ) throws CmsException { } }
String checkExistsPath = "/" . equals ( resourcename ) ? "/" : CmsFileUtil . removeTrailingSeparator ( resourcename ) ; // We use checkExistsPath instead of resourcename because when creating a folder / foo / bar / , we want to fail // if a file / foo / bar already exists . if ( existsResource ( context , checkExistsPath , CmsResourceFilter . ALL ) ) { // check if the resource already exists by name throw new CmsVfsResourceAlreadyExistsException ( org . opencms . db . generic . Messages . get ( ) . container ( org . opencms . db . generic . Messages . ERR_RESOURCE_WITH_NAME_ALREADY_EXISTS_1 , resourcename ) ) ; } CmsDbContext dbc = m_dbContextFactory . getDbContext ( context ) ; CmsResource newResource = null ; try { checkOfflineProject ( dbc ) ; newResource = m_driverManager . createResource ( dbc , resourcename , type , content , properties ) ; } catch ( Exception e ) { dbc . report ( null , Messages . get ( ) . container ( Messages . ERR_CREATE_RESOURCE_1 , resourcename ) , e ) ; } finally { dbc . clear ( ) ; } return newResource ;
public class BaseField { /** * Move the physical binary data to this field . * ( Must be the same physical type . . . setText makes sure of that ) * This is a little tricky . First , I call the behaviors ( doSetData ) * which actually moves the data . Then , I call the HandleFieldChange * listener for each field , except on a read move , where the HandleFieldChange * listener is called in the doValidRecord method because each field comes * in one at a time , and if a listener modifies or accesses * another field , the field may not have been moved from the db yet . * @ param data The raw data to move to this field . * @ param iDisplayOption If true , display the new field . * @ param iMoveMove The move mode . * @ return An error code ( NORMAL _ RETURN for success ) . */ public int setData ( Object data , boolean bDisplayOption , int iMoveMode ) { } }
int iErrorCode = DBConstants . NORMAL_RETURN ; FieldListener nextListener = ( FieldListener ) this . getNextValidListener ( iMoveMode ) ; Object dataOld = m_data ; boolean m_bModifiedOld = m_bModified ; if ( nextListener != null ) { boolean bOldState = nextListener . setEnabledListener ( false ) ; // Disable the listener to eliminate echos iErrorCode = nextListener . doSetData ( data , bDisplayOption , iMoveMode ) ; nextListener . setEnabledListener ( bOldState ) ; // Reenable } else iErrorCode = this . doSetData ( data , bDisplayOption , iMoveMode ) ; if ( iErrorCode == DBConstants . NORMAL_RETURN ) if ( m_bJustChanged ) { iErrorCode = this . handleFieldChanged ( bDisplayOption , iMoveMode ) ; if ( iErrorCode != DBConstants . NORMAL_RETURN ) { // Revert the data to old if ( dataOld != m_data ) { // Always if ( ( dataOld == data ) || ( ( dataOld != null ) && ( dataOld . equals ( data ) ) ) ) // Make sure behaviors aren ' t causing change iErrorCode = DBConstants . NORMAL_RETURN ; // If changes soely due to a behavior would cause an error , don ' t return an error m_data = dataOld ; m_bModified = m_bModifiedOld ; m_bJustChanged = false ; } } } if ( bDisplayOption ) // Can ' t do only if ( m _ bJustChanged ) ( last value may not be on screen ) this . displayField ( ) ; return iErrorCode ;
public class CouchDBSchemaManager { /** * Creates the view for select specific fields . * @ param views * the views */ private void createViewForSelectSpecificFields ( Map < String , MapReduce > views ) { } }
if ( views . get ( CouchDBConstants . FIELDS ) == null ) { MapReduce mapr = new MapReduce ( ) ; mapr . setMap ( "function(doc){for(field in doc){emit(field, doc[field]);}}" ) ; views . put ( CouchDBConstants . FIELDS , mapr ) ; }
public class AbstractProcessTracker { /** * Waits for the process to exit ; this method blocks until the process has completed ( at which point it returns the process * exit code ) or until < code > deadline < / code > has elapsed , at which point it returns Integer . MIN _ VALUE * @ param deadline * @ return the exit code of the process */ public int waitForExit ( final Deadline deadline ) { } }
final int intervalMax = 4500 ; // the maximum time between polls is 4.5 seconds int interval = 5 ; // initial sleep will be 3 * this , so 15ms while ( ! isFinished ( ) && deadline . isValid ( ) ) { try { // Check very frequently initially , tripling the wait each time // this allows us to return very quickly for short - running processes , but not hammer the CPU for long - running processes if ( interval != intervalMax ) { interval = Math . min ( interval * 3 , intervalMax ) ; } final boolean hasExited = this . process . waitFor ( Math . min ( deadline . getTimeLeft ( ) , interval ) , TimeUnit . MILLISECONDS ) ; if ( hasExited ) { // Wait for the deadline to expire or for any output reading to complete while ( deadline . isValid ( ) && isStillReadingOutput ( ) ) new Timeout ( 10 , TimeUnit . MILLISECONDS ) . sleep ( ) ; return exitCode ( ) ; } } catch ( InterruptedException e ) { } } if ( deadline . isExpired ( ) ) return Integer . MIN_VALUE ; else return exitCode ( ) ;
public class DMatrixSparseCSC { /** * Given the histogram of columns compute the col _ idx for the matrix . nz _ length is automatically set and * nz _ values will grow if needed . * @ param histogram histogram of column values in the sparse matrix . modified , see above . */ public void histogramToStructure ( int histogram [ ] ) { } }
col_idx [ 0 ] = 0 ; int index = 0 ; for ( int i = 1 ; i <= numCols ; i ++ ) { col_idx [ i ] = index += histogram [ i - 1 ] ; } nz_length = index ; growMaxLength ( nz_length , false ) ; if ( col_idx [ numCols ] != nz_length ) throw new RuntimeException ( "Egads" ) ;