signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class OauthRawGcsService { /** * Write the provided chunk at the offset specified in the token . If finalChunk is set , the file
* will be closed . */
private RawGcsCreationToken put ( final GcsRestCreationToken token , ByteBuffer chunk , final boolean isFinalChunk , long timeoutMillis ) throws IOException { } } | final int length = chunk . remaining ( ) ; HTTPRequest req = createPutRequest ( token , chunk , isFinalChunk , timeoutMillis , length ) ; HTTPRequestInfo info = new HTTPRequestInfo ( req ) ; HTTPResponse response ; try { response = urlfetch . fetch ( req ) ; } catch ( IOException e ) { throw createIOException ( info , e ) ; } return handlePutResponse ( token , isFinalChunk , length , info , response ) ; |
public class Matrix3x2d { /** * Set the elements of this matrix to the ones in < code > m < / code > .
* @ param m
* the matrix to copy the elements from
* @ return this */
public Matrix3x2d set ( Matrix3x2dc m ) { } } | if ( m instanceof Matrix3x2d ) { MemUtil . INSTANCE . copy ( ( Matrix3x2d ) m , this ) ; } else { setMatrix3x2dc ( m ) ; } return this ; |
public class FSDirectory { /** * Return the inode array representing the given inode ' s full path name
* @ param inode an inode
* @ return the node array representation of the given inode ' s full path
* @ throws IOException if the inode is invalid */
static INode [ ] getINodeArray ( INode inode ) throws IOException { } } | // calculate the depth of this inode from root
int depth = getPathDepth ( inode ) ; INode [ ] inodes = new INode [ depth ] ; // fill up the inodes in the path from this inode to root
for ( int i = 0 ; i < depth ; i ++ ) { inodes [ depth - i - 1 ] = inode ; inode = inode . parent ; } return inodes ; |
public class PDFView { /** * Use a file as the pdf source */
public Configurator fromFile ( File file ) { } } | if ( ! file . exists ( ) ) throw new FileNotFoundException ( file . getAbsolutePath ( ) + "does not exist." ) ; return new Configurator ( Uri . fromFile ( file ) ) ; |
public class UpdateVirtualNodeRequestMarshaller { /** * Marshall the given parameter object . */
public void marshall ( UpdateVirtualNodeRequest updateVirtualNodeRequest , ProtocolMarshaller protocolMarshaller ) { } } | if ( updateVirtualNodeRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( updateVirtualNodeRequest . getClientToken ( ) , CLIENTTOKEN_BINDING ) ; protocolMarshaller . marshall ( updateVirtualNodeRequest . getMeshName ( ) , MESHNAME_BINDING ) ; protocolMarshaller . marshall ( updateVirtualNodeRequest . getSpec ( ) , SPEC_BINDING ) ; protocolMarshaller . marshall ( updateVirtualNodeRequest . getVirtualNodeName ( ) , VIRTUALNODENAME_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class DBInstance { /** * Contains one or more identifiers of Aurora DB clusters to which the RDS DB instance is replicated as a Read
* Replica . For example , when you create an Aurora Read Replica of an RDS MySQL DB instance , the Aurora MySQL DB
* cluster for the Aurora Read Replica is shown . This output does not contain information about cross region Aurora
* Read Replicas .
* < note >
* Currently , each RDS DB instance can have only one Aurora Read Replica .
* < / note >
* @ return Contains one or more identifiers of Aurora DB clusters to which the RDS DB instance is replicated as a
* Read Replica . For example , when you create an Aurora Read Replica of an RDS MySQL DB instance , the Aurora
* MySQL DB cluster for the Aurora Read Replica is shown . This output does not contain information about
* cross region Aurora Read Replicas . < / p > < note >
* Currently , each RDS DB instance can have only one Aurora Read Replica . */
public java . util . List < String > getReadReplicaDBClusterIdentifiers ( ) { } } | if ( readReplicaDBClusterIdentifiers == null ) { readReplicaDBClusterIdentifiers = new com . amazonaws . internal . SdkInternalList < String > ( ) ; } return readReplicaDBClusterIdentifiers ; |
public class ZooKeeperUtil { /** * Convert byte array to original long value .
* @ param bytes the byte array
* @ return the original long value */
public static long bytesToLong ( byte [ ] bytes ) { } } | long l = 0 ; for ( int i = 0 ; i < 0 + 8 ; i ++ ) { l <<= 8 ; l ^= bytes [ i ] & 0xFF ; } return l ; |
public class CProductLocalServiceBaseImpl { /** * Returns the c product matching the UUID and group .
* @ param uuid the c product ' s UUID
* @ param groupId the primary key of the group
* @ return the matching c product
* @ throws PortalException if a matching c product could not be found */
@ Override public CProduct getCProductByUuidAndGroupId ( String uuid , long groupId ) throws PortalException { } } | return cProductPersistence . findByUUID_G ( uuid , groupId ) ; |
public class InterfaceEndpointsInner { /** * Creates or updates an interface endpoint in the specified resource group .
* @ param resourceGroupName The name of the resource group .
* @ param interfaceEndpointName The name of the interface endpoint .
* @ param parameters Parameters supplied to the create or update interface endpoint operation
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable for the request */
public Observable < InterfaceEndpointInner > createOrUpdateAsync ( String resourceGroupName , String interfaceEndpointName , InterfaceEndpointInner parameters ) { } } | return createOrUpdateWithServiceResponseAsync ( resourceGroupName , interfaceEndpointName , parameters ) . map ( new Func1 < ServiceResponse < InterfaceEndpointInner > , InterfaceEndpointInner > ( ) { @ Override public InterfaceEndpointInner call ( ServiceResponse < InterfaceEndpointInner > response ) { return response . body ( ) ; } } ) ; |
public class FileOutputFormat { /** * Get the { @ link CompressionCodec } for compressing the job outputs .
* @ param job the { @ link Job } to look in
* @ param defaultValue the { @ link CompressionCodec } to return if not set
* @ return the { @ link CompressionCodec } to be used to compress the
* job outputs
* @ throws IllegalArgumentException if the class was specified , but not found */
public static Class < ? extends CompressionCodec > getOutputCompressorClass ( JobContext job , Class < ? extends CompressionCodec > defaultValue ) { } } | Class < ? extends CompressionCodec > codecClass = defaultValue ; Configuration conf = job . getConfiguration ( ) ; String name = conf . get ( "mapred.output.compression.codec" ) ; if ( name != null ) { try { codecClass = conf . getClassByName ( name ) . asSubclass ( CompressionCodec . class ) ; } catch ( ClassNotFoundException e ) { throw new IllegalArgumentException ( "Compression codec " + name + " was not found." , e ) ; } } return codecClass ; |
public class TaintFrameModelingVisitor { /** * Push a value to the stack
* The information passed will be viewable when the stack will be print . ( See printStackState ( ) )
* @ param debugInfo String representation of the value push */
private void pushSafeDebug ( String debugInfo ) { } } | getFrame ( ) . pushValue ( new Taint ( Taint . State . SAFE ) . setDebugInfo ( debugInfo ) ) ; |
public class PageFlowViewHandler { /** * If we are in a request forwarded by { @ link PageFlowNavigationHandler } , returns < code > null < / code > ; otherwise ,
* delegates to the base ViewHandler . */
public UIViewRoot restoreView ( FacesContext context , String viewId ) { } } | ExternalContext externalContext = context . getExternalContext ( ) ; Object request = externalContext . getRequest ( ) ; HttpServletRequest httpRequest = null ; if ( request instanceof HttpServletRequest ) { httpRequest = ( HttpServletRequest ) request ; // If we did a forward in PageFlowNavigationHandler , don ' t try to restore the view .
if ( httpRequest . getAttribute ( PageFlowNavigationHandler . ALREADY_FORWARDED_ATTR ) != null ) { return null ; } // Create / restore the backing bean that corresponds to this request .
HttpServletResponse response = ( HttpServletResponse ) externalContext . getResponse ( ) ; ServletContext servletContext = ( ServletContext ) externalContext . getContext ( ) ; setBackingBean ( httpRequest , response , servletContext ) ; } UIViewRoot viewRoot = _delegate . restoreView ( context , viewId ) ; savePreviousPageInfo ( httpRequest , externalContext , viewId , viewRoot ) ; return viewRoot ; |
public class VarArgsToMapAdapterGenerator { /** * Generate byte code that
* < p > < ul >
* < li > takes a specified number of variables as arguments ( types of the arguments are provided in { @ code javaTypes } )
* < li > put the variables in a map ( keys of the map are provided in { @ code names } )
* < li > invoke the provided { @ code function } with the map
* < li > return with the result of the function call ( type must match { @ code returnType } )
* < / ul > < / p > */
public static MethodHandle generateVarArgsToMapAdapter ( Class < ? > returnType , List < Class < ? > > javaTypes , List < String > names , Function < Map < String , Object > , Object > function ) { } } | checkCondition ( javaTypes . size ( ) <= 254 , NOT_SUPPORTED , "Too many arguments for vararg function" ) ; CallSiteBinder callSiteBinder = new CallSiteBinder ( ) ; ClassDefinition classDefinition = new ClassDefinition ( a ( PUBLIC , FINAL ) , makeClassName ( "VarArgsToMapAdapter" ) , type ( Object . class ) ) ; ImmutableList . Builder < Parameter > parameterListBuilder = ImmutableList . builder ( ) ; for ( int i = 0 ; i < javaTypes . size ( ) ; i ++ ) { Class < ? > javaType = javaTypes . get ( i ) ; parameterListBuilder . add ( arg ( "input_" + i , javaType ) ) ; } ImmutableList < Parameter > parameterList = parameterListBuilder . build ( ) ; MethodDefinition methodDefinition = classDefinition . declareMethod ( a ( PUBLIC , STATIC ) , "varArgsToMap" , type ( returnType ) , parameterList ) ; BytecodeBlock body = methodDefinition . getBody ( ) ; // ImmutableMap . Builder can not be used here because it doesn ' t allow nulls .
Variable map = methodDefinition . getScope ( ) . declareVariable ( "map" , methodDefinition . getBody ( ) , invokeStatic ( Maps . class , "newHashMapWithExpectedSize" , HashMap . class , constantInt ( javaTypes . size ( ) ) ) ) ; for ( int i = 0 ; i < javaTypes . size ( ) ; i ++ ) { body . append ( map . invoke ( "put" , Object . class , constantString ( names . get ( i ) ) . cast ( Object . class ) , parameterList . get ( i ) . cast ( Object . class ) ) ) ; } body . append ( loadConstant ( callSiteBinder , function , Function . class ) . invoke ( "apply" , Object . class , map . cast ( Object . class ) ) . cast ( returnType ) . ret ( ) ) ; Class < ? > generatedClass = defineClass ( classDefinition , Object . class , callSiteBinder . getBindings ( ) , new DynamicClassLoader ( VarArgsToMapAdapterGenerator . class . getClassLoader ( ) ) ) ; return Reflection . methodHandle ( generatedClass , "varArgsToMap" , javaTypes . toArray ( new Class < ? > [ javaTypes . size ( ) ] ) ) ; |
public class AuthenticationManagerCommandAction { /** * Persists the user accounts to a properties file that is only available to this site only . */
private void persistRealmChanges ( ) { } } | configManager . persistProperties ( realm . getProperties ( ) , new File ( applicationContentDir , PersistablePropertiesRealm . REALM_FILE_NAME ) , null ) ; |
public class AmazonAlexaForBusinessClient { /** * Creates a room with the specified details .
* @ param createRoomRequest
* @ return Result of the CreateRoom operation returned by the service .
* @ throws AlreadyExistsException
* The resource being created already exists .
* @ throws LimitExceededException
* You are performing an action that would put you beyond your account ' s limits .
* @ sample AmazonAlexaForBusiness . CreateRoom
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / alexaforbusiness - 2017-11-09 / CreateRoom " target = " _ top " > AWS
* API Documentation < / a > */
@ Override public CreateRoomResult createRoom ( CreateRoomRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeCreateRoom ( request ) ; |
public class I18N { /** * Get an annotation description string . This is a format pattern which will
* describe a BugAnnotation in the context of a particular bug instance . Its
* single format argument is the BugAnnotation .
* @ param key
* the annotation description to retrieve */
public String getAnnotationDescription ( String key ) { } } | try { return annotationDescriptionBundle . getString ( key ) ; } catch ( MissingResourceException mre ) { if ( DEBUG ) { return "TRANSLATE(" + key + ") (param={0}}" ; } else { try { return englishAnnotationDescriptionBundle . getString ( key ) ; } catch ( MissingResourceException mre2 ) { return key + " {0}" ; } } } |
public class ExecUtils { /** * Tries to find the given executable ( specified by its name ) in the system ' s path . It checks for a file having
* one of the extensions contained in { @ link # EXECUTABLE _ EXTENSIONS } , and checks that this file is executable .
* @ param executable the name of the program to find , generally without the extension
* @ return the file of the program to be searched for if found . { @ code null } otherwise . */
public static File findExecutableInSystemPath ( String executable ) { } } | String systemPath = System . getenv ( "PATH" ) ; // Fast failure if we don ' t have the PATH defined .
if ( systemPath == null ) { return null ; } String [ ] pathDirs = systemPath . split ( File . pathSeparator ) ; for ( String pathDir : pathDirs ) { File dir = new File ( pathDir ) ; if ( dir . isDirectory ( ) ) { File file = findExecutableInDirectory ( executable , dir ) ; if ( file != null ) { return file ; } } } return null ; |
public class AmazonElasticLoadBalancingClient { /** * Describes the specified target groups or all of your target groups . By default , all target groups are described .
* Alternatively , you can specify one of the following to filter the results : the ARN of the load balancer , the
* names of one or more target groups , or the ARNs of one or more target groups .
* To describe the targets for a target group , use < a > DescribeTargetHealth < / a > . To describe the attributes of a
* target group , use < a > DescribeTargetGroupAttributes < / a > .
* @ param describeTargetGroupsRequest
* @ return Result of the DescribeTargetGroups operation returned by the service .
* @ throws LoadBalancerNotFoundException
* The specified load balancer does not exist .
* @ throws TargetGroupNotFoundException
* The specified target group does not exist .
* @ sample AmazonElasticLoadBalancing . DescribeTargetGroups
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / elasticloadbalancingv2-2015-12-01 / DescribeTargetGroups "
* target = " _ top " > AWS API Documentation < / a > */
@ Override public DescribeTargetGroupsResult describeTargetGroups ( DescribeTargetGroupsRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeDescribeTargetGroups ( request ) ; |
public class SessionStoreInterceptor { /** * Returns all property that stripes will replace for request .
* @ param request Request .
* @ return All property that stripes will replace for request . */
protected Set < String > getParameters ( HttpServletRequest request ) { } } | Set < String > parameters = new HashSet < String > ( ) ; Enumeration < ? > paramNames = request . getParameterNames ( ) ; while ( paramNames . hasMoreElements ( ) ) { String parameter = ( String ) paramNames . nextElement ( ) ; // Keep only first property .
while ( parameter . contains ( "." ) || parameter . contains ( "[" ) ) { if ( parameter . contains ( "." ) ) { parameter = parameter . substring ( 0 , parameter . indexOf ( "." ) ) ; } if ( parameter . contains ( "[" ) ) { parameter = parameter . substring ( 0 , parameter . indexOf ( "[" ) ) ; } } parameters . add ( parameter ) ; } return parameters ; |
public class AFPPostProcessor { /** * return the rmsd of two blocks */
private static double combineRmsd ( int b1 , int b2 , AFPChain afpChain , Atom [ ] ca1 , Atom [ ] ca2 ) { } } | int i ; int afpn = 0 ; int [ ] afpChainList = afpChain . getAfpChainList ( ) ; int [ ] block2Afp = afpChain . getBlock2Afp ( ) ; int [ ] blockSize = afpChain . getBlockSize ( ) ; int [ ] list = new int [ blockSize [ b1 ] + blockSize [ b2 ] ] ; for ( i = block2Afp [ b1 ] ; i < block2Afp [ b1 ] + blockSize [ b1 ] ; i ++ ) { list [ afpn ++ ] = afpChainList [ i ] ; } for ( i = block2Afp [ b2 ] ; i < block2Afp [ b2 ] + blockSize [ b2 ] ; i ++ ) { list [ afpn ++ ] = afpChainList [ i ] ; } double rmsd = AFPChainer . calAfpRmsd ( afpn , list , 0 , afpChain , ca1 , ca2 ) ; afpChain . setBlock2Afp ( block2Afp ) ; afpChain . setBlockSize ( blockSize ) ; afpChain . setAfpChainList ( afpChainList ) ; return rmsd ; |
public class HostAvailabilityListener { /** * This implements the QueryFailureListener interface
* @ param queryBatch the exception with information about the failed query attempt */
public void processFailure ( QueryBatchException queryBatch ) { } } | boolean isHostUnavailableException = processException ( queryBatch . getBatcher ( ) , queryBatch , queryBatch . getClient ( ) . getHost ( ) ) ; if ( isHostUnavailableException == true ) { try { logger . warn ( "Retrying failed batch: {}, results so far: {}, forest: {}, forestBatch: {}, forest results so far: {}" , queryBatch . getJobBatchNumber ( ) , queryBatch . getJobResultsSoFar ( ) , queryBatch . getForest ( ) . getForestName ( ) , queryBatch . getForestBatchNumber ( ) , queryBatch . getForestResultsSoFar ( ) ) ; queryBatch . getBatcher ( ) . retryWithFailureListeners ( queryBatch ) ; } catch ( RuntimeException e ) { logger . error ( "Exception during retry" , e ) ; processFailure ( new QueryBatchException ( queryBatch , e ) ) ; } } |
public class MinimizeEnergyPrune { /** * Returns the total energy after removing a corner
* @ param removed index of the corner that is being removed
* @ param corners list of corner indexes */
protected double energyRemoveCorner ( int removed , GrowQueue_I32 corners ) { } } | double total = 0 ; int cornerA = CircularIndex . addOffset ( removed , - 1 , corners . size ( ) ) ; int cornerB = CircularIndex . addOffset ( removed , 1 , corners . size ( ) ) ; total += computeSegmentEnergy ( corners , cornerA , cornerB ) ; if ( cornerA > cornerB ) { for ( int i = cornerB ; i < cornerA ; i ++ ) total += energySegment [ i ] ; } else { for ( int i = 0 ; i < cornerA ; i ++ ) { total += energySegment [ i ] ; } for ( int i = cornerB ; i < corners . size ( ) ; i ++ ) { total += energySegment [ i ] ; } } return total ; |
public class FessMessages { /** * Add the created action message for the key ' success . crud _ update _ crud _ table ' with parameters .
* < pre >
* message : Updated data .
* < / pre >
* @ param property The property name for the message . ( NotNull )
* @ return this . ( NotNull ) */
public FessMessages addSuccessCrudUpdateCrudTable ( String property ) { } } | assertPropertyNotNull ( property ) ; add ( property , new UserMessage ( SUCCESS_crud_update_crud_table ) ) ; return this ; |
public class druidGParser { /** * druidG . g : 291:1 : simpleDim returns [ Pair < String , String > dims ] : ( a = ID ( WS AS WS b = ID ) ? ) ; */
public final Pair < String , String > simpleDim ( ) throws RecognitionException { } } | Pair < String , String > dims = null ; Token a = null ; Token b = null ; try { // druidG . g : 292:2 : ( ( a = ID ( WS AS WS b = ID ) ? ) )
// druidG . g : 292:4 : ( a = ID ( WS AS WS b = ID ) ? )
{ // druidG . g : 292:4 : ( a = ID ( WS AS WS b = ID ) ? )
// druidG . g : 292:5 : a = ID ( WS AS WS b = ID ) ?
{ a = ( Token ) match ( input , ID , FOLLOW_ID_in_simpleDim1974 ) ; // druidG . g : 292:10 : ( WS AS WS b = ID ) ?
int alt130 = 2 ; int LA130_0 = input . LA ( 1 ) ; if ( ( LA130_0 == WS ) ) { int LA130_1 = input . LA ( 2 ) ; if ( ( LA130_1 == AS ) ) { alt130 = 1 ; } } switch ( alt130 ) { case 1 : // druidG . g : 292:11 : WS AS WS b = ID
{ match ( input , WS , FOLLOW_WS_in_simpleDim1977 ) ; match ( input , AS , FOLLOW_AS_in_simpleDim1979 ) ; match ( input , WS , FOLLOW_WS_in_simpleDim1981 ) ; b = ( Token ) match ( input , ID , FOLLOW_ID_in_simpleDim1985 ) ; } break ; } dims = ( b != null ) ? new Pair < String , String > ( ( a != null ? a . getText ( ) : null ) , ( b != null ? b . getText ( ) : null ) ) : new Pair < String , String > ( ( a != null ? a . getText ( ) : null ) , null ) ; } } } catch ( RecognitionException re ) { reportError ( re ) ; recover ( input , re ) ; } finally { // do for sure before leaving
} return dims ; |
public class TypeValidator { /** * Expect the type to be a string or symbol , or a type convertible to a string . If the expectation
* is not met , issue a warning at the provided node ' s source code position . */
void expectStringOrSymbol ( Node n , JSType type , String msg ) { } } | if ( ! type . matchesStringContext ( ) && ! type . matchesSymbolContext ( ) ) { mismatch ( n , msg , type , STRING_SYMBOL ) ; } |
public class CoreNLPSentence { /** * Span from the start of the first token to the end of the last . */
public OffsetRange < CharOffset > offsets ( ) { } } | final CharOffset start = tokens . get ( 0 ) . offsets ( ) . startInclusive ( ) ; final CharOffset end = tokens . reverse ( ) . get ( 0 ) . offsets ( ) . endInclusive ( ) ; return OffsetRange . charOffsetRange ( start . asInt ( ) , end . asInt ( ) ) ; |
public class BaseDelegatingExpirationPolicy { /** * Add policy .
* @ param name the name
* @ param policy the policy */
public void addPolicy ( final Enum name , final ExpirationPolicy policy ) { } } | LOGGER . trace ( "Adding expiration policy [{}] with name [{}]" , policy , name ) ; addPolicy ( name . name ( ) , policy ) ; |
public class AccentSwitch { /** * Called from onTouchEvent to end a drag operation .
* @ param ev
* Event that triggered the end of drag mode - ACTION _ UP or
* ACTION _ CANCEL */
private void stopDrag ( MotionEvent ev ) { } } | mTouchMode = TOUCH_MODE_IDLE ; // Up and not canceled , also checks the switch has not been disabled
// during the drag
boolean commitChange = ev . getAction ( ) == MotionEvent . ACTION_UP && isEnabled ( ) ; cancelSuperTouch ( ev ) ; if ( commitChange ) { boolean newState ; mVelocityTracker . computeCurrentVelocity ( 1000 ) ; float xvel = mVelocityTracker . getXVelocity ( ) ; if ( Math . abs ( xvel ) > mMinFlingVelocity ) { // newState = isLayoutRtl ( ) ? ( xvel < 0 ) : ( xvel > 0 ) ;
newState = xvel > 0 ; } else { newState = getTargetCheckedState ( ) ; } animateThumbToCheckedState ( newState ) ; } else { animateThumbToCheckedState ( isChecked ( ) ) ; } |
public class Filter { /** * Works out what type of command has been put into the method .
* @ param cmd The string of text from SServer .
* @ param f An instance of CommandFilter . */
public void run ( String cmd , CommandFilter f ) { } } | CommandWords commandWord = null ; if ( cmd . contains ( " " ) ) { try { commandWord = CommandWords . valueOf ( cmd . substring ( 1 , cmd . indexOf ( " " ) ) ) ; } catch ( IllegalArgumentException e ) { commandWord = CommandWords . INVALID_COMMAND_WORD ; } } else { commandWord = CommandWords . INVALID_COMMAND_WORD ; } switch ( commandWord ) { case change_player_type : f . changePlayerTypeCommand ( cmd . substring ( 20 , cmd . length ( ) - 1 ) ) ; break ; case error : f . errorCommand ( cmd . substring ( 7 , cmd . length ( ) - 1 ) ) ; break ; case hear : f . hearCommand ( cmd . substring ( 6 , cmd . length ( ) - 1 ) ) ; break ; case init : f . initCommand ( cmd . substring ( 6 , cmd . length ( ) - 1 ) ) ; break ; case ok : f . okCommand ( cmd . substring ( 4 , cmd . length ( ) - 1 ) ) ; break ; case player_param : f . playerParamCommand ( cmd . substring ( 14 , cmd . length ( ) - 1 ) ) ; break ; case player_type : f . playerTypeCommand ( cmd . substring ( 13 , cmd . length ( ) - 1 ) ) ; break ; case see : f . seeCommand ( cmd . substring ( 5 , cmd . length ( ) - 1 ) ) ; break ; case see_global : f . seeCommand ( cmd . substring ( 12 , cmd . length ( ) - 1 ) ) ; break ; case sense_body : f . senseBodyCommand ( cmd . substring ( 12 , cmd . length ( ) - 1 ) ) ; break ; case server_param : f . serverParamCommand ( cmd . substring ( 14 , cmd . length ( ) - 1 ) ) ; break ; case warning : f . warningCommand ( cmd . substring ( 9 , cmd . length ( ) - 1 ) ) ; break ; case INVALID_COMMAND_WORD : default : throw new Error ( "Invalid command received: \"" + cmd + "\"" ) ; } |
public class TaskGetKey { /** * Top - level non - recursive invoke */
@ Override public void dinvoke ( H2ONode sender ) { } } | _h2o = sender ; Key k = _key ; _key = null ; // Not part of the return result
assert k . home ( ) ; // Gets are always from home ( less we do replication )
// Shipping a result ? Track replicas so we can invalidate . There ' s a
// narrow race on a moving K / V mapping tracking this Value just as it gets
// deleted - in which case , simply retry for another Value .
do _val = Value . STORE_get ( k ) ; // The return result
while ( _val != null && ! _val . setReplica ( sender ) ) ; tryComplete ( ) ; |
public class PKCS12 { /** * Check if the file provide is PKCS12
* @ param cert certificate to be validated
* @ param pass password to be provided
* @ throws Exception to indicate an invalid certificate */
public static void validate ( byte [ ] cert , String pass ) throws Exception { } } | try { KeyStore keyStore = KeyStore . getInstance ( ALGORITHM ) ; keyStore . load ( new ByteArrayInputStream ( cert ) , pass . toCharArray ( ) ) ; } catch ( Exception e ) { throw new Exception ( "Certificate is not valid!" , e ) ; } |
public class SimpleCassandraDao { /** * Get multiple values
* @ param keys
* @ return */
public Map < String , String > getMulti ( String columnName , String ... keys ) { } } | MultigetSliceQuery < String , String , String > q = createMultigetSliceQuery ( keyspace , serializer , serializer , serializer ) ; q . setColumnFamily ( columnFamilyName ) ; q . setKeys ( keys ) ; q . setColumnNames ( columnName ) ; QueryResult < Rows < String , String , String > > r = q . execute ( ) ; Rows < String , String , String > rows = r . get ( ) ; Map < String , String > ret = new HashMap < String , String > ( keys . length ) ; for ( String k : keys ) { HColumn < String , String > c = rows . getByKey ( k ) . getColumnSlice ( ) . getColumnByName ( columnName ) ; if ( c != null && c . getValue ( ) != null ) { ret . put ( k , c . getValue ( ) ) ; } } return ret ; |
public class RobotoTypefaces { /** * Set up typeface for TextView .
* @ param textView The text view
* @ param typeface The value of " robotoTypeface " attribute */
public static void setUpTypeface ( @ NonNull TextView textView , @ RobotoTypeface int typeface ) { } } | setUpTypeface ( textView , obtainTypeface ( textView . getContext ( ) , typeface ) ) ; |
public class BaseRTMPHandler { /** * { @ inheritDoc } */
public void connectionOpened ( RTMPConnection conn ) { } } | if ( log . isTraceEnabled ( ) ) { log . trace ( "connectionOpened - conn: {} state: {}" , conn , conn . getState ( ) ) ; } conn . open ( ) ; // start the wait for handshake
conn . startWaitForHandshake ( ) ; |
public class ConstraintSolver { /** * Remove a batch of { @ link Variable } s from this { @ link ConstraintSolver } .
* @ param v The batch of { @ link Variable } s to remove . */
public final void removeVariables ( Variable [ ] v ) throws VariableNotFound , IllegalVariableRemoval { } } | HashSet < Constraint > incidentRevised = new HashSet < Constraint > ( ) ; // Keep track of solvers of dependent variables
HashMap < ConstraintSolver , ArrayList < Variable > > solversToDepVars = new HashMap < ConstraintSolver , ArrayList < Variable > > ( ) ; for ( Variable var : v ) { if ( ! this . theNetwork . containsVariable ( var ) ) throw new VariableNotFound ( var ) ; Constraint [ ] incident = this . theNetwork . getIncidentEdges ( var ) ; for ( Constraint con : incident ) { if ( ( ! con . isAutoRemovable ( ) && ! ( con instanceof DummyConstraint ) ) ) { throw new IllegalVariableRemoval ( var , this . theNetwork . getIncidentEdges ( var ) ) ; } else if ( con instanceof DummyConstraint ) { Constraint toRemove = this . removeDummyConstraint ( ( DummyConstraint ) con ) ; if ( toRemove != null ) incidentRevised . add ( toRemove ) ; } else incidentRevised . add ( con ) ; } // Gather solvers of dependent variables
for ( Variable depVar : var . getDependentVariables ( ) ) { if ( ! solversToDepVars . containsKey ( depVar . getConstraintSolver ( ) ) ) { solversToDepVars . put ( depVar . getConstraintSolver ( ) , new ArrayList < Variable > ( ) ) ; } solversToDepVars . get ( depVar . getConstraintSolver ( ) ) . add ( depVar ) ; } } // Remove dependent variables
for ( ConstraintSolver cs : solversToDepVars . keySet ( ) ) { cs . removeVariables ( solversToDepVars . get ( cs ) . toArray ( new Variable [ solversToDepVars . get ( cs ) . size ( ) ] ) ) ; logger . finest ( "Removed " + solversToDepVars . get ( cs ) . size ( ) + " dependent variables" ) ; } this . removeConstraints ( incidentRevised . toArray ( new Constraint [ incidentRevised . size ( ) ] ) ) ; removeVariablesSub ( v ) ; for ( Variable var : v ) { this . theNetwork . removeVariable ( var ) ; } for ( ArrayList < Variable > vec : components . values ( ) ) { vec . removeAll ( Arrays . asList ( v ) ) ; } if ( ! skipPropagation && autoprop && checkDomainsInstantiated ( ) ) this . propagate ( ) ; logger . finest ( "Removed variables " + Arrays . toString ( v ) ) ; |
public class CommerceNotificationAttachmentUtil { /** * Returns the commerce notification attachment where uuid = & # 63 ; and groupId = & # 63 ; or throws a { @ link NoSuchNotificationAttachmentException } if it could not be found .
* @ param uuid the uuid
* @ param groupId the group ID
* @ return the matching commerce notification attachment
* @ throws NoSuchNotificationAttachmentException if a matching commerce notification attachment could not be found */
public static CommerceNotificationAttachment findByUUID_G ( String uuid , long groupId ) throws com . liferay . commerce . notification . exception . NoSuchNotificationAttachmentException { } } | return getPersistence ( ) . findByUUID_G ( uuid , groupId ) ; |
public class ImageMetadataFilter { /** * XMLFilter methods - - - - - */
@ Override public void startElement ( final String uri , final String localName , final String name , final Attributes atts ) throws SAXException { } } | if ( TOPIC_IMAGE . matches ( atts ) || SVG_D_SVGREF . matches ( atts ) ) { final XMLUtils . AttributesBuilder a = new XMLUtils . AttributesBuilder ( atts ) ; final URI href = toURI ( atts . getValue ( ATTRIBUTE_NAME_HREF ) ) ; if ( href != null ) { final URI imgInput = getImageFile ( href ) ; if ( imgInput != null ) { Attributes m = cache . computeIfAbsent ( imgInput , this :: readMetadata ) ; a . addAll ( m ) ; } else { logger . error ( "Image file " + href + " not found" ) ; } } depth = 1 ; super . startPrefixMapping ( DITA_OT_NS_PREFIX , DITA_OT_NS ) ; super . startElement ( uri , localName , name , a . build ( ) ) ; } else { if ( depth > 0 ) { depth ++ ; } super . startElement ( uri , localName , name , atts ) ; } |
public class JodaBeanSimpleMapWriter { /** * write table */
private Object writeTable ( SerIterator itemIterator ) { } } | List < Object > result = new ArrayList < > ( ) ; while ( itemIterator . hasNext ( ) ) { itemIterator . next ( ) ; Object outputKey = writeObject ( itemIterator . keyType ( ) , itemIterator . key ( ) , null ) ; Object outputCol = writeObject ( itemIterator . columnType ( ) , itemIterator . column ( ) , null ) ; Object outputValue = writeObject ( itemIterator . valueType ( ) , itemIterator . value ( ) , itemIterator ) ; result . add ( Arrays . asList ( outputKey , outputCol , outputValue ) ) ; } return result ; |
public class PackageInspectorImpl { /** * This iterator will walk the package index , returning only packages that indicate
* they are API packages and are included both by the kernel ( core ) feature and
* another enabled liberty feature or features .
* @ see com . ibm . ws . kernel . provisioning . packages . SharedPackageInspector # listKernelApiPackages ( ) */
public Iterator < String > listKernelBlackListApiPackages ( ) { } } | ProductPackages index = packageIndex ; if ( index != null ) { return index . packageIterator ( new Filter < PackageInfo > ( ) { @ Override public boolean includeValue ( String packageName , PackageInfo value ) { return value . isApi ( ) && value . isKernelExportBlacklistedPackage ( ) ; } } ) ; } return new EmptyIterator ( ) ; |
public class AnnotationTargetsImpl_Targets { /** * PostCondition : haveScannedReferencedClasses = = true */
@ Override public void scan ( ClassSource_Aggregate classSource , Set < String > specificClassNames ) throws AnnotationTargets_Exception { } } | try { createScanner ( classSource ) . scan ( specificClassNames ) ; // ' createScanner ' throws class source exception
} finally { haveScannedDirectClasses = true ; haveScannedReferencedClasses = true ; } |
public class FastTrackData { /** * Retrieve a table of data .
* @ param type table type
* @ return FastTrackTable instance */
public FastTrackTable getTable ( FastTrackTableType type ) { } } | FastTrackTable result = m_tables . get ( type ) ; if ( result == null ) { result = EMPTY_TABLE ; } return result ; |
public class Renderer { /** * < p > Render the child components of this { @ link UIComponent } , following
* the rules described for < code > encodeBegin ( ) < / code > to acquire the
* appropriate value to be rendered . This method will only be called
* if the < code > rendersChildren < / code > property of this component
* is < code > true < / code > . < / p >
* @ param context { @ link FacesContext } for the response we are creating
* @ param component { @ link UIComponent } whose children are to be rendered
* @ throws IOException if an input / output error occurs while rendering
* @ throws NullPointerException if < code > context < / code >
* or < code > component < / code > is < code > null < / code > */
public void encodeChildren ( FacesContext context , UIComponent component ) throws IOException { } } | if ( context == null || component == null ) { throw new NullPointerException ( ) ; } if ( component . getChildCount ( ) > 0 ) { Iterator < UIComponent > kids = component . getChildren ( ) . iterator ( ) ; while ( kids . hasNext ( ) ) { UIComponent kid = kids . next ( ) ; kid . encodeAll ( context ) ; } } |
public class Config { /** * Returns the AtomicReferenceConfig for the given name , creating one
* if necessary and adding it to the collection of known configurations .
* The configuration is found by matching the configuration name
* pattern to the provided { @ code name } without the partition qualifier
* ( the part of the name after { @ code ' @ ' } ) .
* If no configuration matches , it will create one by cloning the
* { @ code " default " } configuration and add it to the configuration
* collection .
* This method is intended to easily and fluently create and add
* configurations more specific than the default configuration without
* explicitly adding it by invoking { @ link # addAtomicReferenceConfig ( AtomicReferenceConfig ) } .
* Because it adds new configurations if they are not already present ,
* this method is intended to be used before this config is used to
* create a hazelcast instance . Afterwards , newly added configurations
* may be ignored .
* @ param name name of the AtomicReference config
* @ return the AtomicReference configuration
* @ throws ConfigurationException if ambiguous configurations are found
* @ see StringPartitioningStrategy # getBaseName ( java . lang . String )
* @ see # setConfigPatternMatcher ( ConfigPatternMatcher )
* @ see # getConfigPatternMatcher ( ) */
public AtomicReferenceConfig getAtomicReferenceConfig ( String name ) { } } | return ConfigUtils . getConfig ( configPatternMatcher , atomicReferenceConfigs , name , AtomicReferenceConfig . class ) ; |
public class BridgeActivity { /** * Request for alert window . */
static void requestAlertWindow ( Source source ) { } } | Intent intent = new Intent ( source . getContext ( ) , BridgeActivity . class ) ; intent . putExtra ( KEY_TYPE , BridgeRequest . TYPE_ALERT_WINDOW ) ; source . startActivity ( intent ) ; |
public class DomainRegistrationProvidersInner { /** * Implements Csm operations Api to exposes the list of available Csm Apis under the resource provider .
* Implements Csm operations Api to exposes the list of available Csm Apis under the resource provider .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the PagedList & lt ; CsmOperationDescriptionInner & gt ; object */
public Observable < ServiceResponse < Page < CsmOperationDescriptionInner > > > listOperationsWithServiceResponseAsync ( ) { } } | return listOperationsSinglePageAsync ( ) . concatMap ( new Func1 < ServiceResponse < Page < CsmOperationDescriptionInner > > , Observable < ServiceResponse < Page < CsmOperationDescriptionInner > > > > ( ) { @ Override public Observable < ServiceResponse < Page < CsmOperationDescriptionInner > > > call ( ServiceResponse < Page < CsmOperationDescriptionInner > > page ) { String nextPageLink = page . body ( ) . nextPageLink ( ) ; if ( nextPageLink == null ) { return Observable . just ( page ) ; } return Observable . just ( page ) . concatWith ( listOperationsNextWithServiceResponseAsync ( nextPageLink ) ) ; } } ) ; |
public class Duration { /** * Creates a new { @ code Duration } from given milliseconds .
* @ param millis the duration in milliseconds .
* @ return a new { @ code Duration } from given milliseconds .
* @ throws IllegalArgumentException if the number of milliseconds is out of the range that can be
* represented by { @ code Duration } .
* @ since 0.5 */
public static Duration fromMillis ( long millis ) { } } | long seconds = millis / MILLIS_PER_SECOND ; int nanos = ( int ) ( millis % MILLIS_PER_SECOND * NANOS_PER_MILLI ) ; return Duration . create ( seconds , nanos ) ; |
public class InjectorImpl {

    /**
     * Produces the list of "interesting" constructors from the candidate
     * implementations: the union of all injectable (and legacy) constructors,
     * reduced by a skyline query over isMoreSpecificThan() — a constructor
     * survives only if no other candidate is strictly more specific than it.
     *
     * @param candidateImplementations class nodes whose constructors are considered
     * @param memo                     memoized injection plans, updated as argument plans are built
     * @return one wrapped injection plan per candidate implementation
     */
    private <T> List<InjectionPlan<T>> filterCandidateConstructors(final List<ClassNode<T>> candidateImplementations, final Map<Node, InjectionPlan<?>> memo) {
        final List<InjectionPlan<T>> subIps = new ArrayList<>();
        for (final ClassNode<T> thisCN : candidateImplementations) {
            final List<Constructor<T>> constructors = new ArrayList<>();
            final List<ConstructorDef<T>> constructorList = new ArrayList<>();
            // Legacy constructor (if registered) is considered alongside the injectable ones.
            if (null != c.getLegacyConstructor(thisCN)) {
                constructorList.add(c.getLegacyConstructor(thisCN));
            }
            constructorList.addAll(Arrays.asList(thisCN.getInjectableConstructors()));
            // Build an injection plan for every argument of every constructor.
            for (final ConstructorDef<T> def : constructorList) {
                final List<InjectionPlan<?>> args = new ArrayList<>();
                final ConstructorArg[] defArgs = def.getArgs();
                for (final ConstructorArg arg : defArgs) {
                    if (!arg.isInjectionFuture()) {
                        try {
                            final Node argNode = namespace.getNode(arg.getName());
                            // Recursively plan the argument, then read the memoized result.
                            buildInjectionPlan(argNode, memo);
                            args.add(memo.get(argNode));
                        } catch (final NameResolutionException e) {
                            throw new IllegalStateException("Detected unresolvable " + "constructor arg while building injection plan. " + "This should have been caught earlier!", e);
                        }
                    } else {
                        // Futures are planned lazily via an InjectionFuturePlan placeholder.
                        try {
                            args.add(new InjectionFuturePlan<>(namespace.getNode(arg.getName())));
                        } catch (final NameResolutionException e) {
                            throw new IllegalStateException("Detected unresolvable " + "constructor arg while building injection plan. " + "This should have been caught earlier!", e);
                        }
                    }
                }
                final Constructor<T> constructor = new Constructor<>(thisCN, def, args.toArray(new InjectionPlan[0]));
                constructors.add(constructor);
            }
            // The constructors are embedded in a lattice defined by
            // isMoreSpecificThan(). We want to see if, amongst the injectable
            // plans, there is a unique dominant plan, and select it.
            // First, compute the set of injectable plans (those with alternatives).
            final List<Integer> liveIndices = new ArrayList<>();
            for (int i = 0; i < constructors.size(); i++) {
                if (constructors.get(i).getNumAlternatives() > 0) {
                    liveIndices.add(i);
                }
            }
            // All-by-all comparison, removing indices that are dominated by others.
            // Note: List.remove(int) here removes BY POSITION, not by value.
            for (int i = 0; i < liveIndices.size(); i++) {
                for (int j = i + 1; j < liveIndices.size(); j++) {
                    final ConstructorDef<T> ci = constructors.get(liveIndices.get(i)).getConstructorDef();
                    final ConstructorDef<T> cj = constructors.get(liveIndices.get(j)).getConstructorDef();
                    if (ci.isMoreSpecificThan(cj)) {
                        liveIndices.remove(j);
                        j--;
                    } else if (cj.isMoreSpecificThan(ci)) {
                        liveIndices.remove(i);
                        // Done with this inner loop invocation. Check the new ci.
                        i--;
                        break;
                    }
                }
            }
            if (constructors.size() > 0) {
                // Pass the unique dominant index if exactly one survived, else -1.
                subIps.add(wrapInjectionPlans(thisCN, constructors, false,
                        liveIndices.size() == 1 ? liveIndices.get(0) : -1));
            }
        }
        return subIps;
    }
}
public class Encoding { /** * Adds a binary clause to the given SAT solver .
* @ param s the sat solver
* @ param a the first literal
* @ param b the second literal */
void addBinaryClause ( final MiniSatStyleSolver s , int a , int b ) { } } | this . addBinaryClause ( s , a , b , LIT_UNDEF ) ; |
public class Table { /** * ( non - Javadoc )
* @ see qc . automation . framework . widget . ITable # getTableRowCount ( ) */
@ Override public int getTableRowCount ( ) throws WidgetException { } } | try { return getTableDataInArray ( ) . size ( ) ; } catch ( Exception e ) { throw new WidgetException ( "Error while getting table row count" , generateXPathLocator ( ) , e ) ; } |
public class GoogleAtom { /** * Compute the patch object of key / value pairs from the given original and patched objects , adding
* a { @ code @ gd : fields } key for the fields mask .
* @ param patched patched object
* @ param original original object
* @ return patch object of key / value pairs */
public static Map < String , Object > computePatch ( Object patched , Object original ) { } } | FieldsMask fieldsMask = new FieldsMask ( ) ; ArrayMap < String , Object > result = computePatchInternal ( fieldsMask , patched , original ) ; if ( fieldsMask . numDifferences != 0 ) { result . put ( "@gd:fields" , fieldsMask . buf . toString ( ) ) ; } return result ; |
public class SortUtils { /** * Sort the array in reverse order .
* @ param array
* object to be handled . */
public static < T > void reverse ( final T array ) { } } | if ( ArrayUtils . isArray ( array ) ) { int mlength = Array . getLength ( array ) - 1 ; Object temp = null ; for ( int i = 0 , j = mlength ; i < mlength ; i ++ , j -- ) { Object arg0 = Array . get ( array , i ) ; Object arg1 = Array . get ( array , j ) ; temp = arg0 ; Array . set ( array , i , arg1 ) ; Array . set ( array , j , temp ) ; } } |
public class QueryTracker { /** * Remove completed queries after a waiting period */
private void removeExpiredQueries ( ) { } } | DateTime timeHorizon = DateTime . now ( ) . minus ( minQueryExpireAge . toMillis ( ) ) ; // we ' re willing to keep queries beyond timeHorizon as long as we have fewer than maxQueryHistory
while ( expirationQueue . size ( ) > maxQueryHistory ) { T query = expirationQueue . peek ( ) ; if ( query == null ) { return ; } // expirationQueue is FIFO based on query end time . Stop when we see the
// first query that ' s too young to expire
Optional < DateTime > endTime = query . getEndTime ( ) ; if ( ! endTime . isPresent ( ) ) { // this shouldn ' t happen but it is better to be safe here
continue ; } if ( endTime . get ( ) . isAfter ( timeHorizon ) ) { return ; } // only expire them if they are older than minQueryExpireAge . We need to keep them
// around for a while in case clients come back asking for status
QueryId queryId = query . getQueryId ( ) ; log . debug ( "Remove query %s" , queryId ) ; queries . remove ( queryId ) ; expirationQueue . remove ( query ) ; } |
public class AWSCloud { /** * Gets the epoch form of the text value of the provided node .
* @ param node the node to extact the value from
* @ return the epoch time
* @ throws CloudException */
public static long getTimestampValue ( Node node ) throws CloudException { } } | SimpleDateFormat fmt = new SimpleDateFormat ( "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'" ) ; fmt . setTimeZone ( TimeZone . getTimeZone ( "UTC" ) ) ; String value = getTextValue ( node ) ; try { return fmt . parse ( value ) . getTime ( ) ; } catch ( ParseException e ) { logger . error ( e ) ; e . printStackTrace ( ) ; throw new CloudException ( e ) ; } |
public class BasicModMixer {

    /**
     * Processes the per-tick envelopes for one channel: volume / panning /
     * pitch instrument envelopes, key-off fade-out, panbrello, auto-vibrato,
     * and finally the channel's left/right output volumes and volume-ramp
     * deltas. This function always sets the volume.
     *
     * @param aktMemo the channel state to update
     * @since 19.06.2006
     */
    protected void processEnvelopes(ChannelMemory aktMemo) {
        int currentVolume = aktMemo.currentVolume << Helpers.VOLUMESHIFT;
        int currentPanning = aktMemo.panning;
        Instrument currentInstrument = aktMemo.assignedInstrument;
        if (currentInstrument != null) {
            // Volume envelope: scale the channel volume by the envelope value.
            Envelope volumeEnv = currentInstrument.volumeEnvelope;
            if (volumeEnv != null && volumeEnv.on) {
                aktMemo.volEnvPos = volumeEnv.updatePosition(aktMemo.volEnvPos, aktMemo.keyOff);
                int newVol = volumeEnv.getValueForPosition(aktMemo.volEnvPos);
                currentVolume = (currentVolume * newVol) >> 9;
            }
            // Panning envelope: value is centered around 256.
            Envelope panningEnv = currentInstrument.panningEnvelope;
            if (panningEnv != null && panningEnv.on) {
                aktMemo.panEnvPos = panningEnv.updatePosition(aktMemo.panEnvPos, aktMemo.keyOff);
                currentPanning += panningEnv.getValueForPosition(aktMemo.panEnvPos) - 256;
            }
            // After key-off, apply the instrument fade-out (immediate silence
            // when there is no active volume envelope).
            if (aktMemo.keyOff) {
                if (volumeEnv != null && volumeEnv.on) {
                    aktMemo.fadeOutVolume -= currentInstrument.volumeFadeOut << 1;
                    if (aktMemo.fadeOutVolume < 0) aktMemo.fadeOutVolume = 0;
                } else
                    aktMemo.fadeOutVolume = 0;
                currentVolume = (currentVolume * aktMemo.fadeOutVolume) >> 16; // max: 65536
            }
            // Pitch envelope: either drives the channel filter or slides the period.
            Envelope pitchEnv = currentInstrument.pitchEnvelope;
            if (pitchEnv != null && pitchEnv.on) {
                aktMemo.pitchEnvPos = pitchEnv.updatePosition(aktMemo.pitchEnvPos, aktMemo.keyOff);
                int pitchValue = pitchEnv.getValueForPosition(aktMemo.pitchEnvPos) - 256;
                if (pitchEnv.filter) setupChannelFilter(aktMemo, !aktMemo.filterOn, pitchValue);
                else {
                    long a = aktMemo.currentNotePeriod;
                    long b = 0;
                    // Slide tables are 16.16 fixed point multipliers, clamped to index 255.
                    if (pitchValue < 0) {
                        pitchValue = -pitchValue;
                        if (pitchValue > 255) pitchValue = 255;
                        b = Helpers.LinearSlideUpTable[pitchValue];
                    } else {
                        if (pitchValue > 255) pitchValue = 255;
                        b = Helpers.LinearSlideDownTable[pitchValue];
                    }
                    setNewPlayerTuningFor(aktMemo, (int) ((a * b) >> 16));
                }
            }
        }
        // do Panbrello (panning oscillation)
        if (aktMemo.panbrelloOn) {
            final int panningPos = (aktMemo.panbrelloTablePos >> 2) & 0x3F; // MPTrack starts +0x10 (high amplitude)
            int newPanning;
            switch (aktMemo.panbrelloType & 0x03) {
                case 1:
                    newPanning = (Helpers.ModRampDownTable[panningPos]); // Sawtooth
                    break;
                case 2:
                    newPanning = (Helpers.ModSquareTable[panningPos]); // Squarewave
                    break;
                case 3:
                    newPanning = (Helpers.ModRandomTable[panningPos]); // Random.
                    break;
                default:
                    newPanning = (Helpers.ModSinusTable[panningPos]); // Sinus
                    break;
            }
            aktMemo.panbrelloTablePos += aktMemo.panbrelloStep;
            newPanning = ((newPanning * aktMemo.panbrelloAmplitude) + 2) >> 3;
            newPanning += currentPanning;
            // Clamp panning to [0, 255] here (final clamp below allows 256).
            currentPanning = (newPanning < 0) ? 0 : ((newPanning > 255) ? 255 : newPanning);
        }
        // Apply channel, global and fade-out volumes, then clamp.
        currentVolume = (currentVolume * aktMemo.channelVolume) >> 6; // max: 64
        // Global Volumes
        currentVolume = (currentVolume * globalVolume) >> 7; // max: 128
        currentVolume = (currentVolume * fadeOutValue) >> fadeOutFac; // max: 255
        if (currentVolume < 0) currentVolume = 0;
        else if (currentVolume > Helpers.MAXVOLUME) currentVolume = Helpers.MAXVOLUME;
        if (currentPanning < 0) currentPanning = 0;
        else if (currentPanning > 256) currentPanning = 256;
        // Volume ramping: remember old values, compute new stereo volumes.
        aktMemo.actRampVolLeft = aktMemo.actVolumeLeft;
        aktMemo.actRampVolRight = aktMemo.actVolumeRight;
        aktMemo.actVolumeLeft = currentVolume * ((256 - currentPanning) << Helpers.VOLUMESHIFT_FULL);
        aktMemo.actVolumeRight = currentVolume * ((currentPanning) << Helpers.VOLUMESHIFT_FULL);
        if (aktMemo.doSurround) aktMemo.actVolumeLeft = -aktMemo.actVolumeLeft;
        // Per-sample ramp delta for the left channel.
        // NOTE(review): when the delta is non-zero but not greater than
        // volRampLen, `x /= x` collapses it to 1 even for negative deltas —
        // presumably intentional smoothing behavior; confirm against the mixer.
        if (aktMemo.actVolumeLeft != aktMemo.actRampVolLeft) {
            aktMemo.deltaVolLeft = aktMemo.actVolumeLeft - aktMemo.actRampVolLeft;
            if (aktMemo.deltaVolLeft > volRampLen) aktMemo.deltaVolLeft /= volRampLen;
            else if (aktMemo.deltaVolLeft != 0) aktMemo.deltaVolLeft /= aktMemo.deltaVolLeft;
        } else aktMemo.deltaVolLeft = 0;
        // Per-sample ramp delta for the right channel (same scheme).
        if (aktMemo.actVolumeRight != aktMemo.actRampVolRight) {
            aktMemo.deltaVolRight = aktMemo.actVolumeRight - aktMemo.actRampVolRight;
            if (aktMemo.deltaVolRight > volRampLen) aktMemo.deltaVolRight /= volRampLen;
            else if (aktMemo.deltaVolRight != 0) aktMemo.deltaVolRight /= aktMemo.deltaVolRight;
        } else aktMemo.deltaVolRight = 0;
        // AutoVibrato (sample-level vibrato with sweep-in).
        Sample currentSample = aktMemo.currentSample;
        if (currentSample != null && currentSample.vibratoDepth > 0 && aktMemo.currentNotePeriod > 0) {
            // Amplitude is 8.8 fixed point; sweep ramps it up to vibratoDepth.
            if (currentSample.vibratoSweep == 0) aktMemo.autoVibratoAmplitude = currentSample.vibratoDepth << 8;
            else {
                if (!aktMemo.keyOff) {
                    aktMemo.autoVibratoAmplitude += (currentSample.vibratoDepth << 8) / currentSample.vibratoSweep;
                    if ((aktMemo.autoVibratoAmplitude >> 8) > currentSample.vibratoDepth)
                        aktMemo.autoVibratoAmplitude = currentSample.vibratoDepth << 8;
                }
            }
            aktMemo.autoVibratoTablePos += currentSample.vibratoRate;
            int periodAdd;
            switch (currentSample.vibratoType & 0x07) {
                default:
                case 0:
                    periodAdd = Helpers.ft2VibratoTable[aktMemo.autoVibratoTablePos & 0xFF]; // Sine
                    break;
                case 1:
                    periodAdd = ((aktMemo.autoVibratoTablePos & 0x80) == 0x80) ? +64 : -64; // Square
                    break;
                case 2:
                    periodAdd = ((0x40 + (aktMemo.autoVibratoTablePos >> 1)) & 0x7f) - 0x40; // Ramp Up
                    break;
                case 3:
                    periodAdd = ((0x40 - (aktMemo.autoVibratoTablePos >> 1)) & 0x7F) - 0x40; // Ramp Down
                    break;
                case 4:
                    periodAdd = (Helpers.ModRandomTable[aktMemo.autoVibratoTablePos & 0x3F]); // Random.
                    break;
            }
            periodAdd = (periodAdd * aktMemo.autoVibratoAmplitude) >> 8;
            setNewPlayerTuningFor(aktMemo, aktMemo.currentNotePeriod + (periodAdd >> 4));
        }
    }
}
public class Widget { /** * Gets Widget layout dimension
* @ param axis The { @ linkplain Layout . Axis axis } to obtain layout size for
* @ return dimension */
public float getLayoutSize ( final Layout . Axis axis ) { } } | float size = 0 ; for ( Layout layout : mLayouts ) { size = Math . max ( size , layout . getSize ( axis ) ) ; } Log . d ( Log . SUBSYSTEM . LAYOUT , TAG , "getLayoutSize [%s] axis [%s] size [%f]" , getName ( ) , axis , size ) ; return size ; |
public class OfferService {

    /**
     * Sends an incremental block report (newly received and deleted blocks) to
     * the Namenode. Blocks are drained from the pending list under its lock;
     * if the RPC fails, the drained blocks are re-queued for retry. A null
     * response from a standby triggers primary-clearing failover handling.
     *
     * @param startTime the time when we started processing the last heartbeat
     * @throws Exception if there is an error in reporting blocks to the NameNode
     */
    private void sendIncrementalBlockReport(long startTime) throws Exception {
        // check if there are newly received blocks
        Block[] receivedAndDeletedBlockArray = null;
        int numBlocksReceivedAndDeleted = 0;
        int currentPendingRequests = 0;
        synchronized (receivedAndDeletedBlockList) {
            // Drain the pending list atomically: snapshot, clear, and capture
            // the pending-request count so it can be restored on failure.
            lastDeletedReport = startTime;
            numBlocksReceivedAndDeleted = receivedAndDeletedBlockList.size();
            if (numBlocksReceivedAndDeleted > 0) {
                receivedAndDeletedBlockArray = receivedAndDeletedBlockList.toArray(new Block[numBlocksReceivedAndDeleted]);
                receivedAndDeletedBlockList.clear();
                currentPendingRequests = pendingReceivedRequests;
                pendingReceivedRequests = 0;
            }
        }
        // process received + deleted
        // if exception is thrown, add all blocks back to the retry list
        if (receivedAndDeletedBlockArray != null) {
            long[] failed = null;
            try {
                IncrementalBlockReport ibr = new IncrementalBlockReport(receivedAndDeletedBlockArray);
                long rpcStartTime = 0;
                if (LOG.isDebugEnabled()) {
                    rpcStartTime = System.nanoTime();
                    LOG.debug("sending blockReceivedAndDeletedNew " + receivedAndDeletedBlockArray.length + " blocks to " + namenodeAddress);
                }
                failed = avatarnode.blockReceivedAndDeletedNew(nsRegistration, ibr);
                if (LOG.isDebugEnabled()) {
                    LOG.debug("finished blockReceivedAndDeletedNew " + "to " + namenodeAddress + " time: " + (System.nanoTime() - rpcStartTime) + " ns");
                }
                boolean isPrimaryCached = isPrimaryServiceCached();
                // Protocol invariant: the primary returns null, the standby
                // returns a (possibly empty) failed-block array.
                if (isPrimaryCached && failed != null) {
                    // This should never happen: the primary can't switch to standby.
                    throw new IOException("Primary started acting as standby");
                } else if (!isPrimaryCached && failed == null) {
                    String msg = "Received null response from standby for incremental" + " block report. ";
                    if (clearPrimaryCommandProcessed) {
                        LOG.info(msg + "Failover is in progress" + " - will not clear primary again");
                    } else {
                        LOG.info(msg + "Standby is acting as primary. Clearing primary");
                        // Failover detected - refresh our knowledge of the primary.
                        this.clearPrimary();
                    }
                }
            } catch (Exception e) {
                // RPC failed: put every drained block back on the retry list
                // (restoring the captured pending-request count), then rethrow.
                processFailedBlocks(receivedAndDeletedBlockArray, currentPendingRequests);
                throw e;
            }
            // Standby acknowledged but rejected some blocks: retry just those.
            if (failed != null && failed.length != 0) {
                processFailedReceivedDeleted(failed, receivedAndDeletedBlockArray);
            }
        }
    }
}
public class UiCollection { /** * Call a function on this object
* @ param method
* @ param args
* @ return
* @ throws Exception */
public JSONArray call ( String method , Object ... args ) throws Exception { } } | return super . callMethod ( method , args ) ; |
public class VariableScope { /** * Gets a map containing the variables declared in this scope .
* This map cannot be modified .
* @ return a map containing the declared variable references */
public Map < String , Variable > getDeclaredVariables ( ) { } } | if ( declaredVariables == Collections . EMPTY_MAP ) { return declaredVariables ; } else { return Collections . unmodifiableMap ( declaredVariables ) ; } |
public class CommonOps_DDF4 { /** * < p > Performs an element by element division operation : < br >
* < br >
* a < sub > i < / sub > = a < sub > i < / sub > / b < sub > i < / sub > < br >
* @ param a The left vector in the division operation . Modified .
* @ param b The right vector in the division operation . Not modified . */
public static void elementDiv ( DMatrix4 a , DMatrix4 b ) { } } | a . a1 /= b . a1 ; a . a2 /= b . a2 ; a . a3 /= b . a3 ; a . a4 /= b . a4 ; |
public class CopyingConverter { /** * Intermediate step to create a { @ link CopyingConverter } instance that instantiates the ( most likely abstract )
* target type using the default constructor of a specific implementation .
* @ param targetType
* @ return { @ link Implementing } step */
public static < TARGET > Implementing < TARGET > implementing ( Class < TARGET > targetType ) { } } | return new Implementing < > ( TypeUtils . wrap ( targetType ) ) ; |
public class NetworkCalibration {

    /**
     * Fills the two output maps ({@code discharge} and {@code fillDegree}):
     * for each time step, a map from pipe id to the discharge / fill-degree
     * value of that pipe, with pipes ordered by ascending id.
     */
    private void getNetData() {
        int nTime = lastTimeDischarge.length;
        int length = lastTimeDischarge[0].length;
        HashMap<Integer, double[]> tmpHMDis = new LinkedHashMap<Integer, double[]>();
        HashMap<Integer, double[]> tmpHMFill = new LinkedHashMap<Integer, double[]>();
        // Order the output by pipe id: sort ids (two) carrying their original
        // indices (one) along, so `one[k]` is the index of the k-th smallest id.
        int netLength = networkPipes.length;
        double[] one = new double[netLength];
        double[] two = new double[netLength];
        for (int i = 0; i < netLength; i++) {
            one[i] = i;
            two[i] = networkPipes[i].getId();
        }
        QuickSortAlgorithm sort = new QuickSortAlgorithm(pm);
        sort.sort(two, one);
        // First time step keyed by `first`.
        // NOTE(review): the loop bound is `length - 1` (columns of
        // lastTimeDischarge) while `one` has netLength entries — confirm these
        // sizes always agree and that skipping the last column is intended.
        for (int i = 0; i < length - 1; i++) {
            int index = (int) one[i];
            tmpHMDis.put(networkPipes[index].getId(), new double[] { lastTimeDischarge[0][index] });
            tmpHMFill.put(networkPipes[index].getId(), new double[] { lastTimeFillDegree[0][index] });
        }
        discharge.put(first, tmpHMDis);
        fillDegree.put(first, tmpHMFill);
        // Remaining time steps, each dt minutes after the previous one.
        DateTime tmp = first;
        for (int i = 1; i < nTime; ++i) {
            tmp = tmp.plusMinutes(dt);
            tmpHMDis = new LinkedHashMap<Integer, double[]>();
            tmpHMFill = new LinkedHashMap<Integer, double[]>();
            for (int j = 0; j < length - 1; j++) {
                int index = (int) one[j];
                tmpHMDis.put(networkPipes[index].getId(), new double[] { lastTimeDischarge[i][index] });
                tmpHMFill.put(networkPipes[index].getId(), new double[] { lastTimeFillDegree[i][index] });
            }
            discharge.put(tmp, tmpHMDis);
            fillDegree.put(tmp, tmpHMFill);
        }
    }
}
public class ClientConfigUtil { /** * Compares two avro strings which contains single store configs
* @ param configAvro1
* @ param configAvro2
* @ return true if two config avro strings have same content */
public static Boolean compareSingleClientConfigAvro ( String configAvro1 , String configAvro2 ) { } } | Properties props1 = readSingleClientConfigAvro ( configAvro1 ) ; Properties props2 = readSingleClientConfigAvro ( configAvro2 ) ; if ( props1 . equals ( props2 ) ) { return true ; } else { return false ; } |
public class FileUtil {

    /**
     * Determines if the specified directory is an ancestor of (or equal to)
     * the specified file or directory. Both paths are canonicalized first, so
     * relative segments and symlinks are resolved before comparison.
     *
     * @param file     the file or directory to test
     * @param ancestor the directory for which to determine whether
     *                 {@code file} is a descendant
     * @return {@code true} if {@code ancestor} is equal to or an ancestor of
     *         {@code file}, {@code false} otherwise
     * @throws IOException if an error occurs in attempting to canonicalize
     *                     {@code file} or {@code ancestor}
     */
    public static boolean isAncestor(File file, File ancestor) throws IOException {
        final File target = ancestor.getCanonicalFile();
        // Walk up the parent chain from file, comparing each level to target.
        for (File current = file.getCanonicalFile(); current != null; current = current.getParentFile()) {
            if (current.equals(target)) {
                return true;
            }
        }
        return false;
    }
}
public class LoadBalancerNodeFilter {

    /**
     * Combines this filter with another using logical AND.
     * When both sides are still simple single-filter evaluations, the pool
     * filters and predicates are merged into a brand-new filter; otherwise the
     * other filter is chained onto this one's evaluation.
     * <p>
     * NOTE(review): the else branch mutates this filter's {@code evaluation}
     * in place and returns {@code this}, while the if branch returns a new
     * instance — callers should not rely on either aliasing behavior; confirm
     * this asymmetry is intended.
     *
     * {@inheritDoc}
     */
    @Override
    public LoadBalancerNodeFilter and(LoadBalancerNodeFilter otherFilter) {
        if (evaluation instanceof SingleFilterEvaluation && otherFilter.evaluation instanceof SingleFilterEvaluation) {
            return new LoadBalancerNodeFilter(
                    getLoadBalancerPoolFilter().and(otherFilter.getLoadBalancerPoolFilter()),
                    getPredicate().and(otherFilter.getPredicate()));
        } else {
            evaluation = new AndEvaluation<>(evaluation, otherFilter, LoadBalancerNodeMetadata::getIpAddress);
            return this;
        }
    }
}
public class AWSBudgetsClient { /** * Updates a notification .
* @ param updateNotificationRequest
* Request of UpdateNotification
* @ return Result of the UpdateNotification operation returned by the service .
* @ throws InternalErrorException
* An error on the server occurred during the processing of your request . Try again later .
* @ throws InvalidParameterException
* An error on the client occurred . Typically , the cause is an invalid input value .
* @ throws NotFoundException
* We can ’ t locate the resource that you specified .
* @ throws DuplicateRecordException
* The budget name already exists . Budget names must be unique within an account .
* @ sample AWSBudgets . UpdateNotification */
@ Override public UpdateNotificationResult updateNotification ( UpdateNotificationRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeUpdateNotification ( request ) ; |
public class OMVRBTree { /** * Gets the entry corresponding to the specified key ; if no such entry exists , returns the entry for the least key greater than
* the specified key ; if no such entry exists ( i . e . , the greatest key in the Tree is less than the specified key ) , returns
* < tt > null < / tt > .
* @ param key
* Key to search .
* @ param partialSearchMode
* In case of { @ link OCompositeKey } key is passed in this parameter will be used to find preferred one . */
public OMVRBTreeEntry < K , V > getCeilingEntry ( final K key , final PartialSearchMode partialSearchMode ) { } } | OMVRBTreeEntry < K , V > p = getEntry ( key , true , partialSearchMode ) ; if ( p == null ) return null ; if ( pageItemFound ) return p ; // NOT MATCHED , POSITION IS ALREADY TO THE NEXT ONE
else if ( pageIndex < p . getSize ( ) ) { if ( key instanceof OCompositeKey ) return adjustSearchResult ( ( OCompositeKey ) key , partialSearchMode , p ) ; else return p ; } return null ; |
public class TvInputProvider {

    /**
     * Queries the channel list to find the given channel and then queries the
     * EPG (electronic programming guide) to find the program that is playing
     * right now.
     *
     * @param channel the channel you are tuned to
     * @return the last program on the channel whose start time is in the past,
     *         or {@code null} if none is found or the service metadata lookup fails
     */
    public Program getProgramRightNow(Channel channel) {
        ApplicationInfo app = null;
        try {
            // Resolve this package's TvInputService from the manifest metadata.
            app = getApplicationContext().getPackageManager().getApplicationInfo(getApplicationContext().getPackageName(), PackageManager.GET_META_DATA);
            Bundle bundle = app.metaData;
            final String service = bundle.getString("TvInputService");
            // NOTE(review): `channels` is computed but never used — presumably
            // leftover debugging alongside the Log.d below; confirm it can go.
            String channels = TvContract.buildInputId(new ComponentName(getPackageName(), service.substring(getPackageName().length())));
            Log.d(TAG, new ComponentName(getPackageName(), service.substring(getPackageName().length())).flattenToString());
            List<Program> programs = getPrograms(getApplicationContext(), TvContract.buildChannelUri(channel.getChannelId()));
            Log.d(TAG, "Program from channel " + TvContract.buildChannelUri(channel.getChannelId()));
            Log.d(TAG, "Program from chanel " + channel.getChannelId());
            // Pick the LAST program that has already started.
            // NOTE(review): the end time is never checked, so a program that has
            // already finished can be returned — confirm intended.
            Program currentProgram = null;
            for (Program p : programs) {
                if (p.getStartTimeUtcMillis() < new Date().getTime()) {
                    currentProgram = p;
                }
            }
            return currentProgram;
        } catch (PackageManager.NameNotFoundException e) {
            // Metadata for our own package was not found; fall through to null.
            e.printStackTrace();
        }
        return null;
    }
}
public class Supplier { /** * The predefined Supplier selects all values and performs the given
* { @ link com . hazelcast . mapreduce . aggregation . PropertyExtractor } s transformation to the
* input data . The returned value type of the transformation needs to match the expected
* value type of the aggregation .
* @ param < KeyIn > the input key type
* @ param < ValueIn > the input value type
* @ param < ValueOut > the supplied value type
* @ return all values from the underlying data structure transformed using the given PropertyExtractor */
public static < KeyIn , ValueIn , ValueOut > Supplier < KeyIn , ValueIn , ValueOut > all ( PropertyExtractor < ValueIn , ValueOut > propertyExtractor ) { } } | return new AcceptAllSupplier ( propertyExtractor ) ; |
public class MutualInformationEntropyPhraseExtractor { /** * 一句话提取
* @ param text
* @ param size
* @ return */
public static List < String > extract ( String text , int size ) { } } | IPhraseExtractor extractor = new MutualInformationEntropyPhraseExtractor ( ) ; return extractor . extractPhrase ( text , size ) ; |
public class MimeTypeDetector { /** * Determines the MIME type of a file .
* The file must exist and be readable .
* The CompletionStage may return a { @ link ExecutionException } which is caused
* by a { @ link GetBytesException } . ( That , in turn , will wrap a
* { @ link IOException } or other exception that prevented getBytesAsync ( ) from
* working .
* @ param path A file that exists and is readable
* @ return a MIME type such as { @ literal " text / plain " }
* @ see # detectMimeType ( String , Callable ) */
public CompletionStage < String > detectMimeTypeAsync ( final Path path ) { } } | String filename = path . getFileName ( ) . toString ( ) ; Supplier < CompletionStage < byte [ ] > > supplier = ( ) -> { final CompletableFuture < byte [ ] > futureBytes = new CompletableFuture < byte [ ] > ( ) ; AsynchronousFileChannel channel ; try { channel = AsynchronousFileChannel . open ( path , StandardOpenOption . READ ) ; } catch ( IOException e ) { futureBytes . completeExceptionally ( new GetBytesException ( e ) ) ; return futureBytes ; } final ByteBuffer buf = ByteBuffer . allocate ( getMaxGetBytesLength ( ) ) ; channel . read ( buf , 0 , futureBytes , new CompletionHandler < Integer , CompletableFuture < byte [ ] > > ( ) { @ Override public void completed ( Integer nBytes , CompletableFuture < byte [ ] > f ) { if ( nBytes == - 1 ) nBytes = 0 ; // handle empty file
byte [ ] bytes = new byte [ nBytes ] ; buf . rewind ( ) ; buf . get ( bytes , 0 , nBytes ) ; f . complete ( bytes ) ; } @ Override public void failed ( Throwable exc , CompletableFuture < byte [ ] > f ) { f . completeExceptionally ( new GetBytesException ( exc ) ) ; } } ) ; return futureBytes ; } ; return detectMimeTypeAsync ( filename , supplier ) ; |
public class IOUtils {
    /**
     * Opens a UTF-8 reader over the content addressed by {@code uri}.<br>
     * The returned reader must be explicitly closed after use.
     *
     * @param uri source URI
     * @return a buffered UTF-8 reader over the URI's stream
     * @throws IOException if the connection cannot be opened
     */
    public static Reader uriReader(URI uri) throws IOException {
        final InputStream stream = uri.toURL().openStream();
        return new BufferedReader(new InputStreamReader(stream, StandardCharsets.UTF_8));
    }
}
public class ResourceBundlesHandlerImpl { /** * ( non - Javadoc )
* @ see net . jawr . web . resource . bundle . handler . ResourceBundlesHandler #
* notifyModification ( java . util . List ) */
@ Override public void notifyModification ( List < JoinableResourceBundle > bundles ) { } } | for ( JoinableResourceBundle bundle : bundles ) { if ( LOGGER . isInfoEnabled ( ) && ! bundle . isDirty ( ) ) { LOGGER . info ( "The bundle '" + bundle . getId ( ) + "' has been modified and needs to be rebuild." ) ; } bundle . setDirty ( true ) ; // Update the composite bundles which are linked to this bundle if
// they exists
List < JoinableResourceBundle > linkedBundles = compositeResourceBundleMap . get ( bundle . getName ( ) ) ; if ( linkedBundles != null ) { for ( JoinableResourceBundle compositeBundle : linkedBundles ) { if ( LOGGER . isInfoEnabled ( ) && ! compositeBundle . isDirty ( ) ) { LOGGER . info ( "The composite bundle '" + compositeBundle . getId ( ) + "', whose child has been modified, needs to be rebuild." ) ; } compositeBundle . setDirty ( true ) ; } } } |
public class AmazonInspectorClient { /** * Lists the agents of the assessment runs that are specified by the ARNs of the assessment runs .
* @ param listAssessmentRunAgentsRequest
* @ return Result of the ListAssessmentRunAgents operation returned by the service .
* @ throws InternalException
* Internal server error .
* @ throws InvalidInputException
* The request was rejected because an invalid or out - of - range value was supplied for an input parameter .
* @ throws AccessDeniedException
* You do not have required permissions to access the requested resource .
* @ throws NoSuchEntityException
* The request was rejected because it referenced an entity that does not exist . The error code describes
* the entity .
* @ sample AmazonInspector . ListAssessmentRunAgents
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / inspector - 2016-02-16 / ListAssessmentRunAgents "
* target = " _ top " > AWS API Documentation < / a > */
@ Override public ListAssessmentRunAgentsResult listAssessmentRunAgents ( ListAssessmentRunAgentsRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeListAssessmentRunAgents ( request ) ; |
public class DefaultGrailsApplication { /** * Re - initialize the artefacts of the specified type . This gives handlers a chance to update caches etc .
* @ param handler The handler to register */
protected void initializeArtefacts ( ArtefactHandler handler ) { } } | if ( handler == null ) { return ; } ArtefactInfo info = getArtefactInfo ( handler . getType ( ) ) ; // Only init those that have data
if ( info != null ) { // System . out . println ( " Initialising artefacts of kind " + handler . getType ( ) + " with registered artefacts " + info . getGrailsClassesByName ( ) ) ;
handler . initialize ( info ) ; } |
public class SpreadUtils { /** * For each element of array , generate a question .
* @ param args
* @ return */
public static < E > String generateQuestion ( byte [ ] args ) { } } | if ( args == null || args . length == 0 ) return "" ; return generateInternal ( args . length ) ; |
public class DaylightModel { /** * Lookup of the number of valence electrons for elements near those which
* this model considers aromatic . As only the { @ link # aromaticElement ( int ) }
* are checked we need only consider elements within a charge range .
* @ param element the atomic number of an element
* @ return the valence
* @ throws UnsupportedOperationException encountered an element which the
* valence was not encoded for */
private int valence ( int element ) { } } | switch ( element ) { case 5 : // boron
case 13 : // aluminium
case 31 : // gallium
return 3 ; case CARBON : case 14 : // silicon
case 32 : // germanium
return 4 ; case NITROGEN : case PHOSPHORUS : case ARSENIC : return 5 ; case OXYGEN : case SULPHUR : case SELENIUM : return 6 ; case 9 : // fluorine
case 17 : // chlorine
case 35 : // bromine
return 7 ; } throw new UnsupportedOperationException ( "Valence not yet handled for element with atomic number " + element ) ; |
public class AbstractMessages { /** * Gets a bundle . First tries to find one in the cache , then loads it if
* it can ' t find one . */
private ResourceBundle getBundle ( ) { } } | String bundleKey = getBundleKey ( ) ; if ( bundles . containsKey ( bundleKey ) ) { return bundles . get ( bundleKey ) ; } else { ResourceBundle bundle = loadBundle ( ) ; bundles . put ( bundleKey , bundle ) ; return bundle ; } |
public class SimpleSVGViewer { /** * Save / export the current plot . */
public void saveCurrentPlot ( ) { } } | // TODO : exclude " do not export " layers !
final SVGPlot currentPlot = svgCanvas . getPlot ( ) ; if ( currentPlot != null ) { SVGSaveDialog . showSaveDialog ( currentPlot , 512 , 512 ) ; } else { LoggingUtil . warning ( "saveCurrentPlot() called without a visible plot!" ) ; } |
public class RecursiveMethodCallFinder {
    /**
     * Recursive helper to find subsequent method calls of the given method.
     * Uses {@code dict} as accumulator during recursion, storing every called
     * {@link CtMethod}, keyed by its declaring {@link CtClass}.
     *
     * @param declaredMethod the method whose call graph is explored
     * @param level remaining recursion depth; 0 records the method itself but
     *              does not descend further
     * @param dict accumulator of declaring-class -> called-methods; also returned
     * @return {@code dict}, with this method and (up to {@code level} hops of)
     *         its callees added
     * @throws IllegalArgumentException when {@code level} is negative
     */
    private Map<CtClass, Set<CtMethod>> find(final CtMethod declaredMethod, final int level,
            final Map<CtClass, Set<CtMethod>> dict) {
        if (level < 0) {
            throw new IllegalArgumentException("level < 0");
        }
        // Record the method itself before descending.
        addToMap(declaredMethod.getDeclaringClass(), declaredMethod, dict);
        if (level > 0) {
            try {
                // Instrumenting visits every call expression in the method body.
                declaredMethod.instrument(new ExprEditor() {
                    @Override
                    public void edit(final MethodCall m) throws CannotCompileException {
                        try {
                            CtMethod method = m.getMethod();
                            LOG.info(method.getLongName() + " / " + level);
                            // Recurse one level shallower for each callee.
                            find(method, level - 1, dict);
                        } catch (NotFoundException e) {
                            e.printStackTrace(); // should not happen
                        }
                        super.edit(m);
                    }
                });
            } catch (CannotCompileException e) {
                // cannot possibly be thrown due to the fact that we don't change anything here
                e.printStackTrace();
            }
        }
        return dict;
    }
}
public class FSDirectory { /** * Set file replication
* @ param src file name
* @ param replication new replication
* @ param oldReplication old replication - output parameter
* @ return array of file blocks
* @ throws IOException */
BlockInfo [ ] setReplication ( String src , short replication , int [ ] oldReplication ) throws IOException { } } | waitForReady ( ) ; BlockInfo [ ] fileBlocks = unprotectedSetReplication ( src , replication , oldReplication ) ; if ( fileBlocks != null ) // log replication change
fsImage . getEditLog ( ) . logSetReplication ( src , replication ) ; return fileBlocks ; |
public class AcceptLanguageList { /** * Return the associated quality of the given language .
* @ param sLanguage
* The language name to query . May not be < code > null < / code > .
* @ return The associated { @ link QValue } . Never < code > null < / code > . */
@ Nonnull public QValue getQValueOfLanguage ( @ Nonnull final String sLanguage ) { } } | ValueEnforcer . notNull ( sLanguage , "Language" ) ; // Find language direct
QValue aQuality = m_aMap . get ( _unify ( sLanguage ) ) ; if ( aQuality == null ) { // If not explicitly given , check for " * "
aQuality = m_aMap . get ( AcceptLanguageHandler . ANY_LANGUAGE ) ; if ( aQuality == null ) { // Neither language nor " * " is present
// - > assume minimum quality
return QValue . MIN_QVALUE ; } } return aQuality ; |
public class KeyValueSources { /** * Creates a new source using a zip file and a function that maps each entry in the zip file to
* a unique key . The caller must ensure that the zip file is not closed or modified , otherwise all
* behavior is undefined . All files in the zip file will be used ; there is currently no way to
* exclude specific files . Use a default identity - like function that defines the mapping between
* keys and the entry used for their values , see { @ link # fromZip ( ZipFile ) } .
* @ param zipFile the zip file to use as a source
* @ param idExtractor a function that returns a unique id for every file contained in the zip
* @ return a new key - value source backed by the specified zip file
* @ see # fromZip ( ZipFile ) */
@ Nonnull public static ImmutableKeyValueSource < Symbol , ByteSource > fromZip ( final ZipFile zipFile , final Function < String , Symbol > idExtractor ) { } } | final ImmutableMap . Builder < Symbol , String > ret = ImmutableMap . builder ( ) ; // Build a map of the key for each file to the filename
final Enumeration < ? extends ZipEntry > entries = zipFile . entries ( ) ; while ( entries . hasMoreElements ( ) ) { final ZipEntry entry = entries . nextElement ( ) ; final String name = entry . getName ( ) ; // Skip directories
if ( entry . isDirectory ( ) ) { continue ; } final Symbol id = checkNotNull ( idExtractor . apply ( name ) ) ; ret . put ( id , name ) ; } return new ZipKeyValueSource ( zipFile , ret . build ( ) ) ; |
public class ServletHandler { protected void notFound ( HttpServletRequest request , HttpServletResponse response ) throws IOException { } } | if ( log . isDebugEnabled ( ) ) log . debug ( "Not Found " + request . getRequestURI ( ) ) ; String method = request . getMethod ( ) ; // Not found special requests .
if ( method . equals ( HttpRequest . __GET ) || method . equals ( HttpRequest . __HEAD ) || method . equals ( HttpRequest . __POST ) ) { response . sendError ( HttpResponse . __404_Not_Found ) ; } else if ( method . equals ( HttpRequest . __TRACE ) ) handleTrace ( request , response ) ; else if ( method . equals ( HttpRequest . __OPTIONS ) ) handleOptions ( request , response ) ; else { // Unknown METHOD
response . setHeader ( HttpFields . __Allow , __AllowString ) ; response . sendError ( HttpResponse . __405_Method_Not_Allowed ) ; } |
public class ServiceMethodInvoker { /** * Invokes service method with bidirectional communication .
* @ param publisher request service message
* @ param dataDecoder function to create new service message with decoded data
* @ return flux of service messages */
public Flux < ServiceMessage > invokeBidirectional ( Publisher < ServiceMessage > publisher , BiFunction < ServiceMessage , Class < ? > , ServiceMessage > dataDecoder ) { } } | return Flux . from ( publisher ) . map ( message -> toRequest ( message , dataDecoder ) ) . transform ( this :: invoke ) . map ( this :: toResponse ) . onErrorResume ( throwable -> Flux . just ( errorMapper . toMessage ( throwable ) ) ) ; |
public class ShuttleList { /** * Get the index of a given object in the underlying data model .
* @ param o Object to locate
* @ return index of object in model , - 1 if not found */
protected int indexOf ( final Object o ) { } } | final int size = dataModel . getSize ( ) ; for ( int i = 0 ; i < size ; i ++ ) { if ( comparator == null ) { if ( o . equals ( dataModel . getElementAt ( i ) ) ) { return i ; } } else if ( comparator . compare ( o , dataModel . getElementAt ( i ) ) == 0 ) { return i ; } } return - 1 ; |
public class DeleteProjectRequestMarshaller {
    /**
     * Marshalls the given request onto the protocol marshaller.
     * NOTE(review): generated AWS SDK marshaller — kept as-is.
     *
     * @param deleteProjectRequest the request to marshall; must not be null
     * @param protocolMarshaller the marshaller to write to
     * @throws SdkClientException when the request is null or marshalling fails
     */
    public void marshall(DeleteProjectRequest deleteProjectRequest, ProtocolMarshaller protocolMarshaller) {
        if (deleteProjectRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(deleteProjectRequest.getName(), NAME_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class NumberPath { /** * Method to construct the between expression for double
* @ param startValue the start value
* @ param endValue the end value
* @ return Expression */
public Expression < Double > between ( Double startValue , Double endValue ) { } } | String valueString = "'" + startValue + "' AND '" + endValue + "'" ; return new Expression < Double > ( this , Operation . between , valueString ) ; |
public class JSONSerializer { /** * Method to add custom Json serializers for all Enum classes
* @ param objectMapper the Jackson object mapper */
private void registerModulesForEnum ( ObjectMapper objectMapper ) { } } | SimpleModule module = new SimpleModule ( "AccountClassificationEnum" , new Version ( 1 , 0 , 0 , null ) ) ; module . addSerializer ( AccountClassificationEnum . class , new AccountClassificationEnumJsonSerializer ( ) ) ; objectMapper . registerModule ( module ) ; module = new SimpleModule ( "AccountSubTypeEnum" , new Version ( 1 , 0 , 0 , null ) ) ; module . addSerializer ( AccountSubTypeEnum . class , new AccountSubTypeEnumJsonSerializer ( ) ) ; objectMapper . registerModule ( module ) ; module = new SimpleModule ( "AccountTypeEnum" , new Version ( 1 , 0 , 0 , null ) ) ; module . addSerializer ( AccountTypeEnum . class , new AccountTypeEnumJsonSerializer ( ) ) ; objectMapper . registerModule ( module ) ; module = new SimpleModule ( "AcquiredAsEnum" , new Version ( 1 , 0 , 0 , null ) ) ; module . addSerializer ( AcquiredAsEnum . class , new AcquiredAsEnumJsonSerializer ( ) ) ; objectMapper . registerModule ( module ) ; module = new SimpleModule ( "APCreditCardOperationEnum" , new Version ( 1 , 0 , 0 , null ) ) ; module . addSerializer ( APCreditCardOperationEnum . class , new APCreditCardOperationEnumJsonSerializer ( ) ) ; objectMapper . registerModule ( module ) ; module = new SimpleModule ( "AttachableCategoryEnum" , new Version ( 1 , 0 , 0 , null ) ) ; module . addSerializer ( AttachableCategoryEnum . class , new AttachableCategoryEnumJsonSerializer ( ) ) ; objectMapper . registerModule ( module ) ; module = new SimpleModule ( "BillableStatusEnum" , new Version ( 1 , 0 , 0 , null ) ) ; module . addSerializer ( BillableStatusEnum . class , new BillableStatusEnumJsonSerializer ( ) ) ; objectMapper . registerModule ( module ) ; module = new SimpleModule ( "BillPaymentTypeEnum" , new Version ( 1 , 0 , 0 , null ) ) ; module . addSerializer ( BillPaymentTypeEnum . class , new BillPaymentTypeEnumJsonSerializer ( ) ) ; objectMapper . 
registerModule ( module ) ; module = new SimpleModule ( "CCAVSMatchEnum" , new Version ( 1 , 0 , 0 , null ) ) ; module . addSerializer ( CCAVSMatchEnum . class , new CCAVSMatchEnumJsonSerializer ( ) ) ; objectMapper . registerModule ( module ) ; module = new SimpleModule ( "CCPaymentStatusEnum" , new Version ( 1 , 0 , 0 , null ) ) ; module . addSerializer ( CCPaymentStatusEnum . class , new CCPaymentStatusEnumJsonSerializer ( ) ) ; objectMapper . registerModule ( module ) ; module = new SimpleModule ( "CCSecurityCodeMatchEnum" , new Version ( 1 , 0 , 0 , null ) ) ; module . addSerializer ( CCSecurityCodeMatchEnum . class , new CCSecurityCodeMatchEnumJsonSerializer ( ) ) ; objectMapper . registerModule ( module ) ; module = new SimpleModule ( "CCTxnModeEnum" , new Version ( 1 , 0 , 0 , null ) ) ; module . addSerializer ( CCTxnModeEnum . class , new CCTxnModeEnumJsonSerializer ( ) ) ; objectMapper . registerModule ( module ) ; module = new SimpleModule ( "CCTxnTypeEnum" , new Version ( 1 , 0 , 0 , null ) ) ; module . addSerializer ( CCTxnTypeEnum . class , new CCTxnTypeEnumJsonSerializer ( ) ) ; objectMapper . registerModule ( module ) ; module = new SimpleModule ( "ColumnTypeEnum" , new Version ( 1 , 0 , 0 , null ) ) ; module . addSerializer ( ColumnTypeEnum . class , new ColTypeEnumJsonSerializer ( ) ) ; objectMapper . registerModule ( module ) ; module = new SimpleModule ( "ContactTypeEnum" , new Version ( 1 , 0 , 0 , null ) ) ; module . addSerializer ( ContactTypeEnum . class , new ContactTypeEnumJsonSerializer ( ) ) ; objectMapper . registerModule ( module ) ; module = new SimpleModule ( "CreditCardTypeEnum" , new Version ( 1 , 0 , 0 , null ) ) ; module . addSerializer ( CreditCardTypeEnum . class , new CreditCardTypeEnumJsonSerializer ( ) ) ; objectMapper . registerModule ( module ) ; module = new SimpleModule ( "CustomerTypeEnum" , new Version ( 1 , 0 , 0 , null ) ) ; module . addSerializer ( CustomerTypeEnum . 
class , new CustomerTypeEnumJsonSerializer ( ) ) ; objectMapper . registerModule ( module ) ; module = new SimpleModule ( "CustomFieldTypeEnum" , new Version ( 1 , 0 , 0 , null ) ) ; module . addSerializer ( CustomFieldTypeEnum . class , new CustomFieldTypeEnumJsonSerializer ( ) ) ; objectMapper . registerModule ( module ) ; module = new SimpleModule ( "DayOfWeekEnum" , new Version ( 1 , 0 , 0 , null ) ) ; module . addSerializer ( DayOfWeekEnum . class , new DayOfWeekEnumJsonSerializer ( ) ) ; objectMapper . registerModule ( module ) ; module = new SimpleModule ( "EmailAddressTypeEnum" , new Version ( 1 , 0 , 0 , null ) ) ; module . addSerializer ( EmailAddressTypeEnum . class , new EmailAddressTypeEnumJsonSerializer ( ) ) ; objectMapper . registerModule ( module ) ; module = new SimpleModule ( "EmailStatusEnum" , new Version ( 1 , 0 , 0 , null ) ) ; module . addSerializer ( EmailStatusEnum . class , new EmailStatusEnumJsonSerializer ( ) ) ; objectMapper . registerModule ( module ) ; module = new SimpleModule ( "EmployeeTypeEnum" , new Version ( 1 , 0 , 0 , null ) ) ; module . addSerializer ( EmployeeTypeEnum . class , new EmployeeTypeEnumJsonSerializer ( ) ) ; objectMapper . registerModule ( module ) ; module = new SimpleModule ( "EntityStatusEnum" , new Version ( 1 , 0 , 0 , null ) ) ; module . addSerializer ( EntityStatusEnum . class , new EntityStatusEnumJsonSerializer ( ) ) ; objectMapper . registerModule ( module ) ; module = new SimpleModule ( "EntityTypeEnum" , new Version ( 1 , 0 , 0 , null ) ) ; module . addSerializer ( EntityTypeEnum . class , new EntityTypeEnumJsonSerializer ( ) ) ; objectMapper . registerModule ( module ) ; module = new SimpleModule ( "EstimateStatusEnum" , new Version ( 1 , 0 , 0 , null ) ) ; module . addSerializer ( EstimateStatusEnum . class , new EstimateStatusEnumJsonSerializer ( ) ) ; objectMapper . registerModule ( module ) ; module = new SimpleModule ( "FaultTypeEnum" , new Version ( 1 , 0 , 0 , null ) ) ; module . 
addSerializer ( FaultTypeEnum . class , new FaultTypeEnumJsonSerializer ( ) ) ; objectMapper . registerModule ( module ) ; module = new SimpleModule ( "GenderEnum" , new Version ( 1 , 0 , 0 , null ) ) ; module . addSerializer ( Gender . class , new GenderEnumJsonSerializer ( ) ) ; objectMapper . registerModule ( module ) ; module = new SimpleModule ( "GlobalTaxCalculationEnum" , new Version ( 1 , 0 , 0 , null ) ) ; module . addSerializer ( GlobalTaxCalculationEnum . class , new GlobalTaxCalculationEnumJsonSerializer ( ) ) ; objectMapper . registerModule ( module ) ; module = new SimpleModule ( "IdDomainEnum" , new Version ( 1 , 0 , 0 , null ) ) ; module . addSerializer ( IdDomainEnum . class , new IdDomainEnumJsonSerializer ( ) ) ; objectMapper . registerModule ( module ) ; module = new SimpleModule ( "ItemTypeEnum" , new Version ( 1 , 0 , 0 , null ) ) ; module . addSerializer ( ItemTypeEnum . class , new ItemTypeEnumJsonSerializer ( ) ) ; objectMapper . registerModule ( module ) ; module = new SimpleModule ( "JobStatusEnum" , new Version ( 1 , 0 , 0 , null ) ) ; module . addSerializer ( JobStatusEnum . class , new JobStatusEnumJsonSerializer ( ) ) ; objectMapper . registerModule ( module ) ; module = new SimpleModule ( "LineDetailTypeEnum" , new Version ( 1 , 0 , 0 , null ) ) ; module . addSerializer ( LineDetailTypeEnum . class , new LineDetailTypeEnumJsonSerializer ( ) ) ; objectMapper . registerModule ( module ) ; module = new SimpleModule ( "MonthEnum" , new Version ( 1 , 0 , 0 , null ) ) ; module . addSerializer ( MonthEnum . class , new MonthEnumJsonSerializer ( ) ) ; objectMapper . registerModule ( module ) ; module = new SimpleModule ( "OperationEnum" , new Version ( 1 , 0 , 0 , null ) ) ; module . addSerializer ( OperationEnum . class , new OperationEnumJsonSerializer ( ) ) ; objectMapper . registerModule ( module ) ; module = new SimpleModule ( "PaymentMethodEnum" , new Version ( 1 , 0 , 0 , null ) ) ; module . addSerializer ( PaymentMethodEnum . 
class , new PaymentMethodEnumJsonSerializer ( ) ) ; objectMapper . registerModule ( module ) ; module = new SimpleModule ( "PaymentStatusEnum" , new Version ( 1 , 0 , 0 , null ) ) ; module . addSerializer ( PaymentStatusEnum . class , new PaymentStatusEnumJsonSerializer ( ) ) ; objectMapper . registerModule ( module ) ; module = new SimpleModule ( "PaymentTypeEnum" , new Version ( 1 , 0 , 0 , null ) ) ; module . addSerializer ( PaymentTypeEnum . class , new PaymentTypeEnumJsonSerializer ( ) ) ; objectMapper . registerModule ( module ) ; module = new SimpleModule ( "PaySalesTaxEnum" , new Version ( 1 , 0 , 0 , null ) ) ; module . addSerializer ( PaySalesTaxEnum . class , new PaySalesTaxEnumJsonSerializer ( ) ) ; objectMapper . registerModule ( module ) ; module = new SimpleModule ( "PerItemAdjustEnum" , new Version ( 1 , 0 , 0 , null ) ) ; module . addSerializer ( PerItemAdjustEnum . class , new PerItemAdjustEnumJsonSerializer ( ) ) ; objectMapper . registerModule ( module ) ; module = new SimpleModule ( "PhysicalAddressTypeEnum" , new Version ( 1 , 0 , 0 , null ) ) ; module . addSerializer ( PhysicalAddressTypeEnum . class , new PhysicalAddressTypeEnumJsonSerializer ( ) ) ; objectMapper . registerModule ( module ) ; module = new SimpleModule ( "PostingTypeEnum" , new Version ( 1 , 0 , 0 , null ) ) ; module . addSerializer ( PostingTypeEnum . class , new PostingTypeEnumJsonSerializer ( ) ) ; objectMapper . registerModule ( module ) ; module = new SimpleModule ( "PriceLevelTypeEnum" , new Version ( 1 , 0 , 0 , null ) ) ; module . addSerializer ( PriceLevelTypeEnum . class , new PriceLevelTypeEnumJsonSerializer ( ) ) ; objectMapper . registerModule ( module ) ; module = new SimpleModule ( "PrintStatusEnum" , new Version ( 1 , 0 , 0 , null ) ) ; module . addSerializer ( PrintStatusEnum . class , new PrintStatusEnumJsonSerializer ( ) ) ; objectMapper . 
registerModule ( module ) ; module = new SimpleModule ( "PurchaseOrderStatusEnum" , new Version ( 1 , 0 , 0 , null ) ) ; module . addSerializer ( PurchaseOrderStatusEnum . class , new PurchaseOrderStatusEnumJsonSerializer ( ) ) ; objectMapper . registerModule ( module ) ; module = new SimpleModule ( "QboEstimateStatusEnum" , new Version ( 1 , 0 , 0 , null ) ) ; module . addSerializer ( QboEstimateStatusEnum . class , new QboEstimateStatusEnumJsonSerializer ( ) ) ; objectMapper . registerModule ( module ) ; module = new SimpleModule ( "ReimbursableTypeEnum" , new Version ( 1 , 0 , 0 , null ) ) ; module . addSerializer ( ReimbursableTypeEnum . class , new ReimbursableTypeEnumJsonSerializer ( ) ) ; objectMapper . registerModule ( module ) ; module = new SimpleModule ( "ReportBasisEnum" , new Version ( 1 , 0 , 0 , null ) ) ; module . addSerializer ( ReportBasisEnum . class , new ReportBasisEnumJsonSerializer ( ) ) ; objectMapper . registerModule ( module ) ; // module = new SimpleModule ( " ReportNameEnum " , new Version ( 1 , 0 , 0 , null ) ) ;
// module . addSerializer ( ReportNameEnum . class , new ReportNameEnumJsonSerializer ( ) ) ;
// objectMapper . registerModule ( module ) ;
module = new SimpleModule ( "RoundingMethodEnum" , new Version ( 1 , 0 , 0 , null ) ) ; module . addSerializer ( RoundingMethodEnum . class , new RoundingMethodEnumJsonSerializer ( ) ) ; objectMapper . registerModule ( module ) ; module = new SimpleModule ( "SalesRepTypeEnum" , new Version ( 1 , 0 , 0 , null ) ) ; module . addSerializer ( SalesRepTypeEnum . class , new SalesRepTypeEnumJsonSerializer ( ) ) ; objectMapper . registerModule ( module ) ; module = new SimpleModule ( "SalesTermTypeEnum" , new Version ( 1 , 0 , 0 , null ) ) ; module . addSerializer ( SalesTermTypeEnum . class , new SalesTermTypeEnumJsonSerializer ( ) ) ; objectMapper . registerModule ( module ) ; module = new SimpleModule ( "SpecialItemTypeEnum" , new Version ( 1 , 0 , 0 , null ) ) ; module . addSerializer ( SpecialItemTypeEnum . class , new SpecialItemTypeEnumJsonSerializer ( ) ) ; objectMapper . registerModule ( module ) ; module = new SimpleModule ( "SpecialTaxTypeEnum" , new Version ( 1 , 0 , 0 , null ) ) ; module . addSerializer ( SpecialTaxTypeEnum . class , new SpecialTaxTypeEnumJsonSerializer ( ) ) ; objectMapper . registerModule ( module ) ; module = new SimpleModule ( "SummarizeColumnsByEnum" , new Version ( 1 , 0 , 0 , null ) ) ; module . addSerializer ( SummarizeColumnsByEnum . class , new SummarizeColumnsByEnumJsonSerializer ( ) ) ; objectMapper . registerModule ( module ) ; module = new SimpleModule ( "SymbolPositionEnum" , new Version ( 1 , 0 , 0 , null ) ) ; module . addSerializer ( SymbolPositionEnum . class , new SymbolPositionEnumJsonSerializer ( ) ) ; objectMapper . registerModule ( module ) ; module = new SimpleModule ( "TaxRateDisplayTypeEnum" , new Version ( 1 , 0 , 0 , null ) ) ; module . addSerializer ( TaxRateDisplayTypeEnum . class , new TaxRateDisplayTypeEnumJsonSerializer ( ) ) ; objectMapper . registerModule ( module ) ; module = new SimpleModule ( "TaxTypeApplicablityEnum" , new Version ( 1 , 0 , 0 , null ) ) ; module . 
addSerializer ( TaxTypeApplicablityEnum . class , new TaxTypeApplicablityEnumJsonSerializer ( ) ) ; objectMapper . registerModule ( module ) ; module = new SimpleModule ( "TelephoneDeviceTypeEnum" , new Version ( 1 , 0 , 0 , null ) ) ; module . addSerializer ( TelephoneDeviceTypeEnum . class , new TelephoneDeviceTypeEnumJsonSerializer ( ) ) ; objectMapper . registerModule ( module ) ; module = new SimpleModule ( "TelephoneNumberTypeEnum" , new Version ( 1 , 0 , 0 , null ) ) ; module . addSerializer ( TelephoneNumberTypeEnum . class , new TelephoneNumberTypeEnumJsonSerializer ( ) ) ; objectMapper . registerModule ( module ) ; module = new SimpleModule ( "TemplateTypeEnum" , new Version ( 1 , 0 , 0 , null ) ) ; module . addSerializer ( TemplateTypeEnum . class , new TemplateTypeEnumJsonSerializer ( ) ) ; objectMapper . registerModule ( module ) ; module = new SimpleModule ( "TimeActivityTypeEnum" , new Version ( 1 , 0 , 0 , null ) ) ; module . addSerializer ( TimeActivityTypeEnum . class , new TimeActivityTypeEnumJsonSerializer ( ) ) ; objectMapper . registerModule ( module ) ; module = new SimpleModule ( "TimeEntryUsedForPaychecksEnum" , new Version ( 1 , 0 , 0 , null ) ) ; module . addSerializer ( TimeEntryUsedForPaychecksEnum . class , new TimeEntryUsedForPaychecksEnumJsonSerializer ( ) ) ; objectMapper . registerModule ( module ) ; module = new SimpleModule ( "TxnTypeEnum" , new Version ( 1 , 0 , 0 , null ) ) ; module . addSerializer ( TxnTypeEnum . class , new TxnTypeEnumJsonSerializer ( ) ) ; objectMapper . registerModule ( module ) ; module = new SimpleModule ( "UOMBaseTypeEnum" , new Version ( 1 , 0 , 0 , null ) ) ; module . addSerializer ( UOMBaseTypeEnum . class , new UOMBaseTypeEnumJsonSerializer ( ) ) ; objectMapper . registerModule ( module ) ; module = new SimpleModule ( "UOMFeatureTypeEnum" , new Version ( 1 , 0 , 0 , null ) ) ; module . addSerializer ( UOMFeatureTypeEnum . class , new UOMFeatureTypeEnumJsonSerializer ( ) ) ; objectMapper . 
registerModule ( module ) ; module = new SimpleModule ( "WeekEnum" , new Version ( 1 , 0 , 0 , null ) ) ; module . addSerializer ( WeekEnum . class , new WeekEnumJsonSerializer ( ) ) ; objectMapper . registerModule ( module ) ; module = new SimpleModule ( "TaxRateApplicableOnEnum" , new Version ( 1 , 0 , 0 , null ) ) ; module . addSerializer ( TaxRateApplicableOnEnum . class , new TaxRateApplicableOnEnumJsonSerializer ( ) ) ; objectMapper . registerModule ( module ) ; module = new SimpleModule ( "TaxApplicableOnEnum" , new Version ( 1 , 0 , 0 , null ) ) ; module . addSerializer ( TaxApplicableOnEnum . class , new TaxApplicableOnEnumJsonSerializer ( ) ) ; objectMapper . registerModule ( module ) ; |
public class JavaMethod { /** * Returns the code attribute . */
public CodeAttribute getCode ( ) { } } | for ( int i = 0 ; i < _attributes . size ( ) ; i ++ ) { Attribute attr = _attributes . get ( i ) ; if ( attr instanceof CodeAttribute ) return ( CodeAttribute ) attr ; } return null ; |
public class ObjectInputStream { /** * Creates the proxy class that implements the interfaces specified in
* { @ code interfaceNames } .
* @ param interfaceNames
* the interfaces used to create the proxy class .
* @ return the proxy class .
* @ throws ClassNotFoundException
* if the proxy class or any of the specified interfaces cannot
* be created .
* @ throws IOException
* if an error occurs while reading from the source stream .
* @ see ObjectOutputStream # annotateProxyClass ( Class ) */
protected Class < ? > resolveProxyClass ( String [ ] interfaceNames ) throws IOException , ClassNotFoundException { } } | ClassLoader loader = callerClassLoader ; Class < ? > [ ] interfaces = new Class < ? > [ interfaceNames . length ] ; for ( int i = 0 ; i < interfaceNames . length ; i ++ ) { interfaces [ i ] = Class . forName ( interfaceNames [ i ] , false , loader ) ; } try { return Proxy . getProxyClass ( loader , interfaces ) ; } catch ( IllegalArgumentException e ) { throw new ClassNotFoundException ( e . toString ( ) , e ) ; } |
public class OperatorTable { /** * Adds an infix left - associative binary operator .
* @ param parser the parser for the operator .
* @ param precedence the precedence number .
* @ return this . */
public OperatorTable < T > infixl ( Parser < ? extends BiFunction < ? super T , ? super T , ? extends T > > parser , int precedence ) { } } | ops . add ( new Operator ( parser , precedence , Associativity . LASSOC ) ) ; return this ; |
public class EndpointResponseMarshaller {
    /**
     * Marshalls every field of the given {@link EndpointResponse} onto the protocol marshaller.
     * NOTE(review): generated AWS SDK marshaller — kept as-is.
     *
     * @param endpointResponse the object to marshall; must not be null
     * @param protocolMarshaller the marshaller to write to
     * @throws SdkClientException when the argument is null or marshalling fails
     */
    public void marshall(EndpointResponse endpointResponse, ProtocolMarshaller protocolMarshaller) {
        if (endpointResponse == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(endpointResponse.getAddress(), ADDRESS_BINDING);
            protocolMarshaller.marshall(endpointResponse.getApplicationId(), APPLICATIONID_BINDING);
            protocolMarshaller.marshall(endpointResponse.getAttributes(), ATTRIBUTES_BINDING);
            protocolMarshaller.marshall(endpointResponse.getChannelType(), CHANNELTYPE_BINDING);
            protocolMarshaller.marshall(endpointResponse.getCohortId(), COHORTID_BINDING);
            protocolMarshaller.marshall(endpointResponse.getCreationDate(), CREATIONDATE_BINDING);
            protocolMarshaller.marshall(endpointResponse.getDemographic(), DEMOGRAPHIC_BINDING);
            protocolMarshaller.marshall(endpointResponse.getEffectiveDate(), EFFECTIVEDATE_BINDING);
            protocolMarshaller.marshall(endpointResponse.getEndpointStatus(), ENDPOINTSTATUS_BINDING);
            protocolMarshaller.marshall(endpointResponse.getId(), ID_BINDING);
            protocolMarshaller.marshall(endpointResponse.getLocation(), LOCATION_BINDING);
            protocolMarshaller.marshall(endpointResponse.getMetrics(), METRICS_BINDING);
            protocolMarshaller.marshall(endpointResponse.getOptOut(), OPTOUT_BINDING);
            protocolMarshaller.marshall(endpointResponse.getRequestId(), REQUESTID_BINDING);
            protocolMarshaller.marshall(endpointResponse.getUser(), USER_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class BinaryLogRecordSerializerImpl { /** * sequence number of message . */
  // Writes the fixed-size header fields of a binary log record.
  // Field order matters: readers rely on it, and time is written first to
  // make time-based filtering cheap.
  private void writeFixedFields ( long millis , Level level , int threadId , long seq , DataOutput writer ) throws IOException {
    // Write time first for ease of time filtering .
    writer . writeLong ( millis ) ;
    // Write our own sequence number for merging purposes .
    long seqNumber ;
    // Class-wide lock: `counter` is shared across instances, so the
    // serializer-local sequence is globally monotonic within this JVM.
    synchronized ( BinaryLogRecordSerializerImpl . class ) { seqNumber = counter ++ ; }
    writer . writeLong ( seqNumber ) ;
    // Write data always present in a log record .
    writer . writeShort ( level . intValue ( ) ) ;
    writer . writeInt ( threadId ) ;
    // `seq` is the caller-supplied record sequence, distinct from the
    // merge sequence written above.
    writer . writeLong ( seq ) ;
  }
}
public class AbstractMarshaller { /** * Template method for handling { @ code SAXResult } s .
* < p > This implementation delegates to { @ code marshalSaxHandlers } .
* @ param graph the root of the object graph to marshal
* @ param saxResult the { @ code SAXResult }
* @ throws XmlMappingException if the given object cannot be marshalled to the result
* @ see # marshalSaxHandlers ( Object , org . xml . sax . ContentHandler , org . xml . sax . ext . LexicalHandler ) */
protected void marshalSaxResult ( Object graph , SAXResult saxResult ) throws XmlMappingException { } } | ContentHandler contentHandler = saxResult . getHandler ( ) ; Assert . notNull ( contentHandler , "ContentHandler not set on SAXResult" ) ; LexicalHandler lexicalHandler = saxResult . getLexicalHandler ( ) ; marshalSaxHandlers ( graph , contentHandler , lexicalHandler ) ; |
public class DateUtils { /** * < p > Rounds a date , leaving the field specified as the most
* significant field . < / p >
* < p > For example , if you had the date - time of 28 Mar 2002
* 13:45:01.231 , if this was passed with HOUR , it would return
* 28 Mar 2002 14:00:00.000 . If this was passed with MONTH , it
* would return 1 April 2002 0:00:00.000 . < / p >
* < p > For a date in a timezone that handles the change to daylight
* saving time , rounding to Calendar . HOUR _ OF _ DAY will behave as follows .
* Suppose daylight saving time begins at 02:00 on March 30 . Rounding a
* date that crosses this time would produce the following values :
* < ul >
* < li > March 30 , 2003 01:10 rounds to March 30 , 2003 01:00 < / li >
* < li > March 30 , 2003 01:40 rounds to March 30 , 2003 03:00 < / li >
* < li > March 30 , 2003 02:10 rounds to March 30 , 2003 03:00 < / li >
* < li > March 30 , 2003 02:40 rounds to March 30 , 2003 04:00 < / li >
* < / ul >
* @ param date the date to work with , not null
* @ param field the field from { @ code Calendar } or { @ code SEMI _ MONTH }
* @ return the different rounded date , not null
* @ throws ArithmeticException if the year is over 280 million */
public static Date round ( final Date date , final int field ) { } } | validateDateNotNull ( date ) ; final Calendar gval = Calendar . getInstance ( ) ; gval . setTime ( date ) ; modify ( gval , field , ModifyType . ROUND ) ; return gval . getTime ( ) ; |
public class BaasDocument { /** * Asynchronously deletes the document with { @ code id } from { @ code collection }
   * @ param collection the collection of the document
   * @ param id the id of the document
   * @ param handler a callback to be invoked with the result of the request
   * @ return a { @ link com . baasbox . android . RequestToken } to handle the asynchronous request */
  // Convenience overload: delegates to the full variant with default request options.
  public static RequestToken delete ( String collection , String id , BaasHandler < Void > handler ) {
    return delete ( collection , id , RequestOptions . DEFAULT , handler ) ;
  }
}
public class AfplibFactoryImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
public String convertTextOrientationIAxisToString ( EDataType eDataType , Object instanceValue ) { } } | return instanceValue == null ? null : instanceValue . toString ( ) ; |
public class TiffITProfile { /** * Validate High - Resolution Continuous - Tone .
* @ param ifd the ifd
* @ param p the profile ( default = 0 , P1 = 1 , P2 = 2) */
private void validateIfdHC ( IFD ifd , int p ) { } } | IfdTags metadata = ifd . getMetadata ( ) ; int spp = - 1 ; if ( metadata . containsTagId ( TiffTags . getTagId ( "SampesPerPixel" ) ) ) { spp = ( int ) metadata . get ( TiffTags . getTagId ( "SampesPerPixel" ) ) . getFirstNumericValue ( ) ; } if ( p == 1 ) { checkRequiredTag ( metadata , "NewSubfileType" , 1 , new long [ ] { 0 } ) ; } checkRequiredTag ( metadata , "ImageLength" , 1 ) ; checkRequiredTag ( metadata , "ImageWidth" , 1 ) ; checkRequiredTag ( metadata , "BitsPerSample" , spp ) ; if ( p == 1 ) { checkRequiredTag ( metadata , "BitsPerSample" , 4 , new long [ ] { 8 } ) ; } checkRequiredTag ( metadata , "Compression" , 1 , new long [ ] { 32897 } ) ; checkRequiredTag ( metadata , "PhotometricInterpretation" , 1 , new long [ ] { 5 } ) ; checkRequiredTag ( metadata , "StripOffsets" , 1 ) ; if ( p == 1 || p == 2 ) { checkRequiredTag ( metadata , "Orientation" , 1 , new long [ ] { 1 } ) ; } if ( p == 1 ) { checkRequiredTag ( metadata , "SamplesPerPixel" , 1 , new long [ ] { 4 } ) ; } checkRequiredTag ( metadata , "StripBYTECount" , 1 ) ; checkRequiredTag ( metadata , "XResolution" , 1 ) ; checkRequiredTag ( metadata , "YResolution" , 1 ) ; checkRequiredTag ( metadata , "PlanarConfiguration" , 1 , new long [ ] { 1 } ) ; if ( p == 1 || p == 2 ) { checkRequiredTag ( metadata , "ResolutionUnit" , 1 , new long [ ] { 2 , 3 } ) ; checkRequiredTag ( metadata , "InkSet" , 1 , new long [ ] { 1 } ) ; checkRequiredTag ( metadata , "NumberOfInks" , 1 , new long [ ] { 4 } ) ; checkRequiredTag ( metadata , "DotRange" , 2 , new long [ ] { 0 , 255 } ) ; } checkRequiredTag ( metadata , "TransparencyIndicator" , 1 , new long [ ] { 0 , 1 } ) ; |
public class WebAuthenticatorProxy { /** * Create an instance of the BasicAuthAuthenticator .
* Protected so it can be overridden in unit tests . */
protected BasicAuthAuthenticator createBasicAuthenticator ( ) throws RegistryException { } } | SecurityService securityService = securityServiceRef . getService ( ) ; UserRegistryService userRegistryService = securityService . getUserRegistryService ( ) ; UserRegistry userRegistry = null ; if ( userRegistryService . isUserRegistryConfigured ( ) ) userRegistry = userRegistryService . getUserRegistry ( ) ; SSOCookieHelper sSOCookieHelper = webAppSecurityConfig . createSSOCookieHelper ( ) ; return new BasicAuthAuthenticator ( securityService . getAuthenticationService ( ) , userRegistry , sSOCookieHelper , webAppSecurityConfig ) ; |
public class HtmlJavaScriptUtils { /** * Prefixes the given String with " clear _ " and removes special characters
   * @ param formName
   * @ return String */
  // Builds the name of the client-side "clear hidden fields" function for a form.
  // Two modes:
  //  - formName == null: returns a JavaScript EXPRESSION (not a name) that
  //    computes the function name at runtime from a client-side `formName`
  //    variable, replacing '-' and the naming-container separator char.
  //  - formName != null: computes the name server-side, replacing the
  //    separator char with '_' and sanitizing via the RI-compatible helper.
  public static String getClearHiddenCommandFormParamsFunctionName ( String formName ) {
    // Separator char is container-configurable (default ':' in JSF).
    final char separatorChar = FacesContext . getCurrentInstance ( ) . getNamingContainerSeparatorChar ( ) ;
    if ( formName == null ) {
      // Runtime-evaluated JS string concatenation; the embedded regexes
      // operate on the client-side formName value.
      return "'" + HtmlRendererUtils . CLEAR_HIDDEN_FIELD_FN_NAME + "_'+formName.replace(/-/g, '\\$" + separatorChar + "').replace(/" + separatorChar + "/g,'_')" ;
    }
    return JavascriptUtils . getValidJavascriptNameAsInRI ( HtmlRendererUtils . CLEAR_HIDDEN_FIELD_FN_NAME + "_" + formName . replace ( separatorChar , '_' ) ) ;
  }
}
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.