signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class Codecs { /** * Create a permutation { @ code Codec } with the given alleles .
* @ param alleles the alleles of the permutation
* @ param < T > the allele type
* @ return a new permutation { @ code Codec }
* @ throws IllegalArgumentException if the given allele array is empty
* @ throws NullPointerException if one of the alleles is { @ code null } */
public static < T > Codec < ISeq < T > , EnumGene < T > > ofPermutation ( final ISeq < ? extends T > alleles ) { } } | if ( alleles . isEmpty ( ) ) { throw new IllegalArgumentException ( "Empty allele array is not allowed." ) ; } return Codec . of ( Genotype . of ( PermutationChromosome . of ( alleles ) ) , gt -> gt . getChromosome ( ) . stream ( ) . map ( EnumGene :: getAllele ) . collect ( ISeq . toISeq ( ) ) ) ; |
public class SourceMapGeneratorV3 { /** * Sets the source code that exists in the buffer for which the
* generated code is being generated . This ensures that the source map
* accurately reflects the fact that the source is being appended to
* an existing buffer and as such , does not start at line 0 , position 0
* but rather some other line and position .
* @ param offsetLine The index of the current line being printed .
* @ param offsetIndex The column index of the current character being printed . */
@ Override public void setStartingPosition ( int offsetLine , int offsetIndex ) { } } | checkState ( offsetLine >= 0 ) ; checkState ( offsetIndex >= 0 ) ; offsetPosition = new FilePosition ( offsetLine , offsetIndex ) ; |
public class InitStrategy { /** * Returns the list of * . jpi , * . hpi and * . hpl to expand and load .
* Normally we look at { @ code $ JENKINS _ HOME / plugins / * . jpi } and * . hpi and * . hpl .
* @ return
* never null but can be empty . The list can contain different versions of the same plugin ,
* and when that happens , Jenkins will ignore all but the first one in the list . */
public List < File > listPluginArchives ( PluginManager pm ) throws IOException { } } | List < File > r = new ArrayList < > ( ) ; // the ordering makes sure that during the debugging we get proper precedence among duplicates .
// for example , while doing " mvn jpi : run " or " mvn hpi : run " on a plugin that ' s bundled with Jenkins , we want to the
// * . jpl file to override the bundled jpi / hpi file .
getBundledPluginsFromProperty ( r ) ; // similarly , we prefer * . jpi over * . hpi
listPluginFiles ( pm , ".jpl" , r ) ; // linked plugin . for debugging .
listPluginFiles ( pm , ".hpl" , r ) ; // linked plugin . for debugging . ( for backward compatibility )
listPluginFiles ( pm , ".jpi" , r ) ; // plugin jar file
listPluginFiles ( pm , ".hpi" , r ) ; // plugin jar file ( for backward compatibility )
return r ; |
public class StyleCache { /** * Set the style into the polyline options
* @ param polylineOptions polyline options
* @ param style style row
* @ return true if style was set into the polyline options */
public boolean setStyle ( PolylineOptions polylineOptions , StyleRow style ) { } } | return StyleUtils . setStyle ( polylineOptions , style , density ) ; |
public class HtmlDocletWriter { /** * Get user specified header and the footer .
* @ param header if true print the user provided header else print the
* user provided footer . */
public Content getUserHeaderFooter ( boolean header ) { } } | String content ; if ( header ) { content = replaceDocRootDir ( configuration . header ) ; } else { if ( configuration . footer . length ( ) != 0 ) { content = replaceDocRootDir ( configuration . footer ) ; } else { content = replaceDocRootDir ( configuration . header ) ; } } Content rawContent = new RawHtml ( content ) ; return rawContent ; |
public class CmsFocalPointController { /** * Handles mouse drag . < p >
* @ param nativeEvent the mousemove event */
private void handleMove ( NativeEvent nativeEvent ) { } } | Element imageElem = m_image . getElement ( ) ; int offsetX = ( ( int ) pageX ( nativeEvent ) ) - imageElem . getParentElement ( ) . getAbsoluteLeft ( ) ; int offsetY = ( ( int ) pageY ( nativeEvent ) ) - imageElem . getParentElement ( ) . getAbsoluteTop ( ) ; if ( m_coordinateTransform != null ) { CmsPoint screenPoint = new CmsPoint ( offsetX , offsetY ) ; screenPoint = m_region . constrain ( screenPoint ) ; // make sure we remain in the screen region corresponding to original image ( or crop ) .
m_pointWidget . setCenterCoordsRelativeToParent ( ( int ) screenPoint . getX ( ) , ( int ) screenPoint . getY ( ) ) ; CmsPoint logicalPoint = m_coordinateTransform . transformForward ( screenPoint ) ; m_focalPoint = logicalPoint ; } |
public class Util { void polling_configure ( ) { } } | // Send a stop polling command to thread in order not to poll devices
final DServer adm_dev = get_dserver_device ( ) ; try { adm_dev . stop_polling ( ) ; } catch ( final DevFailed e ) { Except . print_exception ( e ) ; } final Vector tmp_cl_list = adm_dev . get_class_list ( ) ; // DeviceClass
int upd ; // Create the structure used to send data to the polling thread
final DevVarLongStringArray send = new DevVarLongStringArray ( ) ; send . lvalue = new int [ 1 ] ; send . svalue = new String [ 3 ] ; // A loop on each class and each device in class
for ( int i = 0 ; i < tmp_cl_list . size ( ) ; i ++ ) { final DeviceClass dc = ( DeviceClass ) tmp_cl_list . elementAt ( i ) ; final Vector dev_list = dc . get_device_list ( ) ; // < DeviceImpl *
for ( int j = 0 ; j < dev_list . size ( ) ; j ++ ) { final DeviceImpl dev = ( DeviceImpl ) dev_list . elementAt ( j ) ; final Vector poll_cmd_list = dev . get_polled_cmd ( ) ; final Vector poll_attr_list = dev . get_polled_attr ( ) ; // Send a Add Object command to the polling thread only
// if the polling period is different than zero
for ( int k = 0 ; k < poll_cmd_list . size ( ) ; k ++ ) { if ( k == 0 ) { send . svalue [ 0 ] = dev . get_name ( ) ; send . svalue [ 1 ] = "command" ; } // Convert polling period to a number
final String strval = ( String ) poll_cmd_list . elementAt ( k + 1 ) ; upd = Integer . parseInt ( strval ) ; // Send command to the polling thread
if ( upd != 0 ) { send . lvalue [ 0 ] = upd ; send . svalue [ 2 ] = ( String ) poll_cmd_list . elementAt ( k ) ; try { adm_dev . add_obj_polling ( send , false ) ; } catch ( final DevFailed e ) { } } k ++ ; } // Send a Add Object attribute to the polling thread only
// if the polling period is different than zero
for ( int k = 0 ; k < poll_attr_list . size ( ) ; k ++ ) { if ( k == 0 ) { send . svalue [ 0 ] = dev . get_name ( ) ; send . svalue [ 1 ] = "attribute" ; } // Convert polling period to a number
final String strval = ( String ) poll_attr_list . elementAt ( k + 1 ) ; upd = Integer . parseInt ( strval ) ; // Send command to the polling thread
if ( upd != 0 ) { send . lvalue [ 0 ] = upd ; send . svalue [ 2 ] = ( String ) poll_attr_list . elementAt ( k ) ; try { adm_dev . add_obj_polling ( send , false ) ; } catch ( final DevFailed e ) { } } k ++ ; } } } // Now , start the real polling
try { adm_dev . start_polling ( ) ; } catch ( final DevFailed e ) { Except . print_exception ( e ) ; } |
public class AbstractGenerateSoyEscapingDirectiveCode { /** * Called reflectively when Ant sees { @ code < input > } to specify a file that uses the generated
* helper functions . */
public FileRef createInput ( ) { } } | FileRef ref = new FileRef ( true ) ; inputs . add ( ref ) ; return ref ; |
public class SnackbarBuilder { /** * Set the callback to be informed of the Snackbar being dismissed due to being swiped away .
* @ param callback The callback .
* @ return This instance . */
@ SuppressWarnings ( "WeakerAccess" ) public SnackbarBuilder swipeDismissCallback ( final SnackbarSwipeDismissCallback callback ) { } } | callbacks . add ( new SnackbarCallback ( ) { public void onSnackbarSwiped ( Snackbar snackbar ) { callback . onSnackbarSwiped ( snackbar ) ; } } ) ; return this ; |
public class CommerceDiscountRulePersistenceImpl { /** * Returns a range of all the commerce discount rules .
* Useful when paginating results . Returns a maximum of < code > end - start < / code > instances . < code > start < / code > and < code > end < / code > are not primary keys , they are indexes in the result set . Thus , < code > 0 < / code > refers to the first result in the set . Setting both < code > start < / code > and < code > end < / code > to { @ link QueryUtil # ALL _ POS } will return the full result set . If < code > orderByComparator < / code > is specified , then the query will include the given ORDER BY logic . If < code > orderByComparator < / code > is absent and pagination is required ( < code > start < / code > and < code > end < / code > are not { @ link QueryUtil # ALL _ POS } ) , then the query will include the default ORDER BY logic from { @ link CommerceDiscountRuleModelImpl } . If both < code > orderByComparator < / code > and pagination are absent , for performance reasons , the query will not have an ORDER BY clause and the returned result set will be sorted on by the primary key in an ascending order .
* @ param start the lower bound of the range of commerce discount rules
* @ param end the upper bound of the range of commerce discount rules ( not inclusive )
* @ return the range of commerce discount rules */
@ Override public List < CommerceDiscountRule > findAll ( int start , int end ) { } } | return findAll ( start , end , null ) ; |
public class DDLCompiler { /** * Add a constraint on a given table to the catalog
* @ param table The table on which the constraint will be enforced
* @ param node The XML node representing the constraint
* @ param indexReplacementMap
* @ throws VoltCompilerException */
private void addConstraintToCatalog ( Table table , VoltXMLElement node , Map < String , String > indexReplacementMap , Map < String , Index > indexMap ) throws VoltCompilerException { } } | assert node . name . equals ( "constraint" ) ; String name = node . attributes . get ( "name" ) ; String typeName = node . attributes . get ( "constrainttype" ) ; ConstraintType type = ConstraintType . valueOf ( typeName ) ; String tableName = table . getTypeName ( ) ; if ( type == ConstraintType . LIMIT ) { int tupleLimit = Integer . parseInt ( node . attributes . get ( "rowslimit" ) ) ; if ( tupleLimit < 0 ) { throw m_compiler . new VoltCompilerException ( "Invalid constraint limit number '" + tupleLimit + "'" ) ; } if ( tableLimitConstraintCounter . contains ( tableName ) ) { throw m_compiler . new VoltCompilerException ( "Too many table limit constraints for table " + tableName ) ; } else { tableLimitConstraintCounter . add ( tableName ) ; } table . setTuplelimit ( tupleLimit ) ; String deleteStmt = node . attributes . get ( "rowslimitdeletestmt" ) ; if ( deleteStmt != null ) { Statement catStmt = table . getTuplelimitdeletestmt ( ) . add ( "limit_delete" ) ; catStmt . setSqltext ( deleteStmt ) ; validateTupleLimitDeleteStmt ( catStmt ) ; } return ; } if ( type == ConstraintType . CHECK ) { String msg = "VoltDB does not enforce check constraints. " ; msg += "Constraint on table " + tableName + " will be ignored." ; m_compiler . addWarn ( msg ) ; return ; } else if ( type == ConstraintType . FOREIGN_KEY ) { String msg = "VoltDB does not enforce foreign key references and constraints. " ; msg += "Constraint on table " + tableName + " will be ignored." ; m_compiler . addWarn ( msg ) ; return ; } else if ( type == ConstraintType . MAIN ) { // should never see these
assert ( false ) ; } else if ( type == ConstraintType . NOT_NULL ) { // these get handled by table metadata inspection
return ; } else if ( type != ConstraintType . PRIMARY_KEY && type != ConstraintType . UNIQUE ) { throw m_compiler . new VoltCompilerException ( "Invalid constraint type '" + typeName + "'" ) ; } // else , create the unique index below
// primary key code is in other places as well
// The constraint is backed by an index , therefore we need to create it
// TODO : We need to be able to use indexes for foreign keys . I am purposely
// leaving those out right now because HSQLDB just makes too many of them .
Constraint catalog_const = table . getConstraints ( ) . add ( name ) ; String indexName = node . attributes . get ( "index" ) ; assert ( indexName != null ) ; // handle replacements from duplicate index pruning
if ( indexReplacementMap . containsKey ( indexName ) ) { indexName = indexReplacementMap . get ( indexName ) ; } Index catalog_index = indexMap . get ( indexName ) ; // Attach the index to the catalog constraint ( catalog _ const ) .
if ( catalog_index != null ) { catalog_const . setIndex ( catalog_index ) ; // This may be redundant .
catalog_index . setUnique ( true ) ; boolean assumeUnique = Boolean . parseBoolean ( node . attributes . get ( "assumeunique" ) ) ; catalog_index . setAssumeunique ( assumeUnique ) ; } catalog_const . setType ( type . getValue ( ) ) ; |
public class Reflector { /** * call constructor of a class with matching arguments
* @ param clazz Class to get Instance
* @ param args Arguments for the Class
* @ return invoked Instance
* @ throws PageException */
public static Object callConstructor ( Class clazz , Object [ ] args ) throws PageException { } } | args = cleanArgs ( args ) ; try { return getConstructorInstance ( clazz , args ) . invoke ( ) ; } catch ( InvocationTargetException e ) { Throwable target = e . getTargetException ( ) ; if ( target instanceof PageException ) throw ( PageException ) target ; throw Caster . toPageException ( e . getTargetException ( ) ) ; } catch ( Exception e ) { throw Caster . toPageException ( e ) ; } |
public class FactoryDaoRegistry { /** * Creates a Dao for the specified object type . */
private < K , T extends Persistable < K > > CastorDao < K , T > _createDao ( final Class < T > type ) { } } | String daoClazzName = _typeMapping . get ( type . getName ( ) ) ; if ( _LOG_ . isDebugEnabled ( ) ) { _LOG_ . debug ( "creating Dao: object type=" + type + ", Dao type=" + daoClazzName ) ; } CastorDao < K , T > dao = null ; if ( daoClazzName == null ) { dao = new CastorDao < K , T > ( type ) ; } else { try { @ SuppressWarnings ( "unchecked" ) Class < CastorDao < K , T > > daoClazz = ( Class < CastorDao < K , T > > ) Class . forName ( daoClazzName ) ; dao = daoClazz . newInstance ( ) ; // throws InstantiationException , IllegalAccessException
} catch ( Exception ex ) { throw new PersistenceException ( ex ) ; } } if ( CastorDaoSupport . class . isInstance ( dao ) ) { CastorDaoSupport support = CastorDaoSupport . class . cast ( dao ) ; support . setJDOManager ( _jdoManager ) ; } return dao ; |
public class RedisClient { /** * ( non - Javadoc )
* @ see com . impetus . kundera . client . Client # close ( ) */
@ Override public void close ( ) { } } | if ( settings != null ) { settings . clear ( ) ; settings = null ; } if ( connection != null ) { connection . disconnect ( ) ; connection = null ; } reader = null ; |
public class HttpRequestTimer { /** * Start the timer with the specified timeout and return a object that can be used to track the
* state of the timer and cancel it if need be .
* @ param apacheRequest
* HTTP request this timer will abort if triggered .
* @ param requestTimeoutMillis
* A positive value here enables the timer , a non - positive value disables it and
* returns a dummy tracker task
* @ return Implementation of { @ link HttpRequestAbortTaskTrackerImpl } to query the state of the
* task and cancel it if appropriate */
public HttpRequestAbortTaskTracker startTimer ( final HttpRequestBase apacheRequest , final int requestTimeoutMillis ) { } } | if ( isTimeoutDisabled ( requestTimeoutMillis ) ) { return NoOpHttpRequestAbortTaskTracker . INSTANCE ; } else if ( executor == null ) { initializeExecutor ( ) ; } HttpRequestAbortTaskImpl timerTask = new HttpRequestAbortTaskImpl ( apacheRequest ) ; ScheduledFuture < ? > timerTaskFuture = executor . schedule ( timerTask , requestTimeoutMillis , TimeUnit . MILLISECONDS ) ; return new HttpRequestAbortTaskTrackerImpl ( timerTask , timerTaskFuture ) ; |
public class ApiKeyRealm { /** * Gets the AuthenticationInfo that matches a token . This method is only called if the info is not already
* cached by the realm , so this method does not need to perform any further caching . */
@ SuppressWarnings ( "unchecked" ) @ Override protected AuthenticationInfo doGetAuthenticationInfo ( AuthenticationToken token ) throws AuthenticationException { } } | String id ; if ( AnonymousToken . isAnonymous ( token ) ) { // Only continue if an anonymous identity has been set
if ( _anonymousId != null ) { id = _anonymousId ; } else { return null ; } } else { id = ( ( ApiKeyAuthenticationToken ) token ) . getPrincipal ( ) ; } return getUncachedAuthenticationInfoForKey ( id ) ; |
public class BoundsOnRatiosInThetaSketchedSets { /** * Gets the approximate lower bound for B over A based on a 95 % confidence interval
* @ param sketchA the sketch A
* @ param sketchB the sketch B
* @ return the approximate lower bound for B over A */
public static double getLowerBoundForBoverA ( final Sketch sketchA , final Sketch sketchB ) { } } | final double thetaA = sketchA . getTheta ( ) ; final double thetaB = sketchB . getTheta ( ) ; checkThetas ( thetaA , thetaB ) ; final int countB = sketchB . getRetainedEntries ( true ) ; final int countA = ( thetaB == thetaA ) ? sketchA . getRetainedEntries ( true ) : sketchA . getCountLessThanTheta ( thetaB ) ; if ( countA <= 0 ) { return 0 ; } return BoundsOnRatiosInSampledSets . getLowerBoundForBoverA ( countA , countB , thetaB ) ; |
public class Seconds { /** * Returns a new instance with the specified number of seconds added .
* This instance is immutable and unaffected by this method call .
* @ param seconds the amount of seconds to add , may be negative
* @ return the new period plus the specified number of seconds
* @ throws ArithmeticException if the result overflows an int */
public Seconds plus ( int seconds ) { } } | if ( seconds == 0 ) { return this ; } return Seconds . seconds ( FieldUtils . safeAdd ( getValue ( ) , seconds ) ) ; |
public class PennTreeReader { /** * Reads a single tree in standard Penn Treebank format from the
* input stream . The method supports additional parentheses around the
* tree ( an unnamed ROOT node ) so long as they are balanced . If the token stream
* ends before the current tree is complete , then the method will throw an
* < code > IOException < / code > .
* Note that the method will skip malformed trees and attempt to
* read additional trees from the input stream . It is possible , however ,
* that a malformed tree will corrupt the token stream . In this case ,
* an < code > IOException < / code > will eventually be thrown .
* @ return A single tree , or < code > null < / code > at end of token stream . */
public Tree readTree ( ) throws IOException { } } | Tree t = null ; while ( tokenizer . hasNext ( ) && t == null ) { // Setup PDA
this . currentTree = null ; this . stack = new ArrayList < Tree > ( ) ; try { t = getTreeFromInputStream ( ) ; } catch ( NoSuchElementException e ) { throw new IOException ( "End of token stream encountered before parsing could complete." ) ; } if ( t != null ) { // cdm 20100618 : Don ' t do this ! This was never the historical behavior ! ! !
// Escape empty trees e . g . ( ( ) )
// while ( t ! = null & & ( t . value ( ) = = null | | t . value ( ) . equals ( " " ) ) & & t . numChildren ( ) < = 1)
// t = t . firstChild ( ) ;
if ( t != null && treeNormalizer != null && treeFactory != null ) t = treeNormalizer . normalizeWholeTree ( t , treeFactory ) ; } } return t ; |
public class RectangularShape { /** * Sets the location and size of the framing rectangle of this shape based on the specified
* center and corner points . */
public void setFrameFromCenter ( float centerX , float centerY , float cornerX , float cornerY ) { } } | float width = Math . abs ( cornerX - centerX ) ; float height = Math . abs ( cornerY - centerY ) ; setFrame ( centerX - width , centerY - height , width * 2 , height * 2 ) ; |
public class CacheableDataProvider { /** * Execute entry processor on entry cache
* @ param entry Entry on which will be executed entry processor
* @ param entryProcessor Entry processor that must be executed
* @ param < ReturnValue > ClassType
* @ return Return value from entry processor */
public < ReturnValue > ReturnValue executeOnEntry ( @ NotNull Entry entry , @ NotNull CacheEntryProcessor < Long , Entry , ReturnValue > entryProcessor ) { } } | long key = buildHashCode ( entry ) ; if ( ! cache . containsKey ( key ) ) { List < Object > primaryKeys = getPrimaryKeys ( entry ) ; List < Entry > entries = super . fetch ( primaryKeys ) ; Optional < Entry > first = entries . stream ( ) . findFirst ( ) ; if ( first . isPresent ( ) ) { cache . putIfAbsent ( key , first . get ( ) ) ; } } return cache . invoke ( key , entryProcessor ) ; |
public class JsonPullParser { /** * Sets an Reader as the { @ code JSON } feed .
* @ param reader
* A Reader . Cannot be null . */
void setSource ( Reader reader ) { } } | if ( reader == null ) { throw new IllegalArgumentException ( "'reader' must not be null." ) ; } br = ( reader instanceof BufferedReader ) ? ( BufferedReader ) reader : new BufferedReader ( reader ) ; |
public class Serializer { /** * Write the object to the stream .
* @ param stream the stream to write the object to .
* @ param object the object to write to the stream .
* @ throws SerializationException the object could not be serialized . */
public void writeToStream ( OutputStream stream , Object object ) throws SerializationException { } } | try { mapper . writeValue ( stream , object ) ; } catch ( IOException ex ) { throw new SerializationException ( "Exception during serialization" , ex ) ; } |
public class Spinner { /** * Obtains all attributes from a specific attribute set .
* @ param attributeSet
* The attribute set , the attributes should be obtained from , as an instance of the type
* { @ link AttributeSet } or null , if no attributes should be obtained */
private void obtainStyledAttributes ( @ Nullable final AttributeSet attributeSet ) { } } | TypedArray typedArray = getContext ( ) . obtainStyledAttributes ( attributeSet , R . styleable . Spinner ) ; try { obtainHint ( typedArray ) ; obtainHintColor ( typedArray ) ; obtainSpinnerStyledAttributes ( typedArray ) ; } finally { typedArray . recycle ( ) ; } |
public class RegisterWebAppVisitorHS { /** * Load a class from class name .
* @ param clazz class of the required object
* @ param classLoader class loader to use to load the class
* @ param className class name for the class to load
* @ return class object
* @ throws NullArgumentException if any of the parameters is null
* @ throws ClassNotFoundException re - thrown
* @ throws IllegalAccessException re - thrown */
@ SuppressWarnings ( "unchecked" ) public static < T > Class < ? extends T > loadClass ( final Class < T > clazz , final ClassLoader classLoader , final String className ) throws ClassNotFoundException , IllegalAccessException { } } | NullArgumentException . validateNotNull ( clazz , "Class" ) ; NullArgumentException . validateNotNull ( classLoader , "ClassLoader" ) ; NullArgumentException . validateNotNull ( className , "Servlet Class" ) ; return ( Class < ? extends T > ) classLoader . loadClass ( className ) ; |
public class GetMetricDataResult { /** * The metrics that are returned , including the metric name , namespace , and dimensions .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setMetricDataResults ( java . util . Collection ) } or { @ link # withMetricDataResults ( java . util . Collection ) } if
* you want to override the existing values .
* @ param metricDataResults
* The metrics that are returned , including the metric name , namespace , and dimensions .
* @ return Returns a reference to this object so that method calls can be chained together . */
public GetMetricDataResult withMetricDataResults ( MetricDataResult ... metricDataResults ) { } } | if ( this . metricDataResults == null ) { setMetricDataResults ( new com . amazonaws . internal . SdkInternalList < MetricDataResult > ( metricDataResults . length ) ) ; } for ( MetricDataResult ele : metricDataResults ) { this . metricDataResults . add ( ele ) ; } return this ; |
public class CFFFontSubset { /** * Function builds the new offset array , object array and assembles the index .
* used for creating the glyph and subrs subsetted index
* @ param Offsets the offset array of the original index
* @ param Used the hashmap of the used objects
* @ param OperatorForUnusedEntries the operator inserted into the data stream for unused entries
* @ return the new index subset version
* @ throws IOException */
protected byte [ ] BuildNewIndex ( int [ ] Offsets , HashMap Used , byte OperatorForUnusedEntries ) throws IOException { } } | int unusedCount = 0 ; int Offset = 0 ; int [ ] NewOffsets = new int [ Offsets . length ] ; // Build the Offsets Array for the Subset
for ( int i = 0 ; i < Offsets . length ; ++ i ) { NewOffsets [ i ] = Offset ; // If the object in the offset is also present in the used
// HashMap then increment the offset var by its size
if ( Used . containsKey ( Integer . valueOf ( i ) ) ) { Offset += Offsets [ i + 1 ] - Offsets [ i ] ; } else { // Else the same offset is kept in i + 1.
unusedCount ++ ; } } // Offset var determines the size of the object array
byte [ ] NewObjects = new byte [ Offset + unusedCount ] ; // Build the new Object array
int unusedOffset = 0 ; for ( int i = 0 ; i < Offsets . length - 1 ; ++ i ) { int start = NewOffsets [ i ] ; int end = NewOffsets [ i + 1 ] ; NewOffsets [ i ] = start + unusedOffset ; // If start ! = End then the Object is used
// So , we will copy the object data from the font file
if ( start != end ) { // All offsets are Global Offsets relative to the beginning of the font file .
// Jump the file pointer to the start address to read from .
buf . seek ( Offsets [ i ] ) ; // Read from the buffer and write into the array at start .
buf . readFully ( NewObjects , start + unusedOffset , end - start ) ; } else { NewObjects [ start + unusedOffset ] = OperatorForUnusedEntries ; unusedOffset ++ ; } } NewOffsets [ Offsets . length - 1 ] += unusedOffset ; // Use AssembleIndex to build the index from the offset & object arrays
return AssembleIndex ( NewOffsets , NewObjects ) ; |
public class Http { /** * Makes PUT request to given URL
* @ param url url
* @ param body request body to post or null to skip
* @ param query query to append to url or null to skip
* @ param headers to include or null to skip
* @ param connectTimeOut connect time out in ms
* @ param readTimeOut read time out in ms
* @ return Response object with HTTP response code and response as String
* @ throws HttpException in case of invalid input parameters */
public static Response put ( String url , String body , Map < String , String > query , Map < String , String > headers , int connectTimeOut , int readTimeOut ) throws HttpException { } } | return execute ( "PUT" , url , body , query , headers , connectTimeOut , readTimeOut ) ; |
public class ClientState { /** * Attempts to login the given user . */
public void login ( AuthenticatedUser user ) throws AuthenticationException { } } | if ( ! user . isAnonymous ( ) && ! Auth . isExistingUser ( user . getName ( ) ) ) throw new AuthenticationException ( String . format ( "User %s doesn't exist - create it with CREATE USER query first" , user . getName ( ) ) ) ; this . user = user ; |
public class KeyEncoder { /** * Encodes the given Boolean object into exactly 1 byte for descending
* order .
* @ param value optional Boolean value to encode
* @ param dst destination for encoded bytes
* @ param dstOffset offset into destination array */
public static void encodeDesc ( Boolean value , byte [ ] dst , int dstOffset ) { } } | if ( value == null ) { dst [ dstOffset ] = NULL_BYTE_LOW ; } else { dst [ dstOffset ] = value . booleanValue ( ) ? ( byte ) 127 : ( byte ) 128 ; } |
public class QrCodeDecoderImage { /** * Reads the format bits near the corner position pattern */
private boolean readFormatRegion0 ( QrCode qr ) { } } | // set the coordinate system to the closest pp to reduce position errors
gridReader . setSquare ( qr . ppCorner , ( float ) qr . threshCorner ) ; bits . resize ( 15 ) ; bits . zero ( ) ; for ( int i = 0 ; i < 6 ; i ++ ) { read ( i , i , 8 ) ; } read ( 6 , 7 , 8 ) ; read ( 7 , 8 , 8 ) ; read ( 8 , 8 , 7 ) ; for ( int i = 0 ; i < 6 ; i ++ ) { read ( 9 + i , 8 , 5 - i ) ; } return true ; |
public class EndpointDemographicMarshaller { /** * Marshall the given parameter object . */
public void marshall ( EndpointDemographic endpointDemographic , ProtocolMarshaller protocolMarshaller ) { } } | if ( endpointDemographic == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( endpointDemographic . getAppVersion ( ) , APPVERSION_BINDING ) ; protocolMarshaller . marshall ( endpointDemographic . getLocale ( ) , LOCALE_BINDING ) ; protocolMarshaller . marshall ( endpointDemographic . getMake ( ) , MAKE_BINDING ) ; protocolMarshaller . marshall ( endpointDemographic . getModel ( ) , MODEL_BINDING ) ; protocolMarshaller . marshall ( endpointDemographic . getModelVersion ( ) , MODELVERSION_BINDING ) ; protocolMarshaller . marshall ( endpointDemographic . getPlatform ( ) , PLATFORM_BINDING ) ; protocolMarshaller . marshall ( endpointDemographic . getPlatformVersion ( ) , PLATFORMVERSION_BINDING ) ; protocolMarshaller . marshall ( endpointDemographic . getTimezone ( ) , TIMEZONE_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class AssertUtils { /** * Indicates if a class or at least one of its annotations is annotated by a given annotation .
* Notice that the classes with a package name starting with " java . lang " will be ignored .
* @ param memberDeclaringClass the class to check
* @ param candidate the annotation to find
* @ return true if the annotation is found , false otherwise */
public static boolean hasAnnotationDeep ( Class < ? > memberDeclaringClass , Class < ? extends Annotation > candidate ) { } } | if ( memberDeclaringClass . equals ( candidate ) ) { return true ; } for ( Annotation anno : memberDeclaringClass . getAnnotations ( ) ) { Class < ? extends Annotation > annoClass = anno . annotationType ( ) ; if ( ! annoClass . getPackage ( ) . getName ( ) . startsWith ( "java.lang" ) && hasAnnotationDeep ( annoClass , candidate ) ) { return true ; } } return false ; |
public class FieldCriteria { /** * Only to be used by InMemoryRepository */
public PropertyInterface getProperty ( ) { } } | if ( property == null ) { property = Properties . getPropertyByPath ( classHolder . getClazz ( ) , path ) ; } return property ; |
public class CounterManagerNotificationManager { /** * It registers the cache listeners if they aren ' t already registered .
* @ param cache The { @ link Cache } to register the listener . */
public synchronized void listenOn ( Cache < CounterKey , CounterValue > cache ) throws InterruptedException { } } | if ( ! topologyListener . registered ) { this . cache = cache ; topologyListener . register ( cache ) ; } if ( ! listenersRegistered ) { this . cache . addListener ( valueListener , CounterKeyFilter . getInstance ( ) ) ; listenersRegistered = true ; } |
public class PlayerServlet { /** * { @ inheritDoc } */
@ Override protected void doBye ( SipServletRequest request ) throws ServletException , IOException { } } | logger . info ( "MediaPlaybackServlet: Got BYE request:\n" + request ) ; isBye = true ; MediaSession mediaSession = ( MediaSession ) request . getSession ( ) . getAttribute ( "MEDIA_SESSION" ) ; mediaSession . release ( ) ; request . getSession ( ) . removeAttribute ( "MEDIA_SESSION" ) ; SipServletResponse sipServletResponse = request . createResponse ( 200 ) ; sipServletResponse . send ( ) ; // releasing the media connection |
public class TextSimilarity { /** * 词列表1和词列表2的相似度分值
* @ param words1 词列表1
* @ param words2 词列表2
* @ return 相似度分值 */
@ Override public double similarScore ( List < Word > words1 , List < Word > words2 ) { } } | if ( words1 == null || words2 == null ) { // 只要有一个文本为null , 规定相似度分值为0 , 表示完全不相等
return 0.0 ; } if ( words1 . isEmpty ( ) && words2 . isEmpty ( ) ) { // 如果两个文本都为空 , 规定相似度分值为1 , 表示完全相等
return 1.0 ; } if ( words1 . isEmpty ( ) || words2 . isEmpty ( ) ) { // 如果一个文本为空 , 另一个不为空 , 规定相似度分值为0 , 表示完全不相等
return 0.0 ; } // 输出词列表信息
if ( LOGGER . isDebugEnabled ( ) ) { LOGGER . debug ( "词列表1:" ) ; LOGGER . debug ( "\t" + words1 ) ; LOGGER . debug ( "词列表2:" ) ; LOGGER . debug ( "\t" + words2 ) ; } double score = scoreImpl ( words1 , words2 ) ; if ( LOGGER . isDebugEnabled ( ) ) { LOGGER . debug ( "分值:" + score ) ; } score = ( int ) ( score * 1000000 + 0.5 ) / ( double ) 1000000 ; if ( LOGGER . isDebugEnabled ( ) ) { LOGGER . debug ( "取六位小数,四舍五入,分值:" + score ) ; } return score ; |
public class SimpleBase { /** * Creates a new iterator for traversing through a submatrix inside this matrix . It can be traversed
* by row or by column . Range of elements is inclusive , e . g . minRow = 0 and maxRow = 1 will include rows
* 0 and 1 . The iteration starts at ( minRow , minCol ) and ends at ( maxRow , maxCol )
* @ param rowMajor true means it will traverse through the submatrix by row first , false by columns .
* @ param minRow first row it will start at .
* @ param minCol first column it will start at .
* @ param maxRow last row it will stop at .
* @ param maxCol last column it will stop at .
* @ return A new MatrixIterator */
public DMatrixIterator iterator ( boolean rowMajor , int minRow , int minCol , int maxRow , int maxCol ) { } } | return new DMatrixIterator ( ( DMatrixRMaj ) mat , rowMajor , minRow , minCol , maxRow , maxCol ) ; |
public class NetworkConfig { /** * Creates a new NetworkConfig instance configured with details supplied in YAML format
* @ param configStream A stream opened on a YAML document containing network configuration details
* @ return A new NetworkConfig instance
* @ throws InvalidArgumentException */
public static NetworkConfig fromYamlStream ( InputStream configStream ) throws InvalidArgumentException , NetworkConfigurationException { } } | logger . trace ( "NetworkConfig.fromYamlStream..." ) ; // Sanity check
if ( configStream == null ) { throw new InvalidArgumentException ( "configStream must be specified" ) ; } Yaml yaml = new Yaml ( ) ; @ SuppressWarnings ( "unchecked" ) Map < String , Object > map = yaml . load ( configStream ) ; JsonObjectBuilder builder = Json . createObjectBuilder ( map ) ; JsonObject jsonConfig = builder . build ( ) ; return fromJsonObject ( jsonConfig ) ; |
public class BasePortletLayoutStatisticsController { /** * Select the first portlet name by default for the form */
protected void selectFormDefaultPortlet ( final F report ) { } } | final Set < AggregatedPortletMapping > portlets = this . getPortlets ( ) ; if ( ! portlets . isEmpty ( ) ) { report . getPortlets ( ) . add ( portlets . iterator ( ) . next ( ) . getFname ( ) ) ; } |
public class ReflectionToStringBuilder { /** * Returns whether or not to append the given < code > Field < / code > .
* < ul >
* < li > Transient fields are appended only if { @ link # isAppendTransients ( ) } returns < code > true < / code > .
* < li > Static fields are appended only if { @ link # isAppendStatics ( ) } returns < code > true < / code > .
* < li > Inner class fields are not appended . < / li >
* < / ul >
* @ param field
* The Field to test .
* @ return Whether or not to append the given < code > Field < / code > . */
protected boolean accept ( final Field field ) { } } | if ( field . getName ( ) . indexOf ( ClassUtils . INNER_CLASS_SEPARATOR_CHAR ) != - 1 ) { // Reject field from inner class .
return false ; } if ( Modifier . isTransient ( field . getModifiers ( ) ) && ! this . isAppendTransients ( ) ) { // Reject transient fields .
return false ; } if ( Modifier . isStatic ( field . getModifiers ( ) ) && ! this . isAppendStatics ( ) ) { // Reject static fields .
return false ; } if ( this . excludeFieldNames != null && Arrays . binarySearch ( this . excludeFieldNames , field . getName ( ) ) >= 0 ) { // Reject fields from the getExcludeFieldNames list .
return false ; } return ! field . isAnnotationPresent ( ToStringExclude . class ) ; |
public class SegmentsUtil { /** * Find equal segments2 in segments1.
* @ param segments1 segs to find .
* @ param segments2 sub segs .
* @ return Return the found offset , return - 1 if not find . */
public static int find ( MemorySegment [ ] segments1 , int offset1 , int numBytes1 , MemorySegment [ ] segments2 , int offset2 , int numBytes2 ) { } } | if ( numBytes2 == 0 ) { // quick way 1.
return offset1 ; } if ( inFirstSegment ( segments1 , offset1 , numBytes1 ) && inFirstSegment ( segments2 , offset2 , numBytes2 ) ) { byte first = segments2 [ 0 ] . get ( offset2 ) ; int end = numBytes1 - numBytes2 + offset1 ; for ( int i = offset1 ; i <= end ; i ++ ) { // quick way 2 : equal first byte .
if ( segments1 [ 0 ] . get ( i ) == first && segments1 [ 0 ] . equalTo ( segments2 [ 0 ] , i , offset2 , numBytes2 ) ) { return i ; } } return - 1 ; } else { return findInMultiSegments ( segments1 , offset1 , numBytes1 , segments2 , offset2 , numBytes2 ) ; } |
public class ReconciliationOrderReport { /** * Sets the proposalGrossBillableRevenueManualAdjustment value for this ReconciliationOrderReport .
* @ param proposalGrossBillableRevenueManualAdjustment * If this reconciliation data is for a { @ link Proposal } , then
* this contains the gross revenue
* manual adjustment for that proposal . Otherwise , this
* is { @ code null } .
* This value is editable . */
public void setProposalGrossBillableRevenueManualAdjustment ( com . google . api . ads . admanager . axis . v201808 . Money proposalGrossBillableRevenueManualAdjustment ) { } } | this . proposalGrossBillableRevenueManualAdjustment = proposalGrossBillableRevenueManualAdjustment ; |
public class NumericParameterTypeImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
public void setCurrencyUnit ( String newCurrencyUnit ) { } } | String oldCurrencyUnit = currencyUnit ; currencyUnit = newCurrencyUnit ; if ( eNotificationRequired ( ) ) eNotify ( new ENotificationImpl ( this , Notification . SET , BpsimPackage . NUMERIC_PARAMETER_TYPE__CURRENCY_UNIT , oldCurrencyUnit , currencyUnit ) ) ; |
public class MmtfActions { /** * Get a Biojava structure from the mmtf REST service .
* @ param pdbId the PDB code of the required structure
* @ return a Structure object relating to the input byte array
* @ throws IOException */
public static Structure readFromWeb ( String pdbId ) throws IOException { } } | // Get the reader - this is the bit that people need to implement .
MmtfStructureReader mmtfStructureReader = new MmtfStructureReader ( ) ; // Do the inflation
new StructureDataToAdapter ( new GenericDecoder ( ReaderUtils . getDataFromUrl ( pdbId ) ) , mmtfStructureReader ) ; // Get the structue
return mmtfStructureReader . getStructure ( ) ; |
public class AbstractSecureSession { /** * The initial handshake is a procedure by which the two peers exchange
* communication parameters until an SecureSession is established . Application
* data can not be sent during this phase .
* @ param receiveBuffer Encrypted message
* @ return True means handshake success
* @ throws IOException The I / O exception */
protected boolean doHandshake ( ByteBuffer receiveBuffer ) throws IOException { } } | if ( ! session . isOpen ( ) ) { close ( ) ; return ( initialHSComplete = false ) ; } if ( initialHSComplete ) { return true ; } switch ( initialHSStatus ) { case NOT_HANDSHAKING : case FINISHED : { handshakeFinish ( ) ; return initialHSComplete ; } case NEED_UNWRAP : doHandshakeReceive ( receiveBuffer ) ; if ( initialHSStatus != SSLEngineResult . HandshakeStatus . NEED_WRAP ) break ; case NEED_WRAP : doHandshakeResponse ( ) ; break ; default : // NEED _ TASK
throw new SecureNetException ( "Invalid Handshaking State" + initialHSStatus ) ; } return initialHSComplete ; |
public class CmsDefaultXmlContentHandler { /** * Adds search settings as defined by ' simple ' syntax in fields . < p >
* @ param contentDef the content definition
* @ param name the element name
* @ param value the search setting value
* @ throws CmsXmlException if something goes wrong */
protected void addSimpleSearchSetting ( CmsXmlContentDefinition contentDef , String name , String value ) throws CmsXmlException { } } | if ( "false" . equalsIgnoreCase ( value ) ) { addSearchSetting ( contentDef , name , Boolean . FALSE ) ; } else if ( "true" . equalsIgnoreCase ( value ) ) { addSearchSetting ( contentDef , name , Boolean . TRUE ) ; } else { StringTemplate template = m_searchTemplateGroup . getInstanceOf ( value ) ; if ( ( template != null ) && ( template . getFormalArgument ( "name" ) != null ) ) { template . setAttribute ( "name" , CmsEncoder . escapeXml ( name ) ) ; String xml = template . toString ( ) ; try { Document doc = DocumentHelper . parseText ( xml ) ; initSearchSettings ( doc . getRootElement ( ) , contentDef ) ; } catch ( DocumentException e ) { LOG . error ( e . getLocalizedMessage ( ) , e ) ; } } } |
public class sdcard { /** * Counts the size of a directory recursively ( sum of the length of all files ) .
* @ param directory directory to inspect , must not be null
* @ return size of directory in bytes , 0 if directory is security restricted */
public static long getDirectorySize ( File directory ) { } } | if ( ! directory . exists ( ) ) { throw new IllegalArgumentException ( directory + " does not exist" ) ; } if ( ! directory . isDirectory ( ) ) { throw new IllegalArgumentException ( directory + " is not a directory" ) ; } long size = 0 ; File [ ] files = directory . listFiles ( ) ; if ( files == null ) { // null if security restricted
return 0L ; } for ( int i = 0 ; i < files . length ; i ++ ) { File file = files [ i ] ; if ( file . isDirectory ( ) ) { size += getDirectorySize ( file ) ; } else { size += file . length ( ) ; } } return size ; |
public class PagesIndexOrdering { /** * note this code was forked from Fastutils */
@ SuppressWarnings ( "InnerAssignment" ) private void quickSort ( PagesIndex pagesIndex , int from , int to ) { } } | int len = to - from ; // Insertion sort on smallest arrays
if ( len < SMALL ) { for ( int i = from ; i < to ; i ++ ) { for ( int j = i ; j > from && ( comparator . compareTo ( pagesIndex , j - 1 , j ) > 0 ) ; j -- ) { pagesIndex . swap ( j , j - 1 ) ; } } return ; } // Choose a partition element , v
int m = from + len / 2 ; // Small arrays , middle element
if ( len > SMALL ) { int l = from ; int n = to - 1 ; if ( len > MEDIUM ) { // Big arrays , pseudomedian of 9
int s = len / 8 ; l = median3 ( pagesIndex , l , l + s , l + 2 * s ) ; m = median3 ( pagesIndex , m - s , m , m + s ) ; n = median3 ( pagesIndex , n - 2 * s , n - s , n ) ; } m = median3 ( pagesIndex , l , m , n ) ; // Mid - size , med of 3
} // int v = x [ m ] ;
int a = from ; int b = a ; int c = to - 1 ; // Establish Invariant : v * ( < v ) * ( > v ) * v *
int d = c ; while ( true ) { int comparison ; while ( b <= c && ( ( comparison = comparator . compareTo ( pagesIndex , b , m ) ) <= 0 ) ) { if ( comparison == 0 ) { if ( a == m ) { m = b ; // moving target ; DELTA to JDK ! ! !
} else if ( b == m ) { m = a ; // moving target ; DELTA to JDK ! ! !
} pagesIndex . swap ( a ++ , b ) ; } b ++ ; } while ( c >= b && ( ( comparison = comparator . compareTo ( pagesIndex , c , m ) ) >= 0 ) ) { if ( comparison == 0 ) { if ( c == m ) { m = d ; // moving target ; DELTA to JDK ! ! !
} else if ( d == m ) { m = c ; // moving target ; DELTA to JDK ! ! !
} pagesIndex . swap ( c , d -- ) ; } c -- ; } if ( b > c ) { break ; } if ( b == m ) { m = d ; // moving target ; DELTA to JDK ! ! !
} else if ( c == m ) { m = c ; // moving target ; DELTA to JDK ! ! !
} pagesIndex . swap ( b ++ , c -- ) ; } // Swap partition elements back to middle
int s ; int n = to ; s = Math . min ( a - from , b - a ) ; vectorSwap ( pagesIndex , from , b - s , s ) ; s = Math . min ( d - c , n - d - 1 ) ; vectorSwap ( pagesIndex , b , n - s , s ) ; // Recursively sort non - partition - elements
if ( ( s = b - a ) > 1 ) { quickSort ( pagesIndex , from , from + s ) ; } if ( ( s = d - c ) > 1 ) { quickSort ( pagesIndex , n - s , n ) ; } |
public class ESFilterBuilder { /** * Gets the between boundary values .
* @ param boundExpression
* the bound expression
* @ return the between boundry values */
private String getBetweenBoundaryValues ( Expression boundExpression ) { } } | if ( boundExpression instanceof IdentificationVariable || boundExpression instanceof NumericLiteral || boundExpression instanceof InputParameter ) { Object value = ( boundExpression instanceof InputParameter ) ? kunderaQuery . getParametersMap ( ) . get ( ( boundExpression ) . toParsedText ( ) ) : boundExpression . toParsedText ( ) ; return value . toString ( ) ; } else if ( boundExpression instanceof AdditionExpression ) { String leftValue = checkInputParameter ( ( ( AdditionExpression ) boundExpression ) . getLeftExpression ( ) ) ; String rightValue = checkInputParameter ( ( ( AdditionExpression ) boundExpression ) . getRightExpression ( ) ) ; return new Integer ( Integer . parseInt ( leftValue ) + Integer . parseInt ( rightValue ) ) . toString ( ) ; } else if ( boundExpression instanceof SubtractionExpression ) { String leftValue = checkInputParameter ( ( ( SubtractionExpression ) boundExpression ) . getLeftExpression ( ) ) ; String rightValue = checkInputParameter ( ( ( SubtractionExpression ) boundExpression ) . getRightExpression ( ) ) ; return new Integer ( Integer . parseInt ( leftValue ) - Integer . parseInt ( rightValue ) ) . toString ( ) ; } else if ( boundExpression instanceof MultiplicationExpression ) { String leftValue = checkInputParameter ( ( ( MultiplicationExpression ) boundExpression ) . getLeftExpression ( ) ) ; String rightValue = checkInputParameter ( ( ( MultiplicationExpression ) boundExpression ) . getRightExpression ( ) ) ; return new Integer ( Integer . parseInt ( leftValue ) * Integer . parseInt ( rightValue ) ) . toString ( ) ; } else if ( boundExpression instanceof DivisionExpression ) { String leftValue = checkInputParameter ( ( ( DivisionExpression ) boundExpression ) . getLeftExpression ( ) ) ; String rightValue = checkInputParameter ( ( ( DivisionExpression ) boundExpression ) . getRightExpression ( ) ) ; return new Integer ( Integer . 
parseInt ( leftValue ) / Integer . parseInt ( rightValue ) ) . toString ( ) ; } return null ; |
public class ServletBeanContext { /** * Pushes the current request context onto the stack */
private synchronized void pushRequestContext ( ServletContext context , ServletRequest req , ServletResponse resp ) { } } | getRequestStack ( ) . push ( new RequestContext ( context , req , resp ) ) ; |
public class QueryExecutorImpl { /** * Wait until our lock is released . Execution of a single synchronized method can then continue
* without further ado . Must be called at beginning of each synchronized public method . */
private void waitOnLock ( ) throws PSQLException { } } | while ( lockedFor != null ) { try { this . wait ( ) ; } catch ( InterruptedException ie ) { Thread . currentThread ( ) . interrupt ( ) ; throw new PSQLException ( GT . tr ( "Interrupted while waiting to obtain lock on database connection" ) , PSQLState . OBJECT_NOT_IN_STATE , ie ) ; } } |
public class DateUtils { /** * Warning : relies on default timezone !
* @ since 7.6 */
public static String formatDate ( Instant d ) { } } | return d . atZone ( ZoneId . systemDefault ( ) ) . toLocalDate ( ) . toString ( ) ; |
public class SplitOperation { /** * Performs the actual splitting operation . Throws a TransformationOperationException if the index string is not a
* number or causes an IndexOutOfBoundsException . */
private String performSplitting ( String source , String splitString , String indexString ) throws TransformationOperationException { } } | try { Integer index = Integer . parseInt ( indexString ) ; return source . split ( splitString ) [ index ] ; } catch ( NumberFormatException e ) { throw new TransformationOperationException ( "The given result index parameter is not a number" ) ; } catch ( IndexOutOfBoundsException e ) { throw new TransformationOperationException ( "The split result does not have that much results for the index parameter" ) ; } |
public class ReverseDbTreeNode { /** * Loads the children of this TreeNode . If another Thread is already active on this node the method returns
* without doing anything ( if a separate Thread is started the method returns anyway , but the Thread might
* do nothing ) .
* @ param recursive If true , all children down to the leaf node are retrieved
* @ param replace If true the children are loaded unconditionally . If false the
* retrieval is only done if the node has not been populated before .
* @ param inNewThread if true the load is done in a new thread . */
public void load ( final boolean recursive , final boolean replace , final boolean inNewThread ) { } } | if ( inNewThread ) { new Thread ( ) { public void run ( ) { load ( recursive , replace , false ) ; } } . start ( ) ; return ; } if ( ! populationInProgress ) { synchronized ( this . populationLock ) { this . populationInProgress = true ; if ( replace || ! this . isFilled ) { this . isFilled = _load ( ) ; } this . populationInProgress = false ; } if ( ! recursive ) this . getDbMetaTreeModel ( ) . setStatusBarMessage ( "Done" ) ; } if ( recursive ) { java . util . Enumeration e = this . children ( ) ; while ( e . hasMoreElements ( ) ) { Object o = e . nextElement ( ) ; if ( o instanceof ReverseDbTreeNode ) ( ( ReverseDbTreeNode ) o ) . load ( recursive , replace , false ) ; } this . getDbMetaTreeModel ( ) . setStatusBarMessage ( "Done" ) ; } |
public class RabbitmqClusterContext { /** * 呼出元別 、 接続先RabbitMQプロセスの定義マップを検証して設定する 。
* @ param connectionProcessMap the connectionProcessMap to set
* @ throws RabbitmqCommunicateException 接続先RabbitMQプロセスがRabbitMQプロセス一覧に定義されていない場合 */
public void setConnectionProcessMap ( Map < String , String > connectionProcessMap ) throws RabbitmqCommunicateException { } } | Map < String , String > tempConnectionProcessMap = connectionProcessMap ; if ( connectionProcessMap == null ) { tempConnectionProcessMap = new HashMap < String , String > ( ) ; } validateProcessReference ( this . mqProcessList , tempConnectionProcessMap ) ; this . connectionProcessMap = tempConnectionProcessMap ; |
public class CopiedOverriddenMethod { /** * compares two code blocks to see if they are equal with regard to instructions and field accesses
* @ param child
* the first code block
* @ param parent
* the second code block
* @ return whether the code blocks are the same */
private boolean codeEquals ( Code child , Code parent ) { } } | if ( parent == null ) { return false ; } byte [ ] childBytes = child . getCode ( ) ; byte [ ] parentBytes = parent . getCode ( ) ; if ( ( childBytes == null ) || ( parentBytes == null ) ) { return false ; } if ( childBytes . length != parentBytes . length ) { return false ; } InstructionHandle [ ] childihs = new InstructionList ( childBytes ) . getInstructionHandles ( ) ; InstructionHandle [ ] parentihs = new InstructionList ( parentBytes ) . getInstructionHandles ( ) ; if ( childihs . length != parentihs . length ) { return false ; } for ( int i = 0 ; i < childihs . length ; i ++ ) { InstructionHandle childih = childihs [ i ] ; InstructionHandle parentih = parentihs [ i ] ; Instruction childin = childih . getInstruction ( ) ; Instruction parentin = parentih . getInstruction ( ) ; if ( ! childin . getName ( ) . equals ( parentin . getName ( ) ) ) { return false ; } if ( childin instanceof FieldInstruction ) { String childFName = ( ( FieldInstruction ) childin ) . getFieldName ( childPoolGen ) ; String parentFName = ( ( FieldInstruction ) parentin ) . getFieldName ( parentPoolGen ) ; if ( ! childFName . equals ( parentFName ) ) { return false ; } String childFSig = ( ( FieldInstruction ) childin ) . getSignature ( childPoolGen ) ; String parentFSig = ( ( FieldInstruction ) parentin ) . getSignature ( parentPoolGen ) ; if ( ! childFSig . equals ( parentFSig ) ) { return false ; } if ( childFSig . startsWith ( Values . SIG_QUALIFIED_CLASS_PREFIX ) || childFSig . startsWith ( Values . SIG_ARRAY_PREFIX ) ) { ReferenceType childRefType = ( ( FieldInstruction ) childin ) . getReferenceType ( childPoolGen ) ; ReferenceType parentRefType = ( ( FieldInstruction ) parentin ) . getReferenceType ( parentPoolGen ) ; if ( ! childRefType . getSignature ( ) . equals ( parentRefType . 
getSignature ( ) ) ) { return false ; } } } else if ( childin instanceof InvokeInstruction ) { String childClassName = ( ( InvokeInstruction ) childin ) . getClassName ( childPoolGen ) ; String parentClassName = ( ( InvokeInstruction ) parentin ) . getClassName ( parentPoolGen ) ; if ( ! childClassName . equals ( parentClassName ) ) { return false ; } String childMethodName = ( ( InvokeInstruction ) childin ) . getMethodName ( childPoolGen ) ; String parentMethodName = ( ( InvokeInstruction ) parentin ) . getMethodName ( parentPoolGen ) ; if ( ! childMethodName . equals ( parentMethodName ) ) { return false ; } String childSignature = ( ( InvokeInstruction ) childin ) . getSignature ( childPoolGen ) ; String parentSignature = ( ( InvokeInstruction ) parentin ) . getSignature ( parentPoolGen ) ; if ( ! childSignature . equals ( parentSignature ) ) { return false ; } } else if ( childin instanceof LDC ) { Type childType = ( ( LDC ) childin ) . getType ( childPoolGen ) ; Type parentType = ( ( LDC ) parentin ) . getType ( parentPoolGen ) ; if ( ! childType . equals ( parentType ) ) { return false ; } Object childValue = ( ( LDC ) childin ) . getValue ( childPoolGen ) ; Object parentValue = ( ( LDC ) parentin ) . getValue ( parentPoolGen ) ; if ( childValue instanceof ConstantClass ) { ConstantClass childClass = ( ConstantClass ) childValue ; ConstantClass parentClass = ( ConstantClass ) parentValue ; if ( ! childClass . getBytes ( childPoolGen . getConstantPool ( ) ) . equals ( parentClass . getBytes ( parentPoolGen . getConstantPool ( ) ) ) ) { return false ; } } else if ( ! childValue . equals ( parentValue ) ) { return false ; } // TODO : Other Constant types
} else if ( childin instanceof LDC2_W ) { Type childType = ( ( LDC2_W ) childin ) . getType ( childPoolGen ) ; Type parentType = ( ( LDC2_W ) parentin ) . getType ( parentPoolGen ) ; if ( ! childType . equals ( parentType ) ) { return false ; } Object childValue = ( ( LDC2_W ) childin ) . getValue ( childPoolGen ) ; Object parentValue = ( ( LDC2_W ) parentin ) . getValue ( parentPoolGen ) ; if ( ! childValue . equals ( parentValue ) ) { return false ; } } else { if ( ! childin . equals ( parentin ) ) { return false ; } } } return true ; |
public class Surface { /** * Starts a series of drawing commands that are clipped to the specified rectangle ( in view
* coordinates , not OpenGL coordinates ) . Thus must be followed by a call to { @ link # endClipped }
* when the clipped drawing commands are done .
* @ return whether the resulting clip rectangle is non - empty . < em > Note : < / em > the caller may wish
* to skip their drawing if this returns false , but they must still call { @ link # endClipped } . */
public boolean startClipped ( int x , int y , int width , int height ) { } } | batch . flush ( ) ; // flush any pending unclipped calls
Rectangle r = pushScissorState ( x , target . flip ( ) ? target . height ( ) - y - height : y , width , height ) ; batch . gl . glScissor ( r . x , r . y , r . width , r . height ) ; if ( scissorDepth == 1 ) batch . gl . glEnable ( GL20 . GL_SCISSOR_TEST ) ; batch . gl . checkError ( "startClipped" ) ; return ! r . isEmpty ( ) ; |
public class LinearClassifier { /** * Returns indices of labels
* @ param labels - Set of labels to get indicies
* @ return Set of indicies */
protected Set < Integer > getLabelIndices ( Set < L > labels ) { } } | Set < Integer > iLabels = new HashSet < Integer > ( ) ; for ( L label : labels ) { int iLabel = labelIndex . indexOf ( label ) ; iLabels . add ( iLabel ) ; if ( iLabel < 0 ) throw new IllegalArgumentException ( "Unknown label " + label ) ; } return iLabels ; |
public class OAuthApi { /** * Exchange an auth token from the old Authentication API , to an OAuth access token .
* Calling this method will delete the auth token used to make the request .
* < br >
* This method should be called when upgrading your app to use OAuth . Call this method prior to initializing
* Jinx , and then use the OAuthExchangedToken to initialize Jinx .
* @ param inputStream stream to the legacy token properties .
* @ return object containing the OAuth Access Token data .
* @ throws JinxException if any parameter is null , or if there are any errors .
* @ see < a href = " http : / / www . flickr . com / services / api / flickr . auth . oauth . getAccessToken . html " > flickr . auth . oauth . getAccessToken < / a > */
public OAuthAccessToken getAccessToken ( InputStream inputStream ) throws JinxException { } } | JinxUtils . validateParams ( inputStream ) ; Properties legacyTokenProperties = loadLegacyTokenProperties ( inputStream ) ; Map < String , String > params = new TreeMap < > ( ) ; params . put ( "method" , "flickr.auth.oauth.getAccessToken" ) ; params . put ( "api_key" , jinx . getApiKey ( ) ) ; params . put ( "auth_token" , legacyTokenProperties . getProperty ( "token" ) ) ; params . put ( "format" , "json" ) ; params . put ( "nojsoncallback" , "1" ) ; params . put ( "api_sig" , sign ( params , jinx . getApiSecret ( ) ) ) ; StringBuilder sb = new StringBuilder ( JinxConstants . REST_ENDPOINT ) . append ( '?' ) ; for ( String key : params . keySet ( ) ) { sb . append ( key ) . append ( '=' ) . append ( params . get ( key ) ) . append ( '&' ) ; } sb . deleteCharAt ( sb . lastIndexOf ( "&" ) ) ; BufferedReader in = null ; StringBuilder json = new StringBuilder ( ) ; try { HttpURLConnection request = ( HttpURLConnection ) new URL ( sb . toString ( ) ) . openConnection ( ) ; request . connect ( ) ; in = new BufferedReader ( new InputStreamReader ( request . getInputStream ( ) ) ) ; String line ; while ( ( line = in . readLine ( ) ) != null ) { json . append ( line ) ; } } catch ( Exception e ) { throw new JinxException ( "Error when converting legacy token to OAuth token." , e ) ; } finally { JinxUtils . close ( in ) ; } OAuthExchangedToken exchangedToken = new Gson ( ) . fromJson ( json . toString ( ) , OAuthExchangedToken . class ) ; if ( ! exchangedToken . getStat ( ) . equals ( "ok" ) ) { throw new JinxException ( "Flickr reported an error." , null , exchangedToken ) ; } OAuthAccessToken oAuthAccessToken = new OAuthAccessToken ( ) ; oAuthAccessToken . setOauthToken ( exchangedToken . getOAuthToken ( ) ) ; oAuthAccessToken . setOauthTokenSecret ( exchangedToken . getOAuthTokenSecret ( ) ) ; oAuthAccessToken . setUsername ( legacyTokenProperties . 
getProperty ( "username" ) ) ; oAuthAccessToken . setFullname ( legacyTokenProperties . getProperty ( "fullname" ) ) ; oAuthAccessToken . setNsid ( legacyTokenProperties . getProperty ( "nsid" ) ) ; return oAuthAccessToken ; |
public class SingularityMesosSchedulerClient { /** * Sent by the scheduler to query the status of non - terminal tasks . This causes the master to send back UPDATE
* events for each task in the list . Tasks that are no longer known to Mesos will result in TASK _ LOST updates .
* If the list of tasks is empty , master will send UPDATE events for all currently known tasks of the framework .
* @ param tasks */
public void reconcile ( List < Reconcile . Task > tasks ) { } } | Builder reconsile = build ( ) . setReconcile ( Reconcile . newBuilder ( ) . addAllTasks ( tasks ) ) ; sendCall ( reconsile , Type . RECONCILE ) ; |
public class Packer { /** * Add anchor = WEST to the constraints for the current component if how = =
* true remove it if false . */
public Packer setAnchorWest ( final boolean how ) { } } | if ( how == true ) { gc . anchor = GridBagConstraints . WEST ; } else { gc . anchor &= ~ GridBagConstraints . WEST ; } setConstraints ( comp , gc ) ; return this ; |
public class SwingGroovyMethods { /** * Overloads the left shift operator to provide an easy way to add
* nodes to a MutableTreeNode . < p >
* @ param self a MutableTreeNode
* @ param node a node to be added to the treeNode .
* @ return same treeNode , after the value was added to it .
* @ since 1.6.4 */
public static MutableTreeNode leftShift ( MutableTreeNode self , MutableTreeNode node ) { } } | self . insert ( node , self . getChildCount ( ) ) ; return self ; |
public class IQ2DatalogTranslatorImpl { /** * Move ORDER BY above the highest construction node ( required by Datalog ) */
private IQ liftOrderBy ( IQ iq ) { } } | IQTree topNonQueryModifierTree = getFirstNonQueryModifierTree ( iq ) ; if ( ( topNonQueryModifierTree instanceof UnaryIQTree ) && ( ( ( UnaryIQTree ) topNonQueryModifierTree ) . getChild ( ) . getRootNode ( ) instanceof OrderByNode ) ) { return orderByLifter . liftOrderBy ( iq ) ; } return iq ; |
public class ListOperation { /** * List action must not try to configure selected item . Instead if list
* detect that the selected item is new ( thay may happen when user start new
* operation but desist ) , selected item must be se to null .
* @ param ctx PM Context */
@ Override public void configureSelected ( PMContext ctx ) throws NumberFormatException , PMException { } } | if ( ctx . getEntityContainer ( ) . isSelectedNew ( ) ) { ctx . getEntityContainer ( ) . setSelected ( null ) ; } else { super . configureSelected ( ctx ) ; } |
public class Preconditions { /** * Tests if the newBackupCount count is valid .
* @ param newBackupCount the number of sync backups
* @ param currentAsyncBackupCount the current number of async backups
* @ return the newBackupCount
* @ throws java . lang . IllegalArgumentException if newBackupCount is smaller than 0 , or larger than the maximum
* number of backups . */
public static int checkBackupCount ( int newBackupCount , int currentAsyncBackupCount ) { } } | if ( newBackupCount < 0 ) { throw new IllegalArgumentException ( "backup-count can't be smaller than 0" ) ; } if ( currentAsyncBackupCount < 0 ) { throw new IllegalArgumentException ( "async-backup-count can't be smaller than 0" ) ; } if ( newBackupCount > MAX_BACKUP_COUNT ) { throw new IllegalArgumentException ( "backup-count can't be larger than than " + MAX_BACKUP_COUNT ) ; } if ( newBackupCount + currentAsyncBackupCount > MAX_BACKUP_COUNT ) { throw new IllegalArgumentException ( "the sum of backup-count and async-backup-count can't be larger than than " + MAX_BACKUP_COUNT ) ; } return newBackupCount ; |
public class LookupReferencesManager { /** * Returns a list of lookups from the snapshot if the lookupsnapshottaker is configured . If it ' s not available ,
* returns null .
* @ return list of LookupBean objects , or null */
@ Nullable private List < LookupBean > getLookupListFromSnapshot ( ) { } } | if ( lookupSnapshotTaker != null ) { return lookupSnapshotTaker . pullExistingSnapshot ( lookupListeningAnnouncerConfig . getLookupTier ( ) ) ; } return null ; |
public class FileListCacheValue { /** * Adds the filename from the set if it exists
* @ param fileName
* @ return true if the set was mutated */
public boolean add ( String fileName ) { } } | writeLock . lock ( ) ; try { return filenames . add ( fileName ) ; } finally { writeLock . unlock ( ) ; } |
public class Timestamp { /** * Creates a Timestamp instance from the given string . String is in the RFC 3339 format without
* the timezone offset ( always ends in " Z " ) . */
public static Timestamp parseTimestamp ( String timestamp ) { } } | TemporalAccessor temporalAccessor = timestampParser . parse ( timestamp ) ; Instant instant = Instant . from ( temporalAccessor ) ; return ofTimeSecondsAndNanos ( instant . getEpochSecond ( ) , instant . getNano ( ) ) ; |
public class EnumTypeConverter {
    /**
     * Enforces that obj is a String contained in the Enum's values list, and
     * converts it to the corresponding enum constant.
     *
     * @param obj the raw wire value; may be null (handled via returnNullIfOptional)
     * @return the enum constant matching the string value, or the optional-null result
     * @throws RpcException INVALID_PARAMS when obj is not a String or not a declared
     *         value; INTERNAL when reflection-based conversion fails
     */
    @SuppressWarnings("unchecked")
    public Object unmarshal(Object obj) throws RpcException {
        if (obj == null) {
            return returnNullIfOptional();
        } else if (obj.getClass() != String.class) {
            String msg = "'" + obj + "' enum must be String, got: " + obj.getClass().getSimpleName();
            throw RpcException.Error.INVALID_PARAMS.exc(msg);
        } else if (e.getValues().contains((String) obj)) {
            try {
                Class clz = getTypeClass();
                return java.lang.Enum.valueOf(clz, (String) obj);
            } catch (Exception e) {
                // NOTE: this catch parameter shadows the enum-descriptor field `e`
                // used above; inside this block `e` is the thrown exception.
                String msg = "Could not set enum value '" + obj + "' - " + e.getClass().getSimpleName() + " - " + e.getMessage();
                throw RpcException.Error.INTERNAL.exc(msg);
            }
        } else {
            String msg = "'" + obj + "' is not in enum: " + e.getValues();
            throw RpcException.Error.INVALID_PARAMS.exc(msg);
        }
    }
}
public class CmsDriverManager { /** * Reads the aliases which point to a given structure id . < p >
* @ param dbc the current database context
* @ param project the current project
* @ param structureId the structure id for which we want to read the aliases
* @ return the list of aliases pointing to the structure id
* @ throws CmsException if something goes wrong */
public List < CmsAlias > readAliasesByStructureId ( CmsDbContext dbc , CmsProject project , CmsUUID structureId ) throws CmsException { } } | return getVfsDriver ( dbc ) . readAliases ( dbc , project , new CmsAliasFilter ( null , null , structureId ) ) ; |
public class AbstractSSTableSimpleWriter {
    /**
     * Finds an available SSTable generation number and builds the data-component
     * filename for it. Scans the directory for existing descriptors of the same
     * column family and advances the shared {@code generation} counter past the
     * highest generation seen, so the returned filename does not collide.
     */
    protected static String makeFilename(File directory, final String keyspace, final String columnFamily) {
        final Set<Descriptor> existing = new HashSet<Descriptor>();
        // list() is used only for its visitation side effect: collect descriptors
        // of this column family into `existing`; accept() always returns false.
        directory.list(new FilenameFilter() {
            public boolean accept(File dir, String name) {
                Pair<Descriptor, Component> p = SSTable.tryComponentFromFilename(dir, name);
                Descriptor desc = p == null ? null : p.left;
                if (desc == null)
                    return false;
                if (desc.cfname.equals(columnFamily))
                    existing.add(desc);
                return false;
            }
        });
        // Advance the shared atomic counter until it exceeds every generation on disk.
        int maxGen = generation.getAndIncrement();
        for (Descriptor desc : existing) {
            while (desc.generation > maxGen) {
                maxGen = generation.getAndIncrement();
            }
        }
        return new Descriptor(directory, keyspace, columnFamily, maxGen + 1, Descriptor.Type.TEMP).filenameFor(Component.DATA);
    }
}
public class WhitesourceService {
    /**
     * Gets additional data for given dependencies.
     *
     * @param orgToken Organization token uniquely identifying the account at white source.
     * @param product The product name or token to update.
     * @param productVersion The product version.
     * @param projectInfos OSS usage information to send to white source.
     * @param userKey user key uniquely identifying the account at white source.
     * @param requesterEmail Email of the WhiteSource user that requests to update WhiteSource.
     * @param productToken The product token (passed through to the request factory).
     * @return Potential result of the dependencies additional data (license, description, homePageUrl, vulnerabilities, sha1 and displayName).
     * @throws WssServiceException In case of errors while getting additional dependency data with white source.
     */
    @Deprecated
    public GetDependencyDataResult getDependencyData(String orgToken, String product, String productVersion,
            Collection<AgentProjectInfo> projectInfos, String userKey, String requesterEmail, String productToken)
            throws WssServiceException {
        // Thin delegation: build the request via the factory and let the client execute it.
        return client.getDependencyData(requestFactory.newDependencyDataRequest(orgToken, product, productVersion, projectInfos, userKey, requesterEmail, productToken));
    }
}
public class HashedTextDataLoader { /** * To be called by the { @ link # initialLoad ( ) } method .
* It will take in the text and add a new document
* vector to the data set . Once all text documents
* have been loaded , this method should never be
* called again . < br >
* This method is thread safe .
* @ param text the text of the document to add
* @ return the index of the created document for the given text . Starts from
* zero and counts up . */
protected int addOriginalDocument ( String text ) { } } | if ( noMoreAdding ) throw new RuntimeException ( "Initial data set has been finalized" ) ; StringBuilder localWorkSpace = workSpace . get ( ) ; List < String > localStorageSpace = storageSpace . get ( ) ; Map < String , Integer > localWordCounts = wordCounts . get ( ) ; if ( localWorkSpace == null ) { localWorkSpace = new StringBuilder ( ) ; localStorageSpace = new ArrayList < String > ( ) ; localWordCounts = new LinkedHashMap < String , Integer > ( ) ; workSpace . set ( localWorkSpace ) ; storageSpace . set ( localStorageSpace ) ; wordCounts . set ( localWordCounts ) ; } localWorkSpace . setLength ( 0 ) ; localStorageSpace . clear ( ) ; tokenizer . tokenize ( text , localWorkSpace , localStorageSpace ) ; for ( String word : localStorageSpace ) { Integer count = localWordCounts . get ( word ) ; if ( count == null ) localWordCounts . put ( word , 1 ) ; else localWordCounts . put ( word , count + 1 ) ; } SparseVector vec = new SparseVector ( dimensionSize , localWordCounts . size ( ) ) ; for ( Iterator < Entry < String , Integer > > iter = localWordCounts . entrySet ( ) . iterator ( ) ; iter . hasNext ( ) ; ) { Entry < String , Integer > entry = iter . next ( ) ; String word = entry . getKey ( ) ; // XXX This code generates a hashcode and then computes the absolute value of that hashcode . If the hashcode is Integer . MIN _ VALUE , then the result will be negative as well ( since Math . abs ( Integer . MIN _ VALUE ) = = Integer . MIN _ VALUE ) .
int index = Math . abs ( word . hashCode ( ) ) % dimensionSize ; vec . set ( index , entry . getValue ( ) ) ; termDocumentFrequencys . addAndGet ( index , entry . getValue ( ) ) ; iter . remove ( ) ; } synchronized ( vectors ) { vectors . add ( vec ) ; return documents ++ ; } |
public class LineItemActivityAssociation { /** * Gets the clickThroughConversionCost value for this LineItemActivityAssociation .
* @ return clickThroughConversionCost * The amount of money to attribute per click through conversion .
* This attribute is
* required for creating a { @ code LineItemActivityAssociation } .
* The currency code is readonly
* and should match the { @ link LineItem } . */
public com . google . api . ads . admanager . axis . v201811 . Money getClickThroughConversionCost ( ) { } } | return clickThroughConversionCost ; |
public class Decoration { /** * Converts the specified string to a Decoration type .
* @ param str
* The string to be converted as a decoration . Possible values
* are :
* < pre >
* . staccato
* ~ general gracing ( abc v1.6 and older )
* ~ irish roll ( abc v2.0)
* uupbow
* vdownbow
* T trill
* H fermata
* L accent or emphasis
* M lowermordent
* P uppermordent
* S segno
* O coda
* < / pre >
* @ return The decoration type corresponding to the given string . < TT > null < / TT >
* is returned if no type matches the string . */
public static byte convertToType ( String str ) { } } | byte type = UNKNOWN ; if ( str . length ( ) > 2 ) { char s = str . charAt ( 0 ) ; char e = str . charAt ( str . length ( ) - 1 ) ; if ( ( ( s == '!' ) && ( e == '!' ) ) || ( ( s == '+' ) && ( e == '+' ) ) ) { str = str . substring ( 1 , str . length ( ) - 1 ) ; } } if ( str . equals ( "." ) ) type = STACCATO ; else if ( str . equals ( "~" ) ) type = ROLL ; else if ( str . equals ( "u" ) ) type = UPBOW ; else if ( str . equals ( "v" ) ) type = DOWNBOW ; else if ( str . equals ( "T" ) ) type = TRILL ; else if ( str . equals ( "H" ) ) type = FERMATA ; else if ( str . equals ( "L" ) ) type = ACCENT ; else if ( str . equals ( "M" ) ) type = LOWERMORDENT ; else if ( str . equals ( "P" ) ) type = UPPERMORDENT ; else if ( str . equals ( "S" ) ) type = SEGNO ; else if ( str . equals ( "O" ) ) type = CODA ; else if ( str . equals ( "trill" ) ) type = TRILL ; else if ( str . equals ( "lowermordent" ) || str . equals ( "mordent" ) ) type = LOWERMORDENT ; else if ( str . equals ( "uppermordent" ) || str . equals ( "pralltriller" ) ) type = UPPERMORDENT ; else if ( str . equals ( ">" ) || str . equals ( "accent" ) || str . equals ( "emphasis" ) ) type = ACCENT ; else if ( str . equals ( "fermata" ) ) type = FERMATA ; else if ( str . equals ( "invertedfermata" ) ) type = FERMATA_INVERTED ; else if ( str . equals ( "tenuto" ) ) type = TENUTO ; else if ( str . equals ( "0" ) ) type = FINGERING_0 ; else if ( str . equals ( "1" ) ) type = FINGERING_1 ; else if ( str . equals ( "2" ) ) type = FINGERING_2 ; else if ( str . equals ( "3" ) ) type = FINGERING_3 ; else if ( str . equals ( "4" ) ) type = FINGERING_4 ; else if ( str . equals ( "5" ) ) type = FINGERING_5 ; else if ( str . equals ( "plus" ) ) type = PLUS ; else if ( str . equals ( "wedge" ) ) type = WEDGE ; else if ( str . equals ( "open" ) ) type = OPEN ; else if ( str . equals ( "thumb" ) || str . equals ( "snap" ) ) type = THUMB ; else if ( str . 
equals ( "turn" ) ) type = TURN ; else if ( str . equals ( "roll" ) ) type = ROLL ; else if ( str . equals ( "breath" ) ) type = BREATH ; else if ( str . equals ( "shortphrase" ) ) type = SHORT_PHRASE ; else if ( str . equals ( "mediumphrase" ) ) type = MEDIUM_PHRASE ; else if ( str . equals ( "longphrase" ) ) type = LONG_PHRASE ; else if ( str . equals ( "segno" ) ) type = SEGNO ; else if ( str . equals ( "coda" ) ) type = CODA ; else if ( str . equals ( "D.S." ) ) type = DA_SEGNO ; else if ( str . equals ( "D.C." ) || str . equals ( "dacapo" ) ) type = DA_CAPO ; else if ( str . equals ( "dacoda" ) ) type = DA_CODA ; else if ( str . equals ( "fine" ) ) type = FINE ; else if ( str . equals ( "upbow" ) ) type = UPBOW ; else if ( str . equals ( "downbow" ) ) type = DOWNBOW ; else if ( str . equals ( "slide" ) ) type = SLIDE ; else if ( str . equals ( "turnx" ) ) type = TURNX ; else if ( str . equals ( "invertedturn" ) ) type = TURN_INVERTED ; else if ( str . equals ( "invertedturnx" ) ) type = TURNX_INVERTED ; else if ( str . equals ( "arpeggio" ) ) type = ARPEGGIO ; else if ( str . equals ( "trill(" ) ) type = TRILL_START ; else if ( str . equals ( "trill)" ) ) type = TRILL_END ; else if ( str . equals ( "repeatbar" ) ) type = REPEAT_LAST_BAR ; else if ( str . equals ( "repeatbar2" ) ) type = REPEAT_LAST_TWO_BARS ; return type ; |
public class KeyFactory {
    /**
     * Generates a public key object from the provided key specification
     * (key material).
     *
     * @param keySpec the specification (key material) of the public key.
     * @return the public key.
     * @exception InvalidKeySpecException if the given key specification
     *            is inappropriate for this key factory to produce a public key.
     */
    public final PublicKey generatePublic(KeySpec keySpec) throws InvalidKeySpecException {
        // Fast path: provider was chosen explicitly, no fallback iteration needed.
        if (serviceIterator == null) {
            return spi.engineGeneratePublic(keySpec);
        }
        // Try each available provider SPI in turn, remembering the FIRST failure
        // so the most relevant error is reported if all providers fail.
        Exception failure = null;
        KeyFactorySpi mySpi = spi;
        do {
            try {
                return mySpi.engineGeneratePublic(keySpec);
            } catch (Exception e) {
                if (failure == null) {
                    failure = e;
                }
                mySpi = nextSpi(mySpi);
            }
        } while (mySpi != null);
        // Re-throw the original failure with as much type fidelity as possible.
        if (failure instanceof RuntimeException) {
            throw (RuntimeException) failure;
        }
        if (failure instanceof InvalidKeySpecException) {
            throw (InvalidKeySpecException) failure;
        }
        throw new InvalidKeySpecException("Could not generate public key", failure);
    }
}
public class SubscriberState { /** * Read the subscriber state to the given { @ link DataInput }
* in the order of :
* isMaster
* serverState
* totalUpdates
* streamId
* @ param dataInput the data output to write to
* @ throws IOException */
public static SubscriberState read ( DataInput dataInput ) throws IOException { } } | return SubscriberState . builder ( ) . isMaster ( dataInput . readBoolean ( ) ) . serverState ( dataInput . readUTF ( ) ) . totalUpdates ( dataInput . readInt ( ) ) . streamId ( dataInput . readInt ( ) ) . build ( ) ; |
public class AutoMlClient { /** * Deletes a model . Returns ` google . protobuf . Empty ` in the
* [ response ] [ google . longrunning . Operation . response ] field when it completes , and ` delete _ details `
* in the [ metadata ] [ google . longrunning . Operation . metadata ] field .
* < p > Sample code :
* < pre > < code >
* try ( AutoMlClient autoMlClient = AutoMlClient . create ( ) ) {
* ModelName name = ModelName . of ( " [ PROJECT ] " , " [ LOCATION ] " , " [ MODEL ] " ) ;
* autoMlClient . deleteModelAsync ( name . toString ( ) ) . get ( ) ;
* < / code > < / pre >
* @ param name Resource name of the model being deleted .
* @ throws com . google . api . gax . rpc . ApiException if the remote call fails */
@ BetaApi ( "The surface for long-running operations is not stable yet and may change in the future." ) public final OperationFuture < Empty , OperationMetadata > deleteModelAsync ( String name ) { } } | DeleteModelRequest request = DeleteModelRequest . newBuilder ( ) . setName ( name ) . build ( ) ; return deleteModelAsync ( request ) ; |
public class Mutations {
    /**
     * Serializes a mutation to the given output. A hand-rolled scheme is used
     * because there is no need to occupy externalizer ids when we have a limited
     * set of options: both data conversions are written first, then a one-byte
     * type tag, then the type-specific payload (function, and argument where present).
     */
    static <K, V, T, R> void writeTo(ObjectOutput output, Mutation<K, V, R> mutation) throws IOException {
        BaseMutation bm = (BaseMutation) mutation;
        DataConversion.writeTo(output, bm.keyDataConversion);
        DataConversion.writeTo(output, bm.valueDataConversion);
        // One-byte discriminator; the reader side must switch on the same tag values.
        byte type = mutation.type();
        output.writeByte(type);
        switch (type) {
            case ReadWrite.TYPE:
                output.writeObject(((ReadWrite<K, V, ?>) mutation).f);
                break;
            case ReadWriteWithValue.TYPE:
                ReadWriteWithValue<K, V, T, R> rwwv = (ReadWriteWithValue<K, V, T, R>) mutation;
                output.writeObject(rwwv.argument);
                output.writeObject(rwwv.f);
                break;
            case Write.TYPE:
                output.writeObject(((Write<K, V>) mutation).f);
                break;
            case WriteWithValue.TYPE:
                WriteWithValue<K, V, T> wwv = (WriteWithValue<K, V, T>) mutation;
                output.writeObject(wwv.argument);
                output.writeObject(wwv.f);
                break;
        }
    }
}
public class JSONAssert { /** * Asserts that the JSONObject provided matches the expected JSONObject . If it isn ' t it throws an
* { @ link AssertionError } .
* @ param expected Expected JSONObject
* @ param actual JSONObject to compare
* @ param strict Enables strict checking
* @ throws JSONException JSON parsing error */
public static void assertEquals ( JSONObject expected , JSONObject actual , boolean strict ) throws JSONException { } } | assertEquals ( expected , actual , strict ? JSONCompareMode . STRICT : JSONCompareMode . LENIENT ) ; |
public class CmsImportExportManager { /** * Adds a import version class name to the configuration . < p >
* @ param importVersionClass the import version class name to add */
public void addImportVersionClass ( I_CmsImport importVersionClass ) { } } | if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( Messages . get ( ) . getBundle ( ) . key ( Messages . LOG_IMPORTEXPORT_ADDED_IMPORT_VERSION_1 , importVersionClass ) ) ; } m_importVersionClasses . add ( importVersionClass ) ; |
public class Model { /** * Analyzes the entire json object and creates a brand - new . . .
* instance from its representation .
* TODO : Figure out how to make this accesible without . . .
* creating a dummy instance .
* @ param json The JSONObject representation of the object .
* @ return The object T if able to convert and null otherwise . */
public T fromJson ( JSONObject json ) { } } | T object = null ; try { object = clazz . newInstance ( ) ; ( ( Model ) object ) . setContext ( context ) ; Field [ ] fields = clazz . getDeclaredFields ( ) ; try { for ( Field field : fields ) { if ( ! field . isAnnotationPresent ( ModelField . class ) ) { continue ; } String name = field . getName ( ) ; boolean was = field . isAccessible ( ) ; field . setAccessible ( true ) ; switch ( toFieldEnum ( field ) ) { case INT : field . setInt ( object , json . optInt ( name , 0 ) ) ; break ; case LONG : field . setLong ( object , json . optLong ( name , 0 ) ) ; break ; case FLOAT : field . setFloat ( object , json . optLong ( name , 0 ) ) ; break ; case DOUBLE : field . setDouble ( object , json . optDouble ( name , 0 ) ) ; break ; case STRING : field . set ( object , json . opt ( name ) ) ; break ; case BOOLEAN : field . setBoolean ( object , json . optBoolean ( name , false ) ) ; break ; case LIST : JSONArray list = json . optJSONArray ( name ) ; try { if ( list != null ) { List < String > stringList = new ArrayList < > ( ) ; for ( int i = 0 ; i < list . length ( ) ; i ++ ) { stringList . add ( list . getString ( i ) ) ; } field . set ( object , stringList ) ; } } catch ( JSONException e ) { // TODO .
} break ; } field . setAccessible ( was ) ; } } catch ( IllegalAccessException e ) { e . printStackTrace ( ) ; } } catch ( IllegalAccessException | InstantiationException e ) { e . printStackTrace ( ) ; } return object ; |
public class ExistingChannelModelControllerClient { /** * Create and add model controller handler to an existing management channel handler .
* @ param handler the channel handler
* @ return the created client */
public static ModelControllerClient createAndAdd ( final ManagementChannelHandler handler ) { } } | final ExistingChannelModelControllerClient client = new ExistingChannelModelControllerClient ( handler ) ; handler . addHandlerFactory ( client ) ; return client ; |
public class DescribeLoadBalancersRequest { /** * The Amazon Resource Names ( ARN ) of the load balancers . You can specify up to 20 load balancers in a single call .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setLoadBalancerArns ( java . util . Collection ) } or { @ link # withLoadBalancerArns ( java . util . Collection ) } if you
* want to override the existing values .
* @ param loadBalancerArns
* The Amazon Resource Names ( ARN ) of the load balancers . You can specify up to 20 load balancers in a single
* call .
* @ return Returns a reference to this object so that method calls can be chained together . */
public DescribeLoadBalancersRequest withLoadBalancerArns ( String ... loadBalancerArns ) { } } | if ( this . loadBalancerArns == null ) { setLoadBalancerArns ( new java . util . ArrayList < String > ( loadBalancerArns . length ) ) ; } for ( String ele : loadBalancerArns ) { this . loadBalancerArns . add ( ele ) ; } return this ; |
public class ServerJFapCommunicator {
    /**
     * Calls through to the JFAPCommunicator class to do the real handshaking.
     * Entry/exit trace is emitted around the delegated call when tracing is enabled.
     *
     * @see com.ibm.ws.sib.comms.common.JFAPCommunicator#initiateCommsHandshaking()
     */
    @Override
    protected void initiateCommsHandshaking() throws SIConnectionLostException, SIConnectionDroppedException {
        if (tc.isEntryEnabled())
            SibTr.entry(this, tc, "initiateCommsHandshaking");
        // `true` selects the server-side handshaking variant in the shared impl.
        initiateCommsHandshakingImpl(true);
        if (tc.isEntryEnabled())
            SibTr.exit(this, tc, "initiateCommsHandshaking");
    }
}
public class ResUtils { /** * Converts any path into something that can be placed in an Android directory .
* Traverses any subdirectories and flattens it all into a single filename . Also
* gets rid of commonly seen illegal characters in tz identifiers , and lower cases
* the entire thing .
* @ param path the path to convert
* @ return a flat path with no directories ( and lower - cased ) */
private static String convertPathToResource ( String path ) { } } | File file = new File ( path ) ; List < String > parts = new ArrayList < String > ( ) ; do { parts . add ( file . getName ( ) ) ; file = file . getParentFile ( ) ; } while ( file != null ) ; StringBuffer sb = new StringBuffer ( ) ; int size = parts . size ( ) ; for ( int a = size - 1 ; a >= 0 ; a -- ) { if ( sb . length ( ) > 0 ) { sb . append ( "_" ) ; } sb . append ( parts . get ( a ) ) ; } // TODO : Better regex replacement
return sb . toString ( ) . replace ( '-' , '_' ) . replace ( "+" , "plus" ) . toLowerCase ( Locale . US ) ; |
public class ServiceContextFactory { /** * Convenience method , it requires that the request is a HttpServletRequest .
* @ see # createServiceContext ( HttpServletRequest ) */
public static ServiceContext createServiceContext ( ServletRequest request ) { } } | if ( ! ( request instanceof HttpServletRequest ) ) { throw new IllegalArgumentException ( "Expected HttpServletRequest" ) ; } return createServiceContext ( ( HttpServletRequest ) request ) ; |
public class QueryToolChest {
    /**
     * Returns a CacheStrategy to be used to load data into the cache and remove it from the cache.
     * This is optional. If it returns null, caching is effectively disabled for the query.
     *
     * @param query The query whose results might be cached
     * @param <T> The type of object that will be stored in the cache
     * @return A CacheStrategy that can be used to populate and read from the Cache
     */
    @Nullable
    public <T> CacheStrategy<ResultType, T, QueryType> getCacheStrategy(QueryType query) {
        // Default implementation: no caching. Subclasses override to enable it.
        return null;
    }
}
public class RedshiftMetadataMarshaller {
    /**
     * Marshall the given parameter object: writes each RedshiftMetadata member
     * through the protocol marshaller using its pre-built binding.
     *
     * @throws SdkClientException if the metadata is null or marshalling fails
     */
    public void marshall(RedshiftMetadata redshiftMetadata, ProtocolMarshaller protocolMarshaller) {
        if (redshiftMetadata == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(redshiftMetadata.getRedshiftDatabase(), REDSHIFTDATABASE_BINDING);
            protocolMarshaller.marshall(redshiftMetadata.getDatabaseUserName(), DATABASEUSERNAME_BINDING);
            protocolMarshaller.marshall(redshiftMetadata.getSelectSqlQuery(), SELECTSQLQUERY_BINDING);
        } catch (Exception e) {
            // Wrap any failure (including runtime errors) in the SDK's client exception type.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class rnatip_stats { /** * Use this API to fetch statistics of rnatip _ stats resource of given name . */
public static rnatip_stats get ( nitro_service service , String Rnatip ) throws Exception { } } | rnatip_stats obj = new rnatip_stats ( ) ; obj . set_Rnatip ( Rnatip ) ; rnatip_stats response = ( rnatip_stats ) obj . stat_resource ( service ) ; return response ; |
public class Ifc4PackageImpl {
    /**
     * Lazily resolves and caches the {@code IfcSIUnitName} enum from the registered
     * Ifc4 EPackage (classifier index 1057).
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public EEnum getIfcSIUnitName() {
        if (ifcSIUnitNameEEnum == null) {
            ifcSIUnitNameEEnum = (EEnum) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI).getEClassifiers().get(1057);
        }
        return ifcSIUnitNameEEnum;
    }
}
public class SecurityContextProviderImpl { /** * { @ inheritDoc } */
@ Override public ThreadContext captureThreadContext ( Map < String , String > execProps , Map < String , ? > threadContextConfig ) { } } | String jaasLoginContextEntry = getConfigNameForRef ( ( String ) threadContextConfig . get ( JAAS_LOGINCONTEXTENTRY_REF ) ) ; return new SecurityContextImpl ( true , jaasLoginContextEntry ) ; |
public class ClassNode {
    /**
     * Accept a visitor that visits all public and non-abstract class nodes
     * that have been annotated by the class represented by this `ClassNode`.
     * The supplied visitor is wrapped in a guard so it is only invoked for
     * nodes satisfying {@code publicNotAbstract()}.
     *
     * @param visitor the function that takes `ClassNode` as argument
     * @return this `ClassNode` instance
     */
    public ClassNode visitPublicNotAbstractAnnotatedClasses($.Visitor<ClassNode> visitor) {
        return visitAnnotatedClasses($.guardedVisitor(new $.Predicate<ClassNode>() {
            @Override
            public boolean test(ClassNode classNode) {
                return classNode.publicNotAbstract();
            }
        }, visitor));
    }
}
public class Solo { /** * Clicks a MenuItem displaying the specified text .
* @ param text the text displayed by the MenuItem . The parameter will be interpreted as a regular expression
* @ param subMenu { @ code true } if the menu item could be located in a sub menu */
public void clickOnMenuItem ( String text , boolean subMenu ) { } } | if ( config . commandLogging ) { Log . d ( config . commandLoggingTag , "clickOnMenuItem(\"" + text + "\", " + subMenu + ")" ) ; } clicker . clickOnMenuItem ( text , subMenu ) ; |
public class SpatialSupport {
    /**
     * Register spatial types to the given codegen module.
     * Binds the spatial entity-path type, registers spatial types with both the
     * SQL configuration and the type mappings of the module, and adds the
     * required imports. Registration order follows the original implementation.
     *
     * @param module module to be customized for spatial support
     */
    public static void addSupport(AbstractModule module) {
        module.bindInstance(SQLCodegenModule.ENTITYPATH_TYPE, RelationalPathSpatial.class);
        registerTypes(module.get(Configuration.class));
        registerTypes(module.get(TypeMappings.class));
        addImports(module);
    }
}
public class JSTypeRegistry {
    /**
     * Creates an enum type.
     *
     * @param name The human-readable name associated with the enum, or null if unknown.
     * @param source the AST node the enum originates from
     * @param elementsType the type of the enum's elements
     */
    public EnumType createEnumType(String name, Node source, JSType elementsType) {
        // Straight delegation to the EnumType constructor, registering this registry.
        return new EnumType(this, name, source, elementsType);
    }
}
public class RecyclerViewPager {
    /**
     * Handles the various bookkeeping required when the scroll state changes
     * (dragging, settling, idle): records the centered child and its starting
     * position while dragging, measures the drag span while settling, and on
     * idle either snaps to the correct page or fires page-changed listeners.
     */
    @Override
    public void onScrollStateChanged(int state) {
        if (DEBUG)
            Log.d("RecyclerViewPager", "onScrollStateChanged:" + state);
        super.onScrollStateChanged(state);
        // When the user starts dragging
        if (state == SCROLL_STATE_DRAGGING) {
            mNeedAdjust = true;
            mCurView = getLayoutManager().canScrollHorizontally() ? ViewUtils.getCenterXChild(this) : ViewUtils.getCenterYChild(this);
            if (mCurView != null) {
                if (mHasCalledOnPageChanged) {
                    // While rvp is scrolling, mPositionBeforeScroll will be previous value.
                    mPositionBeforeScroll = getChildLayoutPosition(mCurView);
                    mHasCalledOnPageChanged = false;
                }
                if (DEBUG) {
                    Log.d("RecyclerViewPager", "mPositionBeforeScroll:" + mPositionBeforeScroll);
                }
                mFisrtLeftWhenDragging = mCurView.getLeft();
                mFirstTopWhenDragging = mCurView.getTop();
            } else {
                mPositionBeforeScroll = -1;
            }
            mTouchSpan = 0;
        } else if (state == SCROLL_STATE_SETTLING) {
            // Settling (fling/inertia) state: measure how far the centered child moved.
            mNeedAdjust = false;
            if (mCurView != null) {
                if (getLayoutManager().canScrollHorizontally()) {
                    mTouchSpan = mCurView.getLeft() - mFisrtLeftWhenDragging;
                } else {
                    mTouchSpan = mCurView.getTop() - mFirstTopWhenDragging;
                }
            } else {
                mTouchSpan = 0;
            }
            mCurView = null;
        } else if (state == SCROLL_STATE_IDLE) {
            // Idle state: adjust to the final page, or notify listeners of the change.
            if (mNeedAdjust) {
                int targetPosition = getLayoutManager().canScrollHorizontally() ? ViewUtils.getCenterXChildPosition(this) : ViewUtils.getCenterYChildPosition(this);
                if (mCurView != null) {
                    targetPosition = getChildAdapterPosition(mCurView);
                    if (getLayoutManager().canScrollHorizontally()) {
                        int spanX = mCurView.getLeft() - mFisrtLeftWhenDragging;
                        // if user is tending to cancel paging action, don't perform position changing
                        if (spanX > mCurView.getWidth() * mTriggerOffset && mCurView.getLeft() >= mMaxLeftWhenDragging) {
                            targetPosition--;
                        } else if (spanX < mCurView.getWidth() * -mTriggerOffset && mCurView.getLeft() <= mMinLeftWhenDragging) {
                            targetPosition++;
                        }
                    } else {
                        int spanY = mCurView.getTop() - mFirstTopWhenDragging;
                        if (spanY > mCurView.getHeight() * mTriggerOffset && mCurView.getTop() >= mMaxTopWhenDragging) {
                            targetPosition--;
                        } else if (spanY < mCurView.getHeight() * -mTriggerOffset && mCurView.getTop() <= mMinTopWhenDragging) {
                            targetPosition++;
                        }
                    }
                }
                smoothScrollToPosition(safeTargetPosition(targetPosition, mViewPagerAdapter.getItemCount()));
                mCurView = null;
            } else if (mSmoothScrollTargetPosition != mPositionBeforeScroll) {
                if (DEBUG) {
                    Log.d("RecyclerViewPager", "onScrollStateChanged SCROLL_STATE_IDLE:" + mSmoothScrollTargetPosition);
                    Log.d("RecyclerViewPager", "onScrollStateChanged SCROLL_STATE_IDLE:" + mPositionBeforeScroll);
                }
                if (mOnPageChangedListeners != null) {
                    for (OnPageChangedListener onPageChangedListener : mOnPageChangedListeners) {
                        if (onPageChangedListener != null) {
                            onPageChangedListener.OnPageChanged(mPositionBeforeScroll, mSmoothScrollTargetPosition);
                        }
                    }
                }
                mHasCalledOnPageChanged = true;
                mPositionBeforeScroll = mSmoothScrollTargetPosition;
            }
            // reset drag-extent trackers for the next gesture
            mMaxLeftWhenDragging = Integer.MIN_VALUE;
            mMinLeftWhenDragging = Integer.MAX_VALUE;
            mMaxTopWhenDragging = Integer.MIN_VALUE;
            mMinTopWhenDragging = Integer.MAX_VALUE;
        }
    }
}
public class FileUtils {
    /**
     * Calculates the SHA-1 hash for an {@link InputStream}.
     *
     * @param in the {@link InputStream} to calculate the hash for; read to EOF but not closed here
     * @return 40-character, zero-padded lowercase hex {@link String} representation of the hash
     */
    public static String hash(InputStream in) {
        try {
            MessageDigest digest = MessageDigest.getInstance("sha1");
            // Borrow a pooled buffer to stream the input through the digest.
            ByteArrayPool.withByteArray(buffer -> {
                try {
                    int length;
                    while ((length = in.read(buffer)) != -1) {
                        digest.update(buffer, 0, length);
                    }
                } catch (IOException e) {
                    throw Exceptions.propagate(e);
                }
            });
            // %040x zero-pads to the full 160-bit (40 hex char) SHA-1 width.
            return String.format("%040x", new BigInteger(1, digest.digest()));
        } catch (NoSuchAlgorithmException e) {
            // sha1 is mandated by the JCA spec, so this is effectively unreachable.
            throw Exceptions.propagate(e);
        }
    }
}
public class Value { /** * Returns an { @ code ARRAY < INT64 > } value .
* @ param v the source of element values , which may be null to produce a value for which { @ code
* isNull ( ) } is { @ code true } */
public static Value int64Array ( @ Nullable long [ ] v ) { } } | return int64Array ( v , 0 , v == null ? 0 : v . length ) ; |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.