signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class EvernoteExceptionUtils { /** * convert to { @ link EvernoteException }
* @ param ex original exception
* @ return converted exception */
public static EvernoteException convert ( Exception ex ) { } } | // construct exception message
final StringBuilder sb = new StringBuilder ( ) ; sb . append ( ex . getClass ( ) . getName ( ) ) ; if ( ex instanceof EDAMUserException ) { final EDAMErrorCode errorCode = ( ( EDAMUserException ) ex ) . getErrorCode ( ) ; if ( errorCode == null ) { sb . append ( ", ErrorCode [null" ) ; } else { sb . append ( ", ErrorCode [" + errorCode + "(" + errorCode . getValue ( ) + ")" ) ; } sb . append ( "], Parameter [" ) ; sb . append ( ( ( EDAMUserException ) ex ) . getParameter ( ) ) ; sb . append ( "]" ) ; } else if ( ex instanceof EDAMSystemException ) { final EDAMErrorCode errorCode = ( ( EDAMSystemException ) ex ) . getErrorCode ( ) ; if ( errorCode == null ) { sb . append ( ", ErrorCode [null" ) ; } else { sb . append ( ", ErrorCode [" + errorCode + "(" + errorCode . getValue ( ) + ")" ) ; } sb . append ( "], Message [" ) ; sb . append ( ( ( EDAMSystemException ) ex ) . getMessage ( ) ) ; sb . append ( "], RateLimitDuration [" ) ; sb . append ( ( ( EDAMSystemException ) ex ) . getRateLimitDuration ( ) ) ; sb . append ( "]" ) ; } else if ( ex instanceof EDAMNotFoundException ) { sb . append ( ", Identifier [" ) ; sb . append ( ( ( EDAMNotFoundException ) ex ) . getIdentifier ( ) ) ; sb . append ( "], Key [" ) ; sb . append ( ( ( EDAMNotFoundException ) ex ) . getKey ( ) ) ; sb . append ( "]" ) ; } else { sb . append ( ", Message [" ) ; sb . append ( ex . getMessage ( ) ) ; sb . append ( "]" ) ; } return new EvernoteException ( sb . toString ( ) , ex ) ; |
public class SegmentsUtil { /** * Copy bytes of segments to output view .
* Note : It just copies the data in , not include the length .
* @ param segments source segments
* @ param offset offset for segments
* @ param sizeInBytes size in bytes
* @ param target target output view */
public static void copyToView ( MemorySegment [ ] segments , int offset , int sizeInBytes , DataOutputView target ) throws IOException { } } | for ( MemorySegment sourceSegment : segments ) { int curSegRemain = sourceSegment . size ( ) - offset ; if ( curSegRemain > 0 ) { int copySize = Math . min ( curSegRemain , sizeInBytes ) ; byte [ ] bytes = allocateReuseBytes ( copySize ) ; sourceSegment . get ( offset , bytes , 0 , copySize ) ; target . write ( bytes , 0 , copySize ) ; sizeInBytes -= copySize ; offset = 0 ; } else { offset -= sourceSegment . size ( ) ; } if ( sizeInBytes == 0 ) { return ; } } if ( sizeInBytes != 0 ) { throw new RuntimeException ( "No copy finished, this should be a bug, " + "The remaining length is: " + sizeInBytes ) ; } |
public class HiveMetaStoreBridge {

    /**
     * Construct the qualified name used to uniquely identify a Database instance in Atlas.
     *
     * <p>The database name is lower-cased with {@code Locale.ROOT} so the generated
     * qualified name is stable regardless of the JVM's default locale (the default
     * {@code toLowerCase()} mangles e.g. 'I' under a Turkish locale).
     *
     * @param clusterName Name of the cluster to which the Hive component belongs
     * @param dbName      Name of the Hive database
     * @return Unique qualified name to identify the Database instance in Atlas
     */
    public static String getDBQualifiedName(String clusterName, String dbName) {
        return String.format("%s@%s", dbName.toLowerCase(java.util.Locale.ROOT), clusterName);
    }
}
public class ApiOvhDomain { /** * Update the glue record
* REST : POST / domain / { serviceName } / glueRecord / { host } / update
* @ param ips [ required ] Ips of the glue record
* @ param serviceName [ required ] The internal name of your domain
* @ param host [ required ] Host of the glue record */
public net . minidev . ovh . api . domain . OvhTask serviceName_glueRecord_host_update_POST ( String serviceName , String host , String [ ] ips ) throws IOException { } } | String qPath = "/domain/{serviceName}/glueRecord/{host}/update" ; StringBuilder sb = path ( qPath , serviceName , host ) ; HashMap < String , Object > o = new HashMap < String , Object > ( ) ; addBody ( o , "ips" , ips ) ; String resp = exec ( qPath , "POST" , sb . toString ( ) , o ) ; return convertTo ( resp , net . minidev . ovh . api . domain . OvhTask . class ) ; |
public class Value { /** * Returns a value that is a percentage of the table ' s width . */
static public < C , T extends C > Value < C , T > percentWidth ( Toolkit < C , T > toolkit , final float percent ) { } } | return new TableValue < C , T > ( toolkit ) { @ Override public float get ( T table ) { return toolkit . getWidth ( table ) * percent ; } } ; |
public class CommerceVirtualOrderItemPersistenceImpl {

    /**
     * Returns the number of commerce virtual order items where commerceOrderItemId = &#63;.
     *
     * <p>The count is served from the finder cache when possible; on a cache miss
     * the COUNT query runs against the database and the result is cached.
     *
     * @param commerceOrderItemId the commerce order item ID
     * @return the number of matching commerce virtual order items
     */
    @Override
    public int countByCommerceOrderItemId(long commerceOrderItemId) {
        FinderPath finderPath = FINDER_PATH_COUNT_BY_COMMERCEORDERITEMID;

        Object[] finderArgs = new Object[] { commerceOrderItemId };

        // Cache lookup first; null means the count has not been cached yet.
        Long count = (Long) finderCache.getResult(finderPath, finderArgs, this);

        if (count == null) {
            StringBundler query = new StringBundler(2);

            query.append(_SQL_COUNT_COMMERCEVIRTUALORDERITEM_WHERE);
            query.append(_FINDER_COLUMN_COMMERCEORDERITEMID_COMMERCEORDERITEMID_2);

            String sql = query.toString();

            Session session = null;

            try {
                session = openSession();

                Query q = session.createQuery(sql);

                QueryPos qPos = QueryPos.getInstance(q);

                qPos.add(commerceOrderItemId);

                count = (Long) q.uniqueResult();

                // Populate the cache for subsequent calls.
                finderCache.putResult(finderPath, finderArgs, count);
            } catch (Exception e) {
                // Evict any stale entry on failure so a bad value is never served.
                finderCache.removeResult(finderPath, finderArgs);

                throw processException(e);
            } finally {
                closeSession(session);
            }
        }

        return count.intValue();
    }
}
public class ISO9660File { /** * Set file version
* @ param version File version
* @ throws HandlerException Invalid file version */
public void setVersion ( int version ) throws HandlerException { } } | if ( version < 1 || version > ISO9660Constants . MAX_FILE_VERSION ) { throw new HandlerException ( "Invalid file version: " + version ) ; } this . version = version ; if ( parent != null ) { parent . forceSort ( ) ; } |
public class NativeSplitter {

    /**
     * Computes the score of a segment as a weighted combination of four evidence
     * sources: similarity to the compost index, dictionary presence, corpus
     * presence, and corpus specificity.
     */
    private SegmentScoreEntry computeSegmentScore(String segment) {
        // Stop-listed segments never score.
        if (this.stopList.contains(segment))
            return SegmentScoreEntry.SCORE_ZERO;
        CompostIndexEntry closestEntry = compostIndex.getEntry(segment);
        double indexSimilarity = 0.0;
        if (closestEntry == null) {
            if (this.opt.getSegmentSimilarityThreshold() == 1) // do not compare similarity of this segment to the index
                return SegmentScoreEntry.SCORE_ZERO;
            // Find an entry by similarity
            Iterator<CompostIndexEntry> it = compostIndex.closedEntryCandidateIterator(segment);
            int entryLength = segment.length();
            double dist = 0;
            CompostIndexEntry entry;
            while (it.hasNext()) {
                entry = it.next();
                dist = distance.computeNormalized(segment, entry.getText());
                // NOTE(review): this keeps the LAST candidate above the threshold
                // (length difference <= 3), not necessarily the MOST similar one —
                // confirm whether "best match" was intended.
                if (Math.abs(entry.getText().length() - entryLength) <= 3 && dist >= this.opt.getSegmentSimilarityThreshold()) {
                    indexSimilarity = dist;
                    closestEntry = entry;
                }
            }
            if (closestEntry == null) { // could not find any close entry in the compost index
                return SegmentScoreEntry.SCORE_ZERO;
            }
        } else {
            // Exact index hit: maximal similarity.
            indexSimilarity = 1f;
        }
        int inCorpus = 0;
        int inDico = closestEntry.isInDico() || closestEntry.isInNeoClassicalPrefix() ? 1 : 0;
        // retrieves all sw terms that have the same lemma
        Collection<Term> corpusTerms = swtLemmaIndex.getTerms(segment);
        double wr = 0f;
        for (Iterator<Term> it = corpusTerms.iterator(); it.hasNext();)
            wr += Math.pow(10, it.next().getSpecificity());
        double dataCorpus;
        if (closestEntry.isInCorpus() && !corpusTerms.isEmpty()) {
            dataCorpus = wr / getMaxSpec();
            inCorpus = 1;
        } else {
            dataCorpus = 0;
            inCorpus = closestEntry.isInNeoClassicalPrefix() ? 1 : 0;
        }
        // Weighted linear combination of the four evidence sources.
        double score = this.opt.getAlpha() * indexSimilarity + this.opt.getBeta() * inDico + this.opt.getGamma() * inCorpus + this.opt.getDelta() * dataCorpus;
        if (logger.isTraceEnabled()) {
            logger.trace("Score for {} is {} [alpha: {} beta: {} gamma: {} delta: {}]", segment, score, indexSimilarity, inDico, inCorpus, dataCorpus);
        }
        return new SegmentScoreEntry(segment, findSegmentLemma(segment), score, closestEntry);
    }
}
public class ProjectableSQLQuery { /** * Add the given String literal as a join flag to the last added join
* @ param flag join flag
* @ param position position
* @ return the current object */
@ Override @ SuppressWarnings ( "unchecked" ) public Q addJoinFlag ( String flag , JoinFlag . Position position ) { } } | queryMixin . addJoinFlag ( new JoinFlag ( flag , position ) ) ; return ( Q ) this ; |
public class PJsonObject { /** * Get a property as a json array or default .
* @ param key the property name
* @ param defaultValue default */
public final PJsonArray optJSONArray ( final String key , final PJsonArray defaultValue ) { } } | PJsonArray result = optJSONArray ( key ) ; return result != null ? result : defaultValue ; |
public class HttpHeaders {

    /**
     * Adds a date header with the given name and value to the message.
     * Delegates directly to the message's header collection.
     *
     * @deprecated Use {@link #add(CharSequence, Object)} instead.
     * @see #addDateHeader(HttpMessage, CharSequence, Date)
     */
    @Deprecated
    public static void addDateHeader(HttpMessage message, String name, Date value) {
        message.headers().add(name, value);
    }
}
public class HttpSessionAttributeObserver {

    /**
     * Fires binding and attribute listeners when a session attribute is set.
     * Binding listeners (valueUnbound/valueBound) run first, then all registered
     * HttpSessionAttributeListeners are notified of a replace or an add.
     *
     * @see com.ibm.wsspi.session.ISessionStateObserver#sessionAttributeSet(com.ibm.wsspi.session.ISession, java.lang.Object, java.lang.Object, java.lang.Object)
     */
    public void sessionAttributeSet(ISession session, Object name, Object oldValue, Boolean oldIsListener, Object newValue, Boolean newIsListener) {
        HttpSession httpsession = (HttpSession) _adapter.adapt(session);
        HttpSessionBindingEvent addEvent = null; // only created if actually needed
        HttpSessionBindingEvent replaceEvent = null; // only created if actually needed (see below)
        // Do binding listeners first to be consistent with v6.1 behavior.
        if ((oldValue != null) && (oldIsListener.booleanValue())) {
            replaceEvent = new HttpSessionBindingEvent(httpsession, (String) name, oldValue);
            // oldIsListener flag means oldValue is an HttpSessionBindingListener
            ((HttpSessionBindingListener) oldValue).valueUnbound(replaceEvent);
        }
        if ((newValue != null) && (newIsListener.booleanValue())) {
            // newIsListener flag means newValue is an HttpSessionBindingListener
            addEvent = new HttpSessionBindingEvent(httpsession, (String) name, newValue);
            ((HttpSessionBindingListener) newValue).valueBound(addEvent);
        }
        // Now notify the registered attribute listeners.
        HttpSessionAttributeListener listener = null;
        for (int i = 0; i < _sessionAttributeListeners.size(); i++) {
            listener = (HttpSessionAttributeListener) _sessionAttributeListeners.get(i);
            if (oldValue != null) {
                // An existing value was replaced.
                if (com.ibm.ejs.ras.TraceComponent.isAnyTracingEnabled() && LoggingUtil.SESSION_LOGGER_CORE.isLoggable(Level.FINE)) {
                    LoggingUtil.SESSION_LOGGER_CORE.logp(Level.FINE, methodClassName, "sessionAttrSet", "Calling attributeReplace on listener:" + listener);
                }
                // Reuse the event created for the binding listener, if any.
                if (replaceEvent == null)
                    replaceEvent = new HttpSessionBindingEvent(httpsession, (String) name, oldValue);
                listener.attributeReplaced(replaceEvent);
            } else {
                // A brand-new attribute was added.
                if (com.ibm.ejs.ras.TraceComponent.isAnyTracingEnabled() && LoggingUtil.SESSION_LOGGER_CORE.isLoggable(Level.FINE)) {
                    LoggingUtil.SESSION_LOGGER_CORE.logp(Level.FINE, methodClassName, "sessionAttrSet", "Calling attributeCreated on listener:" + listener);
                }
                if (addEvent == null)
                    addEvent = new HttpSessionBindingEvent(httpsession, (String) name, newValue);
                listener.attributeAdded(addEvent);
            }
        }
    }
}
public class S3CryptoModuleAE {

    /**
     * Same as {@link #decipher(GetObjectRequest, long[], long[], S3Object)}
     * but makes use of an instruction file with the specified suffix.
     *
     * @param instFileSuffix never null or empty (which is assumed to have been
     *                       sanitized upstream)
     * @throws SdkClientException if no instruction file with the given suffix exists
     */
    private S3Object decipherWithInstFileSuffix(GetObjectRequest req, long[] desiredRange, long[] cryptoRange, S3Object retrieved, String instFileSuffix) {
        final S3ObjectId id = req.getS3ObjectId();
        // The encryption metadata lives in a side-car instruction file.
        final S3ObjectWrapper ifile = fetchInstructionFile(id, instFileSuffix);
        if (ifile == null) {
            throw new SdkClientException("Instruction file with suffix " + instFileSuffix + " is not found for " + retrieved);
        }
        try {
            return decipherWithInstructionFile(req, desiredRange, cryptoRange, new S3ObjectWrapper(retrieved, id), ifile);
        } finally {
            // Always release the instruction file's underlying stream.
            closeQuietly(ifile, log);
        }
    }
}
public class Pointer { /** * Serialize pointer map .
* @ param pointer Pointer data .
* @ return Pointer content bytes . */
@ NotNull public static byte [ ] serializePointer ( @ NotNull Map < String , String > pointer ) { } } | final Map < String , String > data = new TreeMap < > ( pointer ) ; final StringBuilder buffer = new StringBuilder ( ) ; // Write version .
{ String version = data . remove ( VERSION ) ; if ( version == null ) { version = VERSION_URL ; } buffer . append ( VERSION ) . append ( ' ' ) . append ( version ) . append ( '\n' ) ; } for ( Map . Entry < String , String > entry : data . entrySet ( ) ) { buffer . append ( entry . getKey ( ) ) . append ( ' ' ) . append ( entry . getValue ( ) ) . append ( '\n' ) ; } return buffer . toString ( ) . getBytes ( StandardCharsets . UTF_8 ) ; |
public class CommerceWishListItemPersistenceImpl {

    /**
     * Returns the commerce wish list items before and after the current commerce
     * wish list item in the ordered set where CPInstanceUuid = &#63;.
     *
     * @param commerceWishListItemId the primary key of the current commerce wish list item
     * @param CPInstanceUuid the cp instance uuid
     * @param orderByComparator the comparator to order the set by (optionally <code>null</code>)
     * @return the previous, current, and next commerce wish list item
     * @throws NoSuchWishListItemException if a commerce wish list item with the primary key could not be found
     */
    @Override
    public CommerceWishListItem[] findByCPInstanceUuid_PrevAndNext(long commerceWishListItemId, String CPInstanceUuid, OrderByComparator<CommerceWishListItem> orderByComparator) throws NoSuchWishListItemException {
        // Throws NoSuchWishListItemException if the anchor item does not exist.
        CommerceWishListItem commerceWishListItem = findByPrimaryKey(commerceWishListItemId);

        Session session = null;

        try {
            session = openSession();

            CommerceWishListItem[] array = new CommerceWishListItemImpl[3];

            // [0] = previous, [1] = current, [2] = next in the ordered set.
            array[0] = getByCPInstanceUuid_PrevAndNext(session, commerceWishListItem, CPInstanceUuid, orderByComparator, true);

            array[1] = commerceWishListItem;

            array[2] = getByCPInstanceUuid_PrevAndNext(session, commerceWishListItem, CPInstanceUuid, orderByComparator, false);

            return array;
        } catch (Exception e) {
            throw processException(e);
        } finally {
            closeSession(session);
        }
    }
}
public class GroovyCategorySupport { /** * This method is used to pull all the new methods out of the local thread context with a particular name .
* @ param name the method name of interest
* @ return the list of methods */
public static CategoryMethodList getCategoryMethods ( String name ) { } } | final ThreadCategoryInfo categoryInfo = THREAD_INFO . getInfoNullable ( ) ; return categoryInfo == null ? null : categoryInfo . getCategoryMethods ( name ) ; |
public class FieldAugment { /** * Creates a reflection - based augment which will directly access the listed field name . If this field does not exist or the field
* is not capable of storing the requested type , { @ code null } is returned instead . */
private static < T , F > FieldAugment < T , F > tryCreateReflectionAugment ( Class < T > type , Class < ? super F > fieldType , String name , F defaultValue ) { } } | Field f = findField ( type , fieldType , name ) ; if ( f != null && typeIsAssignmentCompatible ( f . getType ( ) , fieldType ) ) return new ReflectionFieldAugment < T , F > ( f , fieldType , defaultValue ) ; return null ; |
public class DropSpatialIndexGeneratorGeoDB {

    /**
     * Generates the SQL statement to drop the spatial index if it exists.
     *
     * @param statement the drop spatial index statement.
     * @param database  the database.
     * @return the drop spatial index statement if the index exists, otherwise an
     *         empty array (no SQL to run).
     */
    public Sql[] generateSqlIfExists(final DropSpatialIndexStatement statement, final Database database) {
        final String catalogName = statement.getTableCatalogName();
        final String schemaName = statement.getTableSchemaName();
        final String tableName = statement.getTableName();
        // Build an existence precondition describing the index to look for.
        final SpatialIndexExistsPrecondition precondition = new SpatialIndexExistsPrecondition();
        precondition.setCatalogName(catalogName);
        precondition.setSchemaName(schemaName);
        precondition.setTableName(tableName);
        final DatabaseObject example = precondition.getExample(database, tableName);
        try {
            // If a spatial index exists on the table, drop it.
            if (SnapshotGeneratorFactory.getInstance().has(example, database)) {
                return generateSql(statement, database, null);
            }
        } catch (final Exception e) {
            throw new UnexpectedLiquibaseException(e);
        }
        // Index does not exist: nothing to drop.
        return new Sql[0];
    }
}
public class KinesisDataFetcher { /** * Starts shutting down the fetcher . Must be called to allow { @ link KinesisDataFetcher # runFetcher ( ) } to complete .
* Once called , the shutdown procedure will be executed and all shard consuming threads will be interrupted . */
public void shutdownFetcher ( ) { } } | running = false ; if ( mainThread != null ) { mainThread . interrupt ( ) ; // the main thread may be sleeping for the discovery interval
} if ( LOG . isInfoEnabled ( ) ) { LOG . info ( "Shutting down the shard consumer threads of subtask {} ..." , indexOfThisConsumerSubtask ) ; } shardConsumersExecutor . shutdownNow ( ) ; |
public class ICalendar { /** * Sets the location that the calendar data can be refreshed from .
* @ param url the source or null to remove
* @ return the property object that was created
* @ see < a
* href = " http : / / tools . ietf . org / html / draft - ietf - calext - extensions - 01 # page - 8 " > draft - ietf - calext - extensions - 01
* p . 8 < / a > */
public Source setSource ( String url ) { } } | Source property = ( url == null ) ? null : new Source ( url ) ; setSource ( property ) ; return property ; |
public class JolokiaHttpHandler { /** * Return hostnmae of given address , but only when reverse DNS lookups are allowed */
private String getHostName ( InetSocketAddress address ) { } } | return configuration . getAsBoolean ( ConfigKey . ALLOW_DNS_REVERSE_LOOKUP ) ? address . getHostName ( ) : null ; |
public class DomainsInner { /** * List keys for a domain .
* List the two keys used to publish to a domain .
* @ param resourceGroupName The name of the resource group within the user ' s subscription .
* @ param domainName Name of the domain
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the DomainSharedAccessKeysInner object */
public Observable < DomainSharedAccessKeysInner > listSharedAccessKeysAsync ( String resourceGroupName , String domainName ) { } } | return listSharedAccessKeysWithServiceResponseAsync ( resourceGroupName , domainName ) . map ( new Func1 < ServiceResponse < DomainSharedAccessKeysInner > , DomainSharedAccessKeysInner > ( ) { @ Override public DomainSharedAccessKeysInner call ( ServiceResponse < DomainSharedAccessKeysInner > response ) { return response . body ( ) ; } } ) ; |
public class DatabaseUtils { /** * Reads a Long out of a column in a Cursor and writes it to a ContentValues .
* Adds nothing to the ContentValues if the column isn ' t present or if its value is null .
* @ param cursor The cursor to read from
* @ param column The column to read
* @ param values The { @ link ContentValues } to put the value into */
public static void cursorLongToContentValuesIfPresent ( Cursor cursor , ContentValues values , String column ) { } } | final int index = cursor . getColumnIndex ( column ) ; if ( index != - 1 && ! cursor . isNull ( index ) ) { values . put ( column , cursor . getLong ( index ) ) ; } |
public class UserConfiguration {

    /**
     * Convert the configuration to an XML file so it can be serialized,
     * adding to an already existing xml file.
     *
     * @param xw the XML writer to use
     * @return the writer again
     * @throws IOException if writing fails
     * @see org.biojava.nbio.structure.align.webstart.ConfigXMLHandler
     */
    public XMLWriter toXML(XMLWriter xw) throws IOException {
        xw.printRaw("<?xml version='1.0' standalone='no' ?>");
        // xw.printRaw("<!DOCTYPE " + XML_CONTENT_TYPE + " SYSTEM '" + XML_DTD + "'>");
        xw.openTag("JFatCatConfig");
        xw.openTag("PDBFILEPATH");
        // The temp dir is deliberately not serialized: only persist an explicit path.
        String tempdir = System.getProperty(TMP_DIR);
        if (!pdbFilePath.equals(tempdir))
            xw.attribute("path", pdbFilePath);
        xw.attribute("fetchBehavior", fetchBehavior + "");
        xw.attribute("obsoleteBehavior", obsoleteBehavior + "");
        xw.attribute("fileFormat", fileFormat);
        xw.closeTag("PDBFILEPATH");
        xw.closeTag("JFatCatConfig");
        return xw;
    }
}
public class TargetSpecifications {

    /**
     * Finds all targets by given {@link Target#getControllerId()}s and which are
     * not yet assigned to given {@link DistributionSet}.
     *
     * @param tIDs           controller IDs to search for.
     * @param distributionId set that is not yet assigned
     * @return the {@link Target} {@link Specification}
     */
    public static Specification<JpaTarget> hasControllerIdAndAssignedDistributionSetIdNot(final List<String> tIDs, @NotNull final Long distributionId) {
        // controllerId IN (tIDs) AND (assignedDistributionSet.id <> distributionId
        // OR assignedDistributionSet IS NULL) — the IS NULL arm covers targets
        // that have never had a distribution set assigned.
        return (targetRoot, query, cb) -> cb.and(
                targetRoot.get(JpaTarget_.controllerId).in(tIDs),
                cb.or(
                        cb.notEqual(targetRoot.<JpaDistributionSet> get(JpaTarget_.assignedDistributionSet).get(JpaDistributionSet_.id), distributionId),
                        cb.isNull(targetRoot.<JpaDistributionSet> get(JpaTarget_.assignedDistributionSet))));
    }
}
public class RelationshipResolverImpl { /** * ( non - Javadoc )
* @ see
* org . fcrepo . server . security . xacml . pdp . finder . support . RelationshipResolver # buildRESTParentHierarchy
* ( java . lang . String ) */
@ Override public String buildRESTParentHierarchy ( String pid ) throws MelcoeXacmlException { } } | Set < String > parents = getParents ( pid ) ; if ( parents == null || parents . size ( ) == 0 ) { return "/" + pid ; } String [ ] parentArray = parents . toArray ( new String [ parents . size ( ) ] ) ; return buildRESTParentHierarchy ( parentArray [ 0 ] ) + "/" + pid ; |
public class ProtobufMatcherEvalContext {

    /**
     * Handles a protobuf tag. Before the payload starts, this consumes the
     * WrappedMessage envelope fields (type name/id, payload bytes); once the
     * payload has started it feeds only 'interesting' fields into the matcher tree.
     *
     * <p>TODO [anistor] missing tags need to be fired with default value defined
     * in proto schema or null if they admit null; missing messages need to be
     * fired with null at end of the nesting level. BTW, seems like this is better
     * to be included in Protostream as a feature.
     */
    @Override
    public void onTag(int fieldNumber, FieldDescriptor fieldDescriptor, Object tagValue) {
        if (payloadStarted) {
            // skipping > 0 means we are inside an uninteresting nested message.
            if (skipping == 0) {
                AttributeNode<FieldDescriptor, Integer> attrNode = currentNode.getChild(fieldNumber);
                if (attrNode != null) {
                    // Process only 'interesting' tags (those present in the matcher tree).
                    messageContext.markField(fieldNumber);
                    attrNode.processValue(tagValue, this);
                }
            }
        } else {
            // Still reading the WrappedMessage envelope.
            switch (fieldNumber) {
                case WrappedMessage.WRAPPED_DESCRIPTOR_FULL_NAME:
                    entityTypeName = (String) tagValue;
                    break;
                case WrappedMessage.WRAPPED_DESCRIPTOR_ID:
                    entityTypeName = serializationContext.getTypeNameById((Integer) tagValue);
                    break;
                case WrappedMessage.WRAPPED_MESSAGE:
                    payload = (byte[]) tagValue;
                    break;
                case WrappedMessage.WRAPPED_DOUBLE:
                case WrappedMessage.WRAPPED_FLOAT:
                case WrappedMessage.WRAPPED_INT64:
                case WrappedMessage.WRAPPED_UINT64:
                case WrappedMessage.WRAPPED_INT32:
                case WrappedMessage.WRAPPED_FIXED64:
                case WrappedMessage.WRAPPED_FIXED32:
                case WrappedMessage.WRAPPED_BOOL:
                case WrappedMessage.WRAPPED_STRING:
                case WrappedMessage.WRAPPED_BYTES:
                case WrappedMessage.WRAPPED_UINT32:
                case WrappedMessage.WRAPPED_SFIXED32:
                case WrappedMessage.WRAPPED_SFIXED64:
                case WrappedMessage.WRAPPED_SINT32:
                case WrappedMessage.WRAPPED_SINT64:
                case WrappedMessage.WRAPPED_ENUM:
                    // This is a primitive value, which we ignore for now due to
                    // lack of support for querying primitives.
                    break;
                default:
                    throw new IllegalStateException("Unexpected field : " + fieldNumber);
            }
        }
    }
}
public class AnomalyDetectionKMeansTransform {

    /**
     * For each cluster, caches the mean distance from data points in the cluster
     * to the cluster centroid. Mean distances are used later in anomaly score
     * calculations.
     */
    private void setMeanDistancesToCentroids() {
        meanDistancesToCentroids = new HashMap<>();
        for (int i = 0; i < clusterCentroids.numInstances(); i++) { // For each centroid
            int countAssignedInstances = 0;
            double sumDistancesToCentroid = 0.0;
            Instance centroidInstance = clusterCentroids.instance(i);
            for (int j = 0; j < trainingData.numInstances(); j++) { // For each data point
                if (i == centroidAssignments[j]) {
                    Instance valueInstance = trainingData.instance(j);
                    // 1-D distance: only attribute 0 is compared.
                    double distanceToCentroid = Math.abs(valueInstance.value(0) - centroidInstance.value(0));
                    sumDistancesToCentroid += distanceToCentroid;
                    countAssignedInstances++;
                }
            }
            // NOTE(review): a cluster with no assigned instances yields 0/0 = NaN
            // here — confirm downstream anomaly scoring tolerates that.
            double meanDistanceToCentroid = sumDistancesToCentroid / countAssignedInstances;
            meanDistancesToCentroids.put(centroidInstance, meanDistanceToCentroid);
        }
    }
}
public class VirtualMachineRunCommandsInner { /** * Gets specific run command for a subscription in a location .
* @ param location The location upon which run commands is queried .
* @ param commandId The command id .
* @ param serviceCallback the async ServiceCallback to handle successful and failed responses .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the { @ link ServiceFuture } object */
public ServiceFuture < RunCommandDocumentInner > getAsync ( String location , String commandId , final ServiceCallback < RunCommandDocumentInner > serviceCallback ) { } } | return ServiceFuture . fromResponse ( getWithServiceResponseAsync ( location , commandId ) , serviceCallback ) ; |
public class UserLimits {

    /**
     * Get limits from the JWT key itself, no database access needed.
     * Verifies the token's HMAC256 signature against the server's secret key,
     * then reads the optional claims: maxTextLength, premium, uid.
     */
    static UserLimits getLimitsFromToken(HTTPServerConfig config, String jwtToken) {
        Objects.requireNonNull(jwtToken);
        try {
            String secretKey = config.getSecretTokenKey();
            if (secretKey == null) {
                // A token was supplied but the server has no key to verify it with.
                throw new RuntimeException("You specified a 'token' parameter but this server doesn't accept tokens");
            }
            Algorithm algorithm = Algorithm.HMAC256(secretKey);
            DecodedJWT decodedToken;
            try {
                // Verify the signature first; only then decode the claims.
                JWT.require(algorithm).build().verify(jwtToken);
                decodedToken = JWT.decode(jwtToken);
            } catch (JWTDecodeException e) {
                throw new AuthException("Could not decode token '" + jwtToken + "'", e);
            }
            Claim maxTextLengthClaim = decodedToken.getClaim("maxTextLength");
            Claim premiumClaim = decodedToken.getClaim("premium");
            boolean hasPremium = !premiumClaim.isNull() && premiumClaim.asBoolean();
            Claim uidClaim = decodedToken.getClaim("uid");
            // -1 marks "no uid claim present".
            long uid = uidClaim.isNull() ? -1 : uidClaim.asLong();
            // Fall back to the server-wide defaults for any claim that is absent;
            // the uid is only attached for premium users.
            return new UserLimits(maxTextLengthClaim.isNull() ? config.maxTextLength : maxTextLengthClaim.asInt(), config.maxCheckTimeMillis, hasPremium ? uid : null);
        } catch (UnsupportedEncodingException e) {
            throw new RuntimeException(e);
        }
    }
}
public class YearMonthDay { /** * Returns a copy of this date with the specified chronology .
* This instance is immutable and unaffected by this method call .
* This method retains the values of the fields , thus the result will
* typically refer to a different instant .
* The time zone of the specified chronology is ignored , as YearMonthDay
* operates without a time zone .
* @ param newChronology the new chronology , null means ISO
* @ return a copy of this datetime with a different chronology
* @ throws IllegalArgumentException if the values are invalid for the new chronology */
public YearMonthDay withChronologyRetainFields ( Chronology newChronology ) { } } | newChronology = DateTimeUtils . getChronology ( newChronology ) ; newChronology = newChronology . withUTC ( ) ; if ( newChronology == getChronology ( ) ) { return this ; } else { YearMonthDay newYearMonthDay = new YearMonthDay ( this , newChronology ) ; newChronology . validate ( newYearMonthDay , getValues ( ) ) ; return newYearMonthDay ; } |
public class RulesController {

    /**
     * POST /rules
     * Creates a new rule. The URL of the new rule will be returned in the
     * Location header. You can also POST a list of rules to do batch creation.
     * Optional extra headers: Comment — a comment that will appear in the change
     * log; User — end-user who requested the change.
     *
     * <p>Note: for batch creation, only the LAST created rule's id ends up in
     * the Location header.
     *
     * @param request the HTTP request carrying one rule or a collection of rules
     * @return a redirect view to the created rule, or an error view
     * @throws IOException if the request body cannot be read
     */
    @SuppressWarnings("unchecked")
    public ModelAndView postNewRule(HttpServletRequest request) throws IOException {
        Object data = view.deserializeRequest(request);
        Collection<Rule> rules;
        // Accept either a single rule or a batch.
        if (data instanceof Collection) {
            rules = (Collection<Rule>) data;
        } else if (data instanceof Rule) {
            rules = new LinkedList<Rule>();
            rules.add((Rule) data);
        } else {
            return new ModelAndView(view, "object", new SimpleError("Expected a rule or collection of rules.", 400));
        }
        long id = 0;
        for (Rule rule : rules) {
            // Force creation: any client-supplied id is discarded.
            rule.setId(null);
            String comment = "Created by REST client: " + request.getHeader("User-agent");
            if (request.getHeader("Comment") != null) {
                comment = request.getHeader("Comment");
            }
            String user = "" + request.getRemoteUser() + "@" + request.getRemoteAddr();
            if (request.getHeader("User") != null) {
                user = request.getHeader("User") + " via " + user;
            }
            RuleChange change = new RuleChange(rule, RuleChange.CREATED, new Date(), user, comment);
            ruleDao.saveRule(rule, change);
            id = rule.getId();
        }
        return new ModelAndView(new CreatedView("/rules/" + id));
    }
}
public class AbstractSettings { /** * / * ( non - Javadoc )
* @ see nyla . solutions . core . util . Settings # getPropertyStrings ( java . lang . Class , java . lang . String , java . lang . String [ ] ) */
@ Override public String [ ] getPropertyStrings ( Class < ? > aClass , String key , String [ ] aDefault ) { } } | String property = getProperty ( aClass , key , "" ) ; if ( "" . equals ( property ) ) return aDefault ; return Text . split ( property ) ; |
public class LoopStatement {

    /**
     * A helper method for creating Iterators for use with 'exists' and 'foreach'
     * elements. Primarily for use with generated Java code (not necessary for
     * direct interpretation).
     *
     * <p>The dispatch order matters: arrays, then Iterable/Iterator, then the
     * special String/Number cases, and finally a single-element fallback.
     */
    public static Iterator makeIterator(Object obj, IType typeHint) {
        if (obj == null) {
            return null;
        }
        if (typeHint.isArray() && (obj.getClass().isArray() || TypeSystem.getFromObject(obj).isArray())) {
            return new ArrayIterator(obj, typeHint);
        }
        if (obj instanceof Iterable) {
            return ((Iterable) obj).iterator();
        }
        if (obj instanceof Iterator) {
            return (Iterator) obj;
        }
        // Treat a string as a list of characters.
        if (obj instanceof String) {
            return new StringIterator((String) obj);
        }
        if (obj instanceof Number) {
            return new NumberIterator((Number) obj);
        }
        // Oh well. Convert to a List of length one and iterate that single element.
        return Collections.nCopies(1, obj).iterator();
    }
}
public class XMLUtils { /** * Look up namespace attribute declarations in the XML fragment and
* store them in a binding map , where the key is the namespace prefix and the value
* is the namespace uri .
* @ param xml XML fragment .
* @ return map containing namespace prefix - namespace uri pairs . */
public static Map < String , String > lookupNamespaces ( String xml ) { } } | Map < String , String > namespaces = new HashMap < String , String > ( ) ; // TODO : handle inner CDATA sections because namespaces they might interfere with real namespaces in xml fragment
if ( xml . indexOf ( XMLConstants . XMLNS_ATTRIBUTE ) != - 1 ) { String [ ] tokens = StringUtils . split ( xml , XMLConstants . XMLNS_ATTRIBUTE ) ; do { String token = tokens [ 1 ] ; String nsPrefix ; if ( token . startsWith ( ":" ) ) { nsPrefix = token . substring ( 1 , token . indexOf ( '=' ) ) ; } else if ( token . startsWith ( "=" ) ) { nsPrefix = XMLConstants . DEFAULT_NS_PREFIX ; } else { // we have found a " xmlns " phrase that is no namespace attribute - ignore and continue
tokens = StringUtils . split ( token , XMLConstants . XMLNS_ATTRIBUTE ) ; continue ; } String nsUri ; try { nsUri = token . substring ( token . indexOf ( '\"' ) + 1 , token . indexOf ( '\"' , token . indexOf ( '\"' ) + 1 ) ) ; } catch ( StringIndexOutOfBoundsException e ) { // maybe we have more luck with single " ' "
nsUri = token . substring ( token . indexOf ( '\'' ) + 1 , token . indexOf ( '\'' , token . indexOf ( '\'' ) + 1 ) ) ; } namespaces . put ( nsPrefix , nsUri ) ; tokens = StringUtils . split ( token , XMLConstants . XMLNS_ATTRIBUTE ) ; } while ( tokens != null ) ; } return namespaces ; |
public class Kidnummer { /** * Create a valid KID numer of the wanted length , using MOD10.
* Input is padded with leading zeros to reach wanted target length
* @ param baseNumber base number to calculate checksum digit for
* @ param targetLength wanted length , 0 - padded . Between 2-25
* @ return Kidnummer */
public static Kidnummer mod10Kid ( String baseNumber , int targetLength ) { } } | if ( baseNumber . length ( ) >= targetLength ) throw new IllegalArgumentException ( "baseNumber too long" ) ; String padded = String . format ( "%0" + ( targetLength - 1 ) + "d" , new BigInteger ( baseNumber ) ) ; Kidnummer k = new Kidnummer ( padded + "0" ) ; return KidnummerValidator . getKidnummer ( padded + calculateMod10CheckSum ( getMod10Weights ( k ) , k ) ) ; |
public class _AjaxBehaviorDeltaStateHelper { /** * Used to create delta map on demand
* @ return */
private boolean _createDeltas ( ) { } } | if ( isInitialStateMarked ( ) ) { if ( _deltas == null ) { _deltas = new HashMap < Serializable , Object > ( 2 ) ; } return true ; } return false ; |
public class AbstractBusPrimitive { /** * This function is invoked by the attribute provider each time
* an attribute has changed .
* < p > You should override this method to provide several feedback
* to the BusPrimitive for instance .
* @ param name is the name of the attribute that changed */
protected void onAttributeChanged ( String name ) { } } | Object v = null ; try { v = getAttribute ( name ) . getValue ( ) ; } catch ( Exception exception ) { } if ( ATTR_COLOR . equalsIgnoreCase ( name ) ) { fireGraphicalAttributeChanged ( name , null , v ) ; } else { firePrimitiveChanged ( name , null , v ) ; } |
public class HeidelTimeStandalone { /** * Method that initializes all vital prerequisites , including POS Tagger
* @ param languageLanguage to be processed with this copy of HeidelTime
* @ param typeToProcessDomain type to be processed
* @ param outputTypeOutput type
* @ param configPathPath to the configuration file for HeidelTimeStandalone
* @ param posTaggerPOS Tagger to use for preprocessing */
public void initialize ( Language language , DocumentType typeToProcess , OutputType outputType , String configPath , POSTagger posTagger ) { } } | initialize ( language , typeToProcess , outputType , configPath , posTagger , false ) ; |
public class SlaProxy { /** * Creates proxied HTTP DELETE request to SeaClouds SLA core which removes the SLA from the SLA Core
* @ param agreementId of the SLA Agreement to be removed . This ID may differ from SeaClouds Application ID
* @ return String representing that the Agreement was removed properly */
public String removeAgreement ( String agreementId ) { } } | Invocation invocation = getJerseyClient ( ) . target ( getEndpoint ( ) + "/agreements/" + agreementId ) . request ( ) . header ( "Accept" , MediaType . APPLICATION_JSON ) . header ( "Content-Type" , MediaType . APPLICATION_JSON ) . buildDelete ( ) ; // SLA Core returns a text message if the response was succesfully not the object , this is not the best behaviour
return invocation . invoke ( ) . readEntity ( String . class ) ; |
public class ComputerVisionImpl { /** * Recognize Text operation . When you use the Recognize Text interface , the response contains a field called ' Operation - Location ' . The ' Operation - Location ' field contains the URL that you must use for your Get Recognize Text Operation Result operation .
* @ param image An image stream .
* @ param mode Type of text to recognize . Possible values include : ' Handwritten ' , ' Printed '
* @ param serviceCallback the async ServiceCallback to handle successful and failed responses .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the { @ link ServiceFuture } object */
public ServiceFuture < Void > recognizeTextInStreamAsync ( byte [ ] image , TextRecognitionMode mode , final ServiceCallback < Void > serviceCallback ) { } } | return ServiceFuture . fromHeaderResponse ( recognizeTextInStreamWithServiceResponseAsync ( image , mode ) , serviceCallback ) ; |
public class DevFailedUtils { /** * Convert a DevFailed to a String
* @ param e
* @ return */
public static void logDevFailed ( final DevFailed e , final Logger logger ) { } } | if ( e . errors != null ) { for ( int i = 0 ; i < e . errors . length ; i ++ ) { logger . error ( "Error Level {} :" , i ) ; logger . error ( "\t - desc: {}" , e . errors [ i ] . desc ) ; logger . error ( "\t - origin: {}" , e . errors [ i ] . origin ) ; logger . error ( "\t - reason: {}" , e . errors [ i ] . reason ) ; String sev = "" ; if ( e . errors [ i ] . severity . value ( ) == ErrSeverity . ERR . value ( ) ) { sev = "ERROR" ; } else if ( e . errors [ i ] . severity . value ( ) == ErrSeverity . PANIC . value ( ) ) { sev = "PANIC" ; } else if ( e . errors [ i ] . severity . value ( ) == ErrSeverity . WARN . value ( ) ) { sev = "WARN" ; } logger . error ( "\t - severity: {}" , sev ) ; } } else { logger . error ( "EMPTY DevFailed" ) ; } |
public class SshProvider { /** * Start ssh session and obtain session .
* @ return the session */
public Session startSshSessionAndObtainSession ( ) { } } | Session session = null ; try { JSch jsch = new JSch ( ) ; if ( sshMeta . getSshLoginType ( ) == SshLoginType . KEY ) { String workingDir = System . getProperty ( "user.dir" ) ; String privKeyAbsPath = workingDir + "/" + sshMeta . getPrivKeyRelativePath ( ) ; logger . debug ( "use privkey: path: " + privKeyAbsPath ) ; if ( ! PcFileNetworkIoUtils . isFileExist ( privKeyAbsPath ) ) { throw new RuntimeException ( "file not found at " + privKeyAbsPath ) ; } if ( sshMeta . isPrivKeyUsePassphrase ( ) && sshMeta . getPassphrase ( ) != null ) { jsch . addIdentity ( privKeyAbsPath , sshMeta . getPassphrase ( ) ) ; } else { jsch . addIdentity ( privKeyAbsPath ) ; } } session = jsch . getSession ( sshMeta . getUserName ( ) , targetHost , sshMeta . getSshPort ( ) ) ; if ( sshMeta . getSshLoginType ( ) == SshLoginType . PASSWORD ) { session . setPassword ( sshMeta . getPassword ( ) ) ; } session . setConfig ( "StrictHostKeyChecking" , "no" ) ; } catch ( Exception t ) { throw new RuntimeException ( t ) ; } return session ; |
public class Requests { /** * Tag any of the resources we specify .
* @ param resources
* @ param tags */
private void tagResources ( List < String > resources , List < Tag > tags ) { } } | // Create a tag request .
CreateTagsRequest createTagsRequest = new CreateTagsRequest ( ) ; createTagsRequest . setResources ( resources ) ; createTagsRequest . setTags ( tags ) ; // Try to tag the Spot request submitted .
try { ec2 . createTags ( createTagsRequest ) ; } catch ( AmazonServiceException e ) { // Write out any exceptions that may have occurred .
System . out . println ( "Error terminating instances" ) ; System . out . println ( "Caught Exception: " + e . getMessage ( ) ) ; System . out . println ( "Reponse Status Code: " + e . getStatusCode ( ) ) ; System . out . println ( "Error Code: " + e . getErrorCode ( ) ) ; System . out . println ( "Request ID: " + e . getRequestId ( ) ) ; } |
public class ManagedThreadFactoryService { /** * DS method to activate this component .
* Best practice : this should be a protected method , not public or private
* @ param componentContext DeclarativeService defined / populated component context */
@ Trivial protected void activate ( ComponentContext componentContext ) { } } | Dictionary < String , ? > properties = componentContext . getProperties ( ) ; final boolean trace = TraceComponent . isAnyTracingEnabled ( ) ; if ( trace && tc . isEntryEnabled ( ) ) Tr . entry ( this , tc , "activate" , properties ) ; contextSvcRef . activate ( componentContext ) ; String jndiName = ( String ) properties . get ( JNDI_NAME ) ; name = jndiName == null ? ( String ) properties . get ( CONFIG_ID ) : jndiName ; defaultExecutionProperties = new TreeMap < String , String > ( ) ; defaultExecutionProperties . put ( WSContextService . DEFAULT_CONTEXT , WSContextService . UNCONFIGURED_CONTEXT_TYPES ) ; defaultExecutionProperties . put ( WSContextService . TASK_OWNER , name ) ; createDaemonThreads = ( Boolean ) properties . get ( CREATE_DAEMON_THREADS ) ; defaultPriority = ( Integer ) properties . get ( DEFAULT_PRIORITY ) ; Integer maxPriority = ( Integer ) properties . get ( MAX_PRIORITY ) ; threadGroup = AccessController . doPrivileged ( new CreateThreadGroupAction ( name + " Thread Group" , maxPriority ) , threadGroupTracker . serverAccessControlContext ) ; if ( trace && tc . isEntryEnabled ( ) ) Tr . exit ( this , tc , "activate" ) ; |
public class UF5 { /** * Evaluate ( ) method */
@ Override public void evaluate ( DoubleSolution solution ) { } } | double [ ] x = new double [ getNumberOfVariables ( ) ] ; for ( int i = 0 ; i < solution . getNumberOfVariables ( ) ; i ++ ) { x [ i ] = solution . getVariableValue ( i ) ; } int count1 , count2 ; double sum1 , sum2 , yj , hj ; sum1 = sum2 = 0.0 ; count1 = count2 = 0 ; for ( int j = 2 ; j <= getNumberOfVariables ( ) ; j ++ ) { yj = x [ j - 1 ] - Math . sin ( 6.0 * Math . PI * x [ 0 ] + j * Math . PI / getNumberOfVariables ( ) ) ; hj = 2.0 * yj * yj - Math . cos ( 4.0 * Math . PI * yj ) + 1.0 ; if ( j % 2 == 0 ) { sum2 += hj ; count2 ++ ; } else { sum1 += hj ; count1 ++ ; } } hj = ( 0.5 / n + epsilon ) * Math . abs ( Math . sin ( 2.0 * n * Math . PI * x [ 0 ] ) ) ; solution . setObjective ( 0 , x [ 0 ] + hj + 2.0 * sum1 / ( double ) count1 ) ; solution . setObjective ( 1 , 1.0 - x [ 0 ] + hj + 2.0 * sum2 / ( double ) count2 ) ; |
public class OoPlaceKroneckerProduct { /** * TODO : It should not be common . */
@ Override public Matrix applyCommon ( Matrix a , Matrix b ) { } } | int n = a . rows ( ) * b . rows ( ) ; int m = a . columns ( ) * b . columns ( ) ; int p = b . rows ( ) ; int q = b . columns ( ) ; Matrix result = a . blankOfShape ( n , m ) ; for ( int i = 0 ; i < n ; i ++ ) { for ( int j = 0 ; j < m ; j ++ ) { result . set ( i , j , a . get ( i / p , j / q ) * b . get ( i % p , j % q ) ) ; } } return result ; |
public class URI { /** * Set the port for this URI . - 1 is used to indicate that the port is
* not specified , otherwise valid port numbers are between 0 and 65535.
* If a valid port number is passed in and the host field is null ,
* an exception is thrown .
* @ param p _ port the port number for this URI
* @ throws MalformedURIException if p _ port is not - 1 and not a
* valid port number */
public void setPort ( int p_port ) throws MalformedURIException { } } | if ( p_port >= 0 && p_port <= 65535 ) { if ( m_host == null ) { throw new MalformedURIException ( Utils . messages . createMessage ( MsgKey . ER_PORT_WHEN_HOST_NULL , null ) ) ; // " Port cannot be set when host is null ! " ) ;
} } else if ( p_port != - 1 ) { throw new MalformedURIException ( Utils . messages . createMessage ( MsgKey . ER_INVALID_PORT , null ) ) ; // " Invalid port number ! " ) ;
} m_port = p_port ; |
public class PolicyStatesInner { /** * Summarizes policy states for the resource .
* @ param resourceId Resource ID .
* @ param queryOptions Additional parameters for the operation
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ throws QueryFailureException thrown if the request is rejected by server
* @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
* @ return the SummarizeResultsInner object if successful . */
public SummarizeResultsInner summarizeForResource ( String resourceId , QueryOptions queryOptions ) { } } | return summarizeForResourceWithServiceResponseAsync ( resourceId , queryOptions ) . toBlocking ( ) . single ( ) . body ( ) ; |
public class AbstractValueExpression { /** * Otherwise , there is no binding possible , indicated by a null return . */
@ Override public List < AbstractExpression > bindingToIndexedExpression ( AbstractExpression expr ) { } } | if ( equals ( expr ) ) { return s_reusableImmutableEmptyBinding ; } return null ; |
public class WordForm { /** * setter for POS - sets POS tag for a given form
* @ generated
* @ param v value to set into the feature */
public void setPOS ( String v ) { } } | if ( WordForm_Type . featOkTst && ( ( WordForm_Type ) jcasType ) . casFeat_POS == null ) jcasType . jcas . throwFeatMissing ( "POS" , "com.digitalpebble.rasp.WordForm" ) ; jcasType . ll_cas . ll_setStringValue ( addr , ( ( WordForm_Type ) jcasType ) . casFeatCode_POS , v ) ; |
public class StrategyClassLoader { /** * ( non - Javadoc )
* @ see java . lang . ClassLoader # findClass ( java . lang . String ) */
@ Override protected Class < ? > findClass ( final String name ) throws ClassNotFoundException { } } | ByteBuffer def = AccessController . doPrivileged ( new PrivilegedAction < ByteBuffer > ( ) { public ByteBuffer run ( ) { return strategy . getClassDefinition ( name ) ; } } ) ; if ( def != null ) { Class < ? > result = super . defineClass ( name , def , null ) ; if ( result != null ) { super . resolveClass ( result ) ; return result ; } } throw new ClassNotFoundException ( name ) ; |
public class Parser { /** * syck _ parser _ add _ level */
public void addLevel ( int len , LevelStatus status ) { } } | if ( lvl_idx + 1 > lvl_capa ) { lvl_capa += YAML . ALLOC_CT ; levels = YAML . realloc ( levels , lvl_capa ) ; } levels [ lvl_idx ] = new Level ( ) ; levels [ lvl_idx ] . spaces = len ; levels [ lvl_idx ] . ncount = 0 ; levels [ lvl_idx ] . domain = levels [ lvl_idx - 1 ] . domain ; levels [ lvl_idx ] . status = status ; lvl_idx ++ ; |
public class ApiOvhPackxdsl { /** * Get this object properties
* REST : GET / pack / xdsl / { packName } / exchangeLite / services / { domain }
* @ param packName [ required ] The internal name of your pack
* @ param domain [ required ] */
public OvhExchangeLiteService packName_exchangeLite_services_domain_GET ( String packName , String domain ) throws IOException { } } | String qPath = "/pack/xdsl/{packName}/exchangeLite/services/{domain}" ; StringBuilder sb = path ( qPath , packName , domain ) ; String resp = exec ( qPath , "GET" , sb . toString ( ) , null ) ; return convertTo ( resp , OvhExchangeLiteService . class ) ; |
public class ModelLoaderUtils { /** * Deserialize the contents of a given configuration file .
* @ param clzz Class to deserialize into
* @ param configurationFileLocation Location of config file to load
* @ return Marshalled configuration class */
public static < T > T loadConfigurationModel ( Class < T > clzz , String configurationFileLocation ) { } } | System . out . println ( "Loading config file " + configurationFileLocation ) ; InputStream fileContents = null ; try { fileContents = getRequiredResourceAsStream ( configurationFileLocation ) ; return Jackson . load ( clzz , fileContents ) ; } catch ( IOException e ) { System . err . println ( "Failed to read the configuration file " + configurationFileLocation ) ; throw new RuntimeException ( e ) ; } finally { if ( fileContents != null ) { Utils . closeQuietly ( fileContents ) ; } } |
public class SarlConstructorBuilderImpl { /** * Add a formal parameter .
* @ param name the name of the formal parameter . */
public IFormalParameterBuilder addParameter ( String name ) { } } | IFormalParameterBuilder builder = this . parameterProvider . get ( ) ; builder . eInit ( this . sarlConstructor , name , getTypeResolutionContext ( ) ) ; return builder ; |
public class MatrixFunctions { /** * Applies the trigonometric < i > arctangend < / i > function element wise on this
* matrix . Note that this is an in - place operation .
* @ see MatrixFunctions # atan ( DoubleMatrix )
* @ return this matrix */
public static DoubleMatrix atani ( DoubleMatrix x ) { } } | /* # mapfct ( ' Math . atan ' ) # */
// RJPP - BEGIN - - - - -
for ( int i = 0 ; i < x . length ; i ++ ) x . put ( i , ( double ) Math . atan ( x . get ( i ) ) ) ; return x ; // RJPP - END - - - - - |
public class AbstractNotificationListener { /** * { @ inheritDoc } */
public void join ( Address address ) { } } | if ( trace ) log . tracef ( "join(%s)" , address ) ; Map < Address , Long > sr = shortRunning . get ( address . getWorkManagerId ( ) ) ; if ( sr == null ) sr = Collections . synchronizedMap ( new HashMap < Address , Long > ( ) ) ; sr . put ( address , Long . valueOf ( 0 ) ) ; shortRunning . put ( address . getWorkManagerId ( ) , sr ) ; Map < Address , Long > lr = longRunning . get ( address . getWorkManagerId ( ) ) ; if ( lr == null ) lr = Collections . synchronizedMap ( new HashMap < Address , Long > ( ) ) ; lr . put ( address , Long . valueOf ( 0 ) ) ; longRunning . put ( address . getWorkManagerId ( ) , lr ) ; |
public class RegularCellsMapper { /** * Returns the columns contained in the regular cells specified { @ link Row } . Note that not all the contained columns
* are returned , but only the regular cell ones .
* @ param row A { @ link Row } .
* @ return The columns contained in the regular cells specified { @ link Row } . */
@ SuppressWarnings ( "rawtypes" ) public Columns columns ( Row row ) { } } | ColumnFamily columnFamily = row . cf ; Columns columns = new Columns ( ) ; // Get row ' s columns iterator skipping clustering column
Iterator < Cell > cellIterator = columnFamily . iterator ( ) ; cellIterator . next ( ) ; // Stuff for grouping collection columns ( sets , lists and maps )
String name ; CollectionType collectionType ; while ( cellIterator . hasNext ( ) ) { Cell cell = cellIterator . next ( ) ; CellName cellName = cell . name ( ) ; ColumnDefinition columnDefinition = metadata . getColumnDefinition ( cellName ) ; if ( columnDefinition == null ) { continue ; } AbstractType < ? > valueType = columnDefinition . type ; ByteBuffer cellValue = cell . value ( ) ; name = cellName . cql3ColumnName ( metadata ) . toString ( ) ; if ( valueType . isCollection ( ) ) { collectionType = ( CollectionType < ? > ) valueType ; switch ( collectionType . kind ) { case SET : { AbstractType < ? > type = collectionType . nameComparator ( ) ; ByteBuffer value = cellName . collectionElement ( ) ; columns . add ( Column . fromDecomposed ( name , value , type ) ) ; break ; } case LIST : { AbstractType < ? > type = collectionType . valueComparator ( ) ; columns . add ( Column . fromDecomposed ( name , cellValue , type ) ) ; break ; } case MAP : { AbstractType < ? > type = collectionType . valueComparator ( ) ; ByteBuffer keyValue = cellName . collectionElement ( ) ; AbstractType < ? > keyType = collectionType . nameComparator ( ) ; String nameSufix = keyType . compose ( keyValue ) . toString ( ) ; columns . add ( Column . fromDecomposed ( name , nameSufix , cellValue , type ) ) ; break ; } } } else { columns . add ( Column . fromDecomposed ( name , cellValue , valueType ) ) ; } } return columns ; |
public class JsonStreamWriter { /** * Write a double attribute .
* @ param name attribute name
* @ param value attribute value */
public void writeNameValuePair ( String name , double value ) throws IOException { } } | internalWriteNameValuePair ( name , Double . toString ( value ) ) ; |
public class MultipleKeywordTree { /** * Checks whether the character is related to one of the current nodes ( the
* root node is always a current node ) .
* After the comparison the list of current nodes will be replaced .
* @ param c
* character
* @ return TRUE if successor nodes could be identified FALSE otherwise */
public boolean check ( final char c ) { } } | List < LetterNode < V > > newList = new ArrayList < LetterNode < V > > ( ) ; newList . add ( root ) ; LetterNode < V > current ; hits . clear ( ) ; int size = this . currentList . size ( ) ; for ( int i = 0 ; i < size ; i ++ ) { current = this . currentList . get ( i ) ; current = current . get ( c ) ; if ( current != null ) { newList . add ( current ) ; if ( current . isKeyword ( ) ) { hits . add ( current ) ; } } } this . currentList = newList ; return ! hits . isEmpty ( ) ; |
public class NumberFormatterBase { /** * Parse a string as a number pattern . */
protected static NumberPattern parse ( String pattern ) { } } | return NUMBER_PATTERN_CACHE . computeIfAbsent ( pattern , s -> new NumberPatternParser ( ) . parse ( s ) ) ; |
public class MockException { /** * Mock exception happens by possibility
* @ param possible
* @ param exceptionFactory
* @ throws E */
public static < E extends Exception > void possible ( double possible , Supplier < E > exceptionFactory ) throws E { } } | if ( Math . random ( ) < possible ) { throw exceptionFactory . get ( ) ; } |
public class TrmMeLinkRequestImpl { /** * Get the requesting ME UUID from the message .
* Javadoc description supplied by TrmMeLinkRequest interface . */
public SIBUuid8 getRequestingMeUuid ( ) { } } | byte [ ] b = ( byte [ ] ) jmo . getField ( TrmFirstContactAccess . BODY_MELINKREQUEST_REQUESTINGMEUUID ) ; if ( b != null ) return new SIBUuid8 ( b ) ; return null ; |
public class StringHelper { /** * Get a concatenated String from all non - < code > null < / code > and non empty
* elements of the passed container without a separator string . This the very
* generic version of { @ link # getConcatenatedOnDemand ( String , String ) } for an
* arbitrary number of elements .
* @ param aElements
* The container to convert . May be < code > null < / code > or empty .
* @ param aMapper
* The mapping function to convert from ELEMENTTYPE to String . May not be
* < code > null < / code > .
* @ return The concatenated string .
* @ param < ELEMENTTYPE >
* Iterable element type
* @ since 8.5.6 */
@ Nonnull public static < ELEMENTTYPE > String getImplodedMappedNonEmpty ( @ Nullable final Iterable < ? extends ELEMENTTYPE > aElements , @ Nonnull final Function < ? super ELEMENTTYPE , String > aMapper ) { } } | ValueEnforcer . notNull ( aMapper , "Mapper" ) ; final StringBuilder aSB = new StringBuilder ( ) ; if ( aElements != null ) for ( final ELEMENTTYPE aElement : aElements ) { final String sElement = aMapper . apply ( aElement ) ; if ( hasText ( sElement ) ) aSB . append ( sElement ) ; } return aSB . toString ( ) ; |
public class JCGLTextureFormats { /** * Check that the texture is of a depth ( not stencil ) renderable format .
* @ param t The texture format
* @ throws JCGLExceptionFormatError If the texture is not of the correct
* format */
public static void checkDepthOnlyRenderableTexture2D ( final JCGLTextureFormat t ) throws JCGLExceptionFormatError { } } | if ( ! isDepthRenderable ( t ) ) { final String m = String . format ( "Format %s is not depth-renderable" , t ) ; assert m != null ; throw new JCGLExceptionFormatError ( m ) ; } if ( isStencilRenderable ( t ) ) { final String m = String . format ( "Format %s is stencil-renderable: Must be used as a depth+stencil " + "attachment" , t ) ; assert m != null ; throw new JCGLExceptionFormatError ( m ) ; } |
public class CreateDefaultVpcRequest { /** * This method is intended for internal use only . Returns the marshaled request configured with additional
* parameters to enable operation dry - run . */
@ Override public Request < CreateDefaultVpcRequest > getDryRunRequest ( ) { } } | Request < CreateDefaultVpcRequest > request = new CreateDefaultVpcRequestMarshaller ( ) . marshall ( this ) ; request . addParameter ( "DryRun" , Boolean . toString ( true ) ) ; return request ; |
public class CronetClientTransport { /** * When the transport is in goAway state , we should stop it once all active streams finish . */
void stopIfNecessary ( ) { } } | synchronized ( lock ) { if ( goAway && ! stopped && streams . size ( ) == 0 ) { stopped = true ; } else { return ; } } listener . transportTerminated ( ) ; |
public class XmlObjectSerializer { /** * Inform the serializer that the given { @ link ElementDescriptor } will be used . This allows the serializer to bind a prefix early .
* @ param elementDescriptor
* The { @ link ElementDescriptor } that will be used . */
public void useNamespace ( SerializerContext serializerContext , ElementDescriptor < ? > elementDescriptor ) { } } | useNamespace ( serializerContext , elementDescriptor . qualifiedName . namespace ) ; |
public class ServicesInner { /** * Check service health status .
* The services resource is the top - level resource that represents the Data Migration Service . This action performs a health check and returns the status of the service and virtual machine size .
* @ param groupName Name of the resource group
* @ param serviceName Name of the service
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the DataMigrationServiceStatusResponseInner object */
public Observable < DataMigrationServiceStatusResponseInner > checkStatusAsync ( String groupName , String serviceName ) { } } | return checkStatusWithServiceResponseAsync ( groupName , serviceName ) . map ( new Func1 < ServiceResponse < DataMigrationServiceStatusResponseInner > , DataMigrationServiceStatusResponseInner > ( ) { @ Override public DataMigrationServiceStatusResponseInner call ( ServiceResponse < DataMigrationServiceStatusResponseInner > response ) { return response . body ( ) ; } } ) ; |
public class ConfigurationItemMarshaller { /** * Marshall the given parameter object . */
public void marshall ( ConfigurationItem configurationItem , ProtocolMarshaller protocolMarshaller ) { } } | if ( configurationItem == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( configurationItem . getVersion ( ) , VERSION_BINDING ) ; protocolMarshaller . marshall ( configurationItem . getAccountId ( ) , ACCOUNTID_BINDING ) ; protocolMarshaller . marshall ( configurationItem . getConfigurationItemCaptureTime ( ) , CONFIGURATIONITEMCAPTURETIME_BINDING ) ; protocolMarshaller . marshall ( configurationItem . getConfigurationItemStatus ( ) , CONFIGURATIONITEMSTATUS_BINDING ) ; protocolMarshaller . marshall ( configurationItem . getConfigurationStateId ( ) , CONFIGURATIONSTATEID_BINDING ) ; protocolMarshaller . marshall ( configurationItem . getConfigurationItemMD5Hash ( ) , CONFIGURATIONITEMMD5HASH_BINDING ) ; protocolMarshaller . marshall ( configurationItem . getArn ( ) , ARN_BINDING ) ; protocolMarshaller . marshall ( configurationItem . getResourceType ( ) , RESOURCETYPE_BINDING ) ; protocolMarshaller . marshall ( configurationItem . getResourceId ( ) , RESOURCEID_BINDING ) ; protocolMarshaller . marshall ( configurationItem . getResourceName ( ) , RESOURCENAME_BINDING ) ; protocolMarshaller . marshall ( configurationItem . getAwsRegion ( ) , AWSREGION_BINDING ) ; protocolMarshaller . marshall ( configurationItem . getAvailabilityZone ( ) , AVAILABILITYZONE_BINDING ) ; protocolMarshaller . marshall ( configurationItem . getResourceCreationTime ( ) , RESOURCECREATIONTIME_BINDING ) ; protocolMarshaller . marshall ( configurationItem . getTags ( ) , TAGS_BINDING ) ; protocolMarshaller . marshall ( configurationItem . getRelatedEvents ( ) , RELATEDEVENTS_BINDING ) ; protocolMarshaller . marshall ( configurationItem . getRelationships ( ) , RELATIONSHIPS_BINDING ) ; protocolMarshaller . marshall ( configurationItem . getConfiguration ( ) , CONFIGURATION_BINDING ) ; protocolMarshaller . 
marshall ( configurationItem . getSupplementaryConfiguration ( ) , SUPPLEMENTARYCONFIGURATION_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class WebSocketNativeHandler { /** * Connect to the WebSocket
* @ throws Exception */
@ Override public void processConnect ( WebSocketChannel channel , WSURI location , String [ ] protocols ) { } } | LOG . entering ( CLASS_NAME , "connect" , channel ) ; nextHandler . processConnect ( channel , location , protocols ) ; |
public class DOM { /** * Extract a boolean value from { @ code node } or return { @ code defaultValue }
* if there is no boolean value at { @ code xpath }
* @ param node the node with the wanted attribute .
* @ param xpath the path to extract .
* @ param defaultValue the default value .
* @ return the value of the path , if existing , else
* { @ code defaultValue } */
public static Boolean selectBoolean ( Node node , String xpath , Boolean defaultValue ) { } } | return selector . selectBoolean ( node , xpath , defaultValue ) ; |
public class BatchMutation { /** * Add a SuperColumn insertion ( or update ) to the batch mutation request . */
public BatchMutation < K > addSuperInsertion ( K key , List < String > columnFamilies , SuperColumn superColumn ) { } } | Mutation mutation = new Mutation ( ) ; mutation . setColumn_or_supercolumn ( new ColumnOrSuperColumn ( ) . setSuper_column ( superColumn ) ) ; addMutation ( key , columnFamilies , mutation ) ; return this ; |
public class DDM { /** * Adds a new boolean trial to the detector , with the goal of detecting when
* the number of successful trials ( { @ code true } ) drifts to a new value .
* This detector begins storing a history of the { @ code obj } inputs only
* once it has entered a warning state . < br >
* This detector is specifically meant to detect drops in the success rate ,
* and will not cause any warning or drift detections for increases in the
* success rate .
* @ param trial the result of the trial
* @ param obj the object to associate with the trial
* @ return { @ code true } if we are in a warning or drift state ,
* { @ code false } if we are not */
public boolean addSample ( boolean trial , V obj ) { } } | if ( drifting ) throw new UnhandledDriftException ( ) ; if ( ! trial ) fails ++ ; time ++ ; if ( time < minSamples ) return false ; final double p_i = fails / ( double ) time ; final double s_i = Math . sqrt ( p_i * ( 1 - p_i ) / time ) ; final double ps = p_i + s_i ; // values are updated when pi + si is lower than pmin + smin
if ( ps < p_min + s_min ) { p_min = p_i ; s_min = s_i ; } if ( ps > p_min + warningThreshold * s_min ) { if ( ! warning ) // first entry
{ warning = true ; driftStart = time - 1 ; } addToHistory ( obj ) ; if ( ps > p_min + driftThreshold * s_min ) { warning = false ; drifting = true ; } return true ; } else // everything is good
{ warning = false ; driftStart = - 1 ; clearHistory ( ) ; return false ; } |
public class ViewsTransitionAnimator { /** * Replaces old animator with new one preserving state . */
private void swapAnimator ( ViewPositionAnimator old , ViewPositionAnimator next ) { } } | final float position = old . getPosition ( ) ; final boolean isLeaving = old . isLeaving ( ) ; final boolean isAnimating = old . isAnimating ( ) ; if ( GestureDebug . isDebugAnimator ( ) ) { Log . d ( TAG , "Swapping animator for " + getRequestedId ( ) ) ; } cleanupAnimator ( old ) ; if ( getFromView ( ) != null ) { next . enter ( getFromView ( ) , false ) ; } else if ( getFromPos ( ) != null ) { next . enter ( getFromPos ( ) , false ) ; } initAnimator ( next ) ; next . setState ( position , isLeaving , isAnimating ) ; |
public class FixedURLGenerator { /** * Generate the fixed urls and sets it where required for a content specification .
* @ param contentSpec The content spec to generate fixed urls for .
* @ param missingOnly Generate only the missing fixed urls .
* @ param fixedUrlPropertyTagId The Fixed URL Property Tag ID . */
public static void generateFixedUrls ( final ContentSpec contentSpec , boolean missingOnly , final Integer fixedUrlPropertyTagId ) { } } | final Set < String > existingFixedUrls = new HashSet < String > ( ) ; final Set < SpecNode > nodesWithoutFixedUrls = new HashSet < SpecNode > ( ) ; final List < SpecNode > specNodes = getAllSpecNodes ( contentSpec ) ; // Collect any current fixed urls or nodes that need configuring
if ( missingOnly ) { collectFixedUrlInformation ( specNodes , nodesWithoutFixedUrls , existingFixedUrls ) ; } generateFixedUrlForNodes ( nodesWithoutFixedUrls , existingFixedUrls , fixedUrlPropertyTagId ) ; |
public class IDOS { /** * Computes all IDOS scores .
* @ param ids the DBIDs to process
* @ param knnQ the KNN query
* @ param intDims Precomputed intrinsic dimensionalities
* @ param idosminmax Output of minimum and maximum , for metadata
* @ return ID scores */
protected DoubleDataStore computeIDOS ( DBIDs ids , KNNQuery < O > knnQ , DoubleDataStore intDims , DoubleMinMax idosminmax ) { } } | WritableDoubleDataStore ldms = DataStoreUtil . makeDoubleStorage ( ids , DataStoreFactory . HINT_STATIC ) ; FiniteProgress prog = LOG . isVerbose ( ) ? new FiniteProgress ( "ID Outlier Scores for objects" , ids . size ( ) , LOG ) : null ; for ( DBIDIter iter = ids . iter ( ) ; iter . valid ( ) ; iter . advance ( ) ) { final KNNList neighbors = knnQ . getKNNForDBID ( iter , k_r ) ; double sum = 0. ; int cnt = 0 ; for ( DoubleDBIDListIter neighbor = neighbors . iter ( ) ; neighbor . valid ( ) ; neighbor . advance ( ) ) { if ( DBIDUtil . equal ( iter , neighbor ) ) { continue ; } final double id = intDims . doubleValue ( neighbor ) ; sum += id > 0 ? 1.0 / id : 0. ; if ( ++ cnt == k_r ) { // Always stop after at most k _ r elements .
break ; } } final double id_q = intDims . doubleValue ( iter ) ; final double idos = id_q > 0 ? id_q * sum / cnt : 0. ; ldms . putDouble ( iter , idos ) ; idosminmax . put ( idos ) ; LOG . incrementProcessed ( prog ) ; } LOG . ensureCompleted ( prog ) ; return ldms ; |
public class TransformGZipBytes { /** * Transforms the data from the class attribute to the object required by the datasource
* @ param cpoAdapter The CpoAdapter for the datasource where the attribute is being persisted
* @ param parentObject The object that contains the attribute being persisted .
* @ param attributeObject The object that represents the attribute being persisted .
* @ return The object to be stored in the datasource
* @ throws CpoException */
@ Override public byte [ ] transformOut ( JdbcPreparedStatementFactory jpsf , byte [ ] attributeObject ) throws CpoException { } } | byte [ ] retBytes = null ; ByteArrayOutputStream baos = new ByteArrayOutputStream ( ) ; try { if ( attributeObject != null ) { if ( attributeObject . length > 0 ) { GZIPOutputStream os = new GZIPOutputStream ( baos ) ; os . write ( attributeObject ) ; os . flush ( ) ; os . close ( ) ; baos . flush ( ) ; baos . close ( ) ; retBytes = baos . toByteArray ( ) ; } else { retBytes = new byte [ 0 ] ; } } } catch ( Exception e ) { String msg = "Error GZipping Byte Array" ; logger . error ( msg , e ) ; throw new CpoException ( msg , e ) ; } return retBytes ; |
public class BaseMessage { /** * Setup this message given this internal data structure .
* @ param data The data in an intermediate format . */
public void createMessageDataDesc ( String strMessageDataClassName ) { } } | if ( strMessageDataClassName == null ) { if ( this . getMessageHeader ( ) != null ) strMessageDataClassName = ( String ) this . getMessageHeader ( ) . get ( BaseMessageHeader . INTERNAL_MESSAGE_CLASS ) ; } MessageRecordDesc messageRecordDesc = ( MessageRecordDesc ) ClassServiceUtility . getClassService ( ) . makeObjectFromClassName ( strMessageDataClassName ) ; if ( messageRecordDesc != null ) messageRecordDesc . init ( this , null ) ; |
public class EffortReportService { /** * Returns the right string representation of the effort level based on given number of points . */
public static String getEffortLevelDescription ( Verbosity verbosity , int points ) { } } | EffortLevel level = EffortLevel . forPoints ( points ) ; switch ( verbosity ) { case ID : return level . name ( ) ; case VERBOSE : return level . getVerboseDescription ( ) ; case SHORT : default : return level . getShortDescription ( ) ; } |
public class TupleGenerator { /** * Returns the set of bindings that provide at least one of the given properties */
private Set < VarBindingDef > getPropertyProviders ( Set < String > properties ) { } } | Set < VarBindingDef > bindings = new HashSet < VarBindingDef > ( ) ; for ( String property : properties ) { bindings . addAll ( propertyProviders_ . get ( property ) ) ; } return bindings ; |
public class CacheUtil { /** * Copies the < tt > resource < / tt > to < tt > copy < / tt > . Decompression is
* performed if the resource file is identified as a GZIP - encoded file .
* @ param resource { @ link File } , the resource to copy
* @ param resourceLocation { @ link String } , the resource location url
* @ param copy { @ link File } , the file to copy to
* @ return { @ link File } , the copied file
* @ throws ResourceDownloadError Thrown if an IO error copying the resource */
private static File copyWithDecompression ( final File resource , final String resourceLocation , final File copy ) throws ResourceDownloadError { } } | GZIPInputStream gzipis = null ; FileOutputStream fout = null ; try { MagicNumberFileFilter mnff = new MagicNumberFileFilter ( GZIP_MAGIC_NUMBER ) ; if ( mnff . accept ( resource ) ) { gzipis = new GZIPInputStream ( new FileInputStream ( resource ) ) ; byte [ ] buffer = new byte [ 8192 ] ; fout = new FileOutputStream ( copy ) ; int length ; while ( ( length = gzipis . read ( buffer , 0 , 8192 ) ) != - 1 ) { fout . write ( buffer , 0 , length ) ; } } else { copyFile ( resource , copy ) ; } } catch ( IOException e ) { String msg = e . getMessage ( ) ; ResourceDownloadError r = new ResourceDownloadError ( resourceLocation , msg ) ; r . initCause ( e ) ; throw r ; } finally { // clean up all I / O resources
closeQuietly ( fout ) ; closeQuietly ( gzipis ) ; } return copy ; |
public class WikiScannerUtil { /** * Skips the specified sequence if it starts from the given position in the
* character array .
* @ param array the array of characters
* @ param arrayPos the position of the first character in the array ;
* starting from this position the sequence should be skipped
* @ param sequence the sequence of characters to skip
* @ return a new value of the character counter */
public static int skipSequence ( char [ ] array , int arrayPos , char [ ] sequence ) { } } | int i ; int j ; for ( i = arrayPos , j = 0 ; i < array . length && j < sequence . length ; i ++ , j ++ ) { if ( array [ i ] != sequence [ j ] ) { break ; } } return j == sequence . length ? i : arrayPos ; |
public class XFastEventList { /** * Consolidates this fast event list . Consolidation implies , that all
* overflow and skipping data structures are freed , and the buffered
* representation is brought completely in - line with the virtual current
* contents of the list .
* The actual consolidation will be skipped , if no need for it is detected
* by the algorithm .
* @ return Whether consolidation has been performed . */
public synchronized boolean consolidate ( ) throws IOException { } } | if ( isTainted ( ) ) { // proceed with consolidation
XSequentialEventBuffer nBuffer = new XSequentialEventBuffer ( buffer . getProvider ( ) , this . attributeMapSerializer ) ; int overflowIndex = 0 ; int fileBufferIndex = 0 ; for ( int i = 0 ; i < size ; i ++ ) { if ( overflowIndex < overflowSize && overflowIndices [ overflowIndex ] == i ) { nBuffer . append ( overflowEntries [ overflowIndex ] ) ; overflowIndex ++ ; } else { while ( holeFlags . get ( fileBufferIndex ) == true ) { fileBufferIndex ++ ; } nBuffer . append ( buffer . get ( fileBufferIndex ) ) ; fileBufferIndex ++ ; } } buffer . cleanup ( ) ; buffer = nBuffer ; overflowSize = 0 ; holeFlags . clear ( ) ; return true ; } else { return false ; } |
public class InternalSARLParser { /** * InternalSARL . g : 16569:1 : ruleJvmTypeParameter returns [ EObject current = null ] : ( ( ( lv _ name _ 0_0 = ruleValidID ) ) ( ( ( lv _ constraints _ 1_0 = ruleJvmUpperBound ) ) ( ( lv _ constraints _ 2_0 = ruleJvmUpperBoundAnded ) ) * ) ? ) ; */
public final EObject ruleJvmTypeParameter ( ) throws RecognitionException { } } | EObject current = null ; AntlrDatatypeRuleToken lv_name_0_0 = null ; EObject lv_constraints_1_0 = null ; EObject lv_constraints_2_0 = null ; enterRule ( ) ; try { // InternalSARL . g : 16575:2 : ( ( ( ( lv _ name _ 0_0 = ruleValidID ) ) ( ( ( lv _ constraints _ 1_0 = ruleJvmUpperBound ) ) ( ( lv _ constraints _ 2_0 = ruleJvmUpperBoundAnded ) ) * ) ? ) )
// InternalSARL . g : 16576:2 : ( ( ( lv _ name _ 0_0 = ruleValidID ) ) ( ( ( lv _ constraints _ 1_0 = ruleJvmUpperBound ) ) ( ( lv _ constraints _ 2_0 = ruleJvmUpperBoundAnded ) ) * ) ? )
{ // InternalSARL . g : 16576:2 : ( ( ( lv _ name _ 0_0 = ruleValidID ) ) ( ( ( lv _ constraints _ 1_0 = ruleJvmUpperBound ) ) ( ( lv _ constraints _ 2_0 = ruleJvmUpperBoundAnded ) ) * ) ? )
// InternalSARL . g : 16577:3 : ( ( lv _ name _ 0_0 = ruleValidID ) ) ( ( ( lv _ constraints _ 1_0 = ruleJvmUpperBound ) ) ( ( lv _ constraints _ 2_0 = ruleJvmUpperBoundAnded ) ) * ) ?
{ // InternalSARL . g : 16577:3 : ( ( lv _ name _ 0_0 = ruleValidID ) )
// InternalSARL . g : 16578:4 : ( lv _ name _ 0_0 = ruleValidID )
{ // InternalSARL . g : 16578:4 : ( lv _ name _ 0_0 = ruleValidID )
// InternalSARL . g : 16579:5 : lv _ name _ 0_0 = ruleValidID
{ if ( state . backtracking == 0 ) { newCompositeNode ( grammarAccess . getJvmTypeParameterAccess ( ) . getNameValidIDParserRuleCall_0_0 ( ) ) ; } pushFollow ( FOLLOW_155 ) ; lv_name_0_0 = ruleValidID ( ) ; state . _fsp -- ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { if ( current == null ) { current = createModelElementForParent ( grammarAccess . getJvmTypeParameterRule ( ) ) ; } set ( current , "name" , lv_name_0_0 , "org.eclipse.xtend.core.Xtend.ValidID" ) ; afterParserOrEnumRuleCall ( ) ; } } } // InternalSARL . g : 16596:3 : ( ( ( lv _ constraints _ 1_0 = ruleJvmUpperBound ) ) ( ( lv _ constraints _ 2_0 = ruleJvmUpperBoundAnded ) ) * ) ?
int alt380 = 2 ; int LA380_0 = input . LA ( 1 ) ; if ( ( LA380_0 == 28 ) ) { alt380 = 1 ; } switch ( alt380 ) { case 1 : // InternalSARL . g : 16597:4 : ( ( lv _ constraints _ 1_0 = ruleJvmUpperBound ) ) ( ( lv _ constraints _ 2_0 = ruleJvmUpperBoundAnded ) ) *
{ // InternalSARL . g : 16597:4 : ( ( lv _ constraints _ 1_0 = ruleJvmUpperBound ) )
// InternalSARL . g : 16598:5 : ( lv _ constraints _ 1_0 = ruleJvmUpperBound )
{ // InternalSARL . g : 16598:5 : ( lv _ constraints _ 1_0 = ruleJvmUpperBound )
// InternalSARL . g : 16599:6 : lv _ constraints _ 1_0 = ruleJvmUpperBound
{ if ( state . backtracking == 0 ) { newCompositeNode ( grammarAccess . getJvmTypeParameterAccess ( ) . getConstraintsJvmUpperBoundParserRuleCall_1_0_0 ( ) ) ; } pushFollow ( FOLLOW_154 ) ; lv_constraints_1_0 = ruleJvmUpperBound ( ) ; state . _fsp -- ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { if ( current == null ) { current = createModelElementForParent ( grammarAccess . getJvmTypeParameterRule ( ) ) ; } add ( current , "constraints" , lv_constraints_1_0 , "org.eclipse.xtext.xbase.Xtype.JvmUpperBound" ) ; afterParserOrEnumRuleCall ( ) ; } } } // InternalSARL . g : 16616:4 : ( ( lv _ constraints _ 2_0 = ruleJvmUpperBoundAnded ) ) *
loop379 : do { int alt379 = 2 ; int LA379_0 = input . LA ( 1 ) ; if ( ( LA379_0 == 144 ) ) { alt379 = 1 ; } switch ( alt379 ) { case 1 : // InternalSARL . g : 16617:5 : ( lv _ constraints _ 2_0 = ruleJvmUpperBoundAnded )
{ // InternalSARL . g : 16617:5 : ( lv _ constraints _ 2_0 = ruleJvmUpperBoundAnded )
// InternalSARL . g : 16618:6 : lv _ constraints _ 2_0 = ruleJvmUpperBoundAnded
{ if ( state . backtracking == 0 ) { newCompositeNode ( grammarAccess . getJvmTypeParameterAccess ( ) . getConstraintsJvmUpperBoundAndedParserRuleCall_1_1_0 ( ) ) ; } pushFollow ( FOLLOW_154 ) ; lv_constraints_2_0 = ruleJvmUpperBoundAnded ( ) ; state . _fsp -- ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { if ( current == null ) { current = createModelElementForParent ( grammarAccess . getJvmTypeParameterRule ( ) ) ; } add ( current , "constraints" , lv_constraints_2_0 , "org.eclipse.xtext.xbase.Xtype.JvmUpperBoundAnded" ) ; afterParserOrEnumRuleCall ( ) ; } } } break ; default : break loop379 ; } } while ( true ) ; } break ; } } } if ( state . backtracking == 0 ) { leaveRule ( ) ; } } catch ( RecognitionException re ) { recover ( input , re ) ; appendSkippedTokens ( ) ; } finally { } return current ; |
public class OneWayCallable { /** * Executes the request by invoking the wrapped callable unless the call
* has already { @ link # isExpired ( ) expired } . If the call is expired or
* throws an exception while being executed , an appropriate message
* is logged out , including the { @ link # requestDescriptor request descriptor }
* this instance was created with . */
public void call ( ) { } } | if ( isExpired ( ) ) { logger . warn ( "One-way request {} is expired. Not executing." , requestDescriptor ) ; } else { try { getContent ( ) . call ( ) ; } catch ( Exception e ) { logger . error ( "Error while executing one-way request {}." , requestDescriptor , e ) ; } } |
public class StreamConfiguration { /** * Test if this StreamConfiguration and a paired EncodingConfiguration define
* a Subset compliant stream . FLAC defines a subset of options to
* ensure resulting FLAC streams are streamable .
* @ param ec EncodingConfiguration object to check against
* @ return true if these configurations are Subset compliant , false otherwise . */
public boolean isEncodingSubsetCompliant ( EncodingConfiguration ec ) { } } | boolean result = true ; result = isStreamSubsetCompliant ( ) ; if ( this . sampleRate <= 48000 ) { result &= ec . maximumLPCOrder <= 12 ; result &= ec . maximumRicePartitionOrder <= 8 ; } return result ; |
public class OSMReader { /** * TODO remove this ugly stuff via better preparsing phase ! E . g . putting every tags etc into a helper file ! */
double getTmpLatitude ( int id ) { } } | if ( id == EMPTY_NODE ) return Double . NaN ; if ( id < TOWER_NODE ) { // tower node
id = - id - 3 ; return nodeAccess . getLatitude ( id ) ; } else if ( id > - TOWER_NODE ) { // pillar node
id = id - 3 ; return pillarInfo . getLatitude ( id ) ; } else // e . g . if id is not handled from preparse ( e . g . was ignored via isInBounds )
return Double . NaN ; |
public class Ifc2x3tc1PackageImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
public EClass getIfcLinearForceMeasure ( ) { } } | if ( ifcLinearForceMeasureEClass == null ) { ifcLinearForceMeasureEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( Ifc2x3tc1Package . eNS_URI ) . getEClassifiers ( ) . get ( 695 ) ; } return ifcLinearForceMeasureEClass ; |
public class ScriptRuntime { /** * Helper function for toNumber , parseInt , and TokenStream . getToken . */
private static double stringToNumber ( String source , int sourceStart , int sourceEnd , int radix , boolean isPrefix ) { } } | char digitMax = '9' ; char lowerCaseBound = 'a' ; char upperCaseBound = 'A' ; if ( radix < 10 ) { digitMax = ( char ) ( '0' + radix - 1 ) ; } if ( radix > 10 ) { lowerCaseBound = ( char ) ( 'a' + radix - 10 ) ; upperCaseBound = ( char ) ( 'A' + radix - 10 ) ; } int end ; double sum = 0.0 ; for ( end = sourceStart ; end <= sourceEnd ; end ++ ) { char c = source . charAt ( end ) ; int newDigit ; if ( '0' <= c && c <= digitMax ) newDigit = c - '0' ; else if ( 'a' <= c && c < lowerCaseBound ) newDigit = c - 'a' + 10 ; else if ( 'A' <= c && c < upperCaseBound ) newDigit = c - 'A' + 10 ; else if ( ! isPrefix ) return NaN ; // isn ' t a prefix but found unexpected char
else break ; // unexpected char
sum = sum * radix + newDigit ; } if ( sourceStart == end ) { // stopped right at the beginning
return NaN ; } if ( sum > NativeNumber . MAX_SAFE_INTEGER ) { if ( radix == 10 ) { /* If we ' re accumulating a decimal number and the number
* is > = 2 ^ 53 , then the result from the repeated multiply - add
* above may be inaccurate . Call Java to get the correct
* answer . */
try { return Double . parseDouble ( source . substring ( sourceStart , end ) ) ; } catch ( NumberFormatException nfe ) { return NaN ; } } else if ( radix == 2 || radix == 4 || radix == 8 || radix == 16 || radix == 32 ) { /* The number may also be inaccurate for one of these bases .
* This happens if the addition in value * radix + digit causes
* a round - down to an even least significant mantissa bit
* when the first dropped bit is a one . If any of the
* following digits in the number ( which haven ' t been added
* in yet ) are nonzero then the correct action would have
* been to round up instead of down . An example of this
* occurs when reading the number 0x10000081 , which
* rounds to 0x100000 instead of 0x100000100. */
int bitShiftInChar = 1 ; int digit = 0 ; final int SKIP_LEADING_ZEROS = 0 ; final int FIRST_EXACT_53_BITS = 1 ; final int AFTER_BIT_53 = 2 ; final int ZEROS_AFTER_54 = 3 ; final int MIXED_AFTER_54 = 4 ; int state = SKIP_LEADING_ZEROS ; int exactBitsLimit = 53 ; double factor = 0.0 ; boolean bit53 = false ; // bit54 is the 54th bit ( the first dropped from the mantissa )
boolean bit54 = false ; int pos = sourceStart ; for ( ; ; ) { if ( bitShiftInChar == 1 ) { if ( pos == end ) break ; digit = source . charAt ( pos ++ ) ; if ( '0' <= digit && digit <= '9' ) digit -= '0' ; else if ( 'a' <= digit && digit <= 'z' ) digit -= 'a' - 10 ; else digit -= 'A' - 10 ; bitShiftInChar = radix ; } bitShiftInChar >>= 1 ; boolean bit = ( digit & bitShiftInChar ) != 0 ; switch ( state ) { case SKIP_LEADING_ZEROS : if ( bit ) { -- exactBitsLimit ; sum = 1.0 ; state = FIRST_EXACT_53_BITS ; } break ; case FIRST_EXACT_53_BITS : sum *= 2.0 ; if ( bit ) sum += 1.0 ; -- exactBitsLimit ; if ( exactBitsLimit == 0 ) { bit53 = bit ; state = AFTER_BIT_53 ; } break ; case AFTER_BIT_53 : bit54 = bit ; factor = 2.0 ; state = ZEROS_AFTER_54 ; break ; case ZEROS_AFTER_54 : if ( bit ) { state = MIXED_AFTER_54 ; } // fallthrough
case MIXED_AFTER_54 : factor *= 2 ; break ; } } switch ( state ) { case SKIP_LEADING_ZEROS : sum = 0.0 ; break ; case FIRST_EXACT_53_BITS : case AFTER_BIT_53 : // do nothing
break ; case ZEROS_AFTER_54 : // x1.1 - > x1 + 1 ( round up )
// x0.1 - > x0 ( round down )
if ( bit54 & bit53 ) sum += 1.0 ; sum *= factor ; break ; case MIXED_AFTER_54 : // x . 100 . . . 1 . . - > x + 1 ( round up )
// x . 0anything - > x ( round down )
if ( bit54 ) sum += 1.0 ; sum *= factor ; break ; } } /* We don ' t worry about inaccurate numbers for any other base . */
} return sum ; |
public class DiskClient { /** * Sets the labels on a disk . To learn more about labels , read the Labeling Resources
* documentation .
* < p > Sample code :
* < pre > < code >
* try ( DiskClient diskClient = DiskClient . create ( ) ) {
* ProjectZoneDiskResourceName resource = ProjectZoneDiskResourceName . of ( " [ PROJECT ] " , " [ ZONE ] " , " [ RESOURCE ] " ) ;
* ZoneSetLabelsRequest zoneSetLabelsRequestResource = ZoneSetLabelsRequest . newBuilder ( ) . build ( ) ;
* Operation response = diskClient . setLabelsDisk ( resource . toString ( ) , zoneSetLabelsRequestResource ) ;
* < / code > < / pre >
* @ param resource Name or id of the resource for this request .
* @ param zoneSetLabelsRequestResource
* @ throws com . google . api . gax . rpc . ApiException if the remote call fails */
@ BetaApi public final Operation setLabelsDisk ( String resource , ZoneSetLabelsRequest zoneSetLabelsRequestResource ) { } } | SetLabelsDiskHttpRequest request = SetLabelsDiskHttpRequest . newBuilder ( ) . setResource ( resource ) . setZoneSetLabelsRequestResource ( zoneSetLabelsRequestResource ) . build ( ) ; return setLabelsDisk ( request ) ; |
public class BindHandler { /** * Performs the actual binding ( sets the endpoint on the Connection , registers the connection )
* without any spoofing or other validation checks .
* When executed on the connection initiator side , the connection is registered on the remote address
* with which it was registered in { @ link TcpIpEndpointManager # connectionsInProgress } ,
* ignoring the { @ code remoteEndpoint } argument .
* @ param connection the connection to bind
* @ param remoteEndpoint the address of the remote endpoint
* @ param remoteAddressAliases alias addresses as provided by the remote endpoint , under which the connection
* will be registered . These are the public addresses configured on the remote . */
@ SuppressWarnings ( { } } | "checkstyle:npathcomplexity" } ) @ SuppressFBWarnings ( "RV_RETURN_VALUE_OF_PUTIFABSENT_IGNORED" ) private synchronized boolean bind0 ( TcpIpConnection connection , Address remoteEndpoint , Collection < Address > remoteAddressAliases , boolean reply ) { final Address remoteAddress = new Address ( connection . getRemoteSocketAddress ( ) ) ; if ( tcpIpEndpointManager . connectionsInProgress . contains ( remoteAddress ) ) { // this is the connection initiator side - - > register the connection under the address that was requested
remoteEndpoint = remoteAddress ; } if ( remoteEndpoint == null ) { if ( remoteAddressAliases == null ) { throw new IllegalStateException ( "Remote endpoint and remote address aliases cannot be both null" ) ; } else { // let it fail if no remoteEndpoint and no aliases are defined
remoteEndpoint = remoteAddressAliases . iterator ( ) . next ( ) ; } } connection . setEndPoint ( remoteEndpoint ) ; ioService . onSuccessfulConnection ( remoteEndpoint ) ; if ( reply ) { BindRequest bindRequest = new BindRequest ( logger , ioService , connection , remoteEndpoint , false ) ; bindRequest . send ( ) ; } if ( checkAlreadyConnected ( connection , remoteEndpoint ) ) { return false ; } if ( logger . isLoggable ( Level . FINEST ) ) { logger . finest ( "Registering connection " + connection + " to address " + remoteEndpoint ) ; } boolean returnValue = tcpIpEndpointManager . registerConnection ( remoteEndpoint , connection ) ; if ( remoteAddressAliases != null && returnValue ) { for ( Address remoteAddressAlias : remoteAddressAliases ) { if ( logger . isLoggable ( Level . FINEST ) ) { logger . finest ( "Registering connection " + connection + " to address alias " + remoteAddressAlias ) ; } tcpIpEndpointManager . connectionsMap . putIfAbsent ( remoteAddressAlias , connection ) ; } } return returnValue ; |
public class UpdateIdentityPoolResult { /** * Optional key : value pairs mapping provider names to provider app IDs .
* @ param supportedLoginProviders
* Optional key : value pairs mapping provider names to provider app IDs .
* @ return Returns a reference to this object so that method calls can be chained together . */
public UpdateIdentityPoolResult withSupportedLoginProviders ( java . util . Map < String , String > supportedLoginProviders ) { } } | setSupportedLoginProviders ( supportedLoginProviders ) ; return this ; |
public class SanityChecks { /** * Check matrix multiplication . This is already ATLAS / BLAS code . */
public static void checkMatrixMultiplication ( ) { } } | DoubleMatrix A = new DoubleMatrix ( new double [ ] [ ] { { 1.0 , 2.0 , 3.0 } , { 4.0 , 5.0 , 6.0 } , { 7.0 , 8.0 , 9.0 } } ) ; DoubleMatrix E = new DoubleMatrix ( new double [ ] [ ] { { 0.0 , 0.0 , 1.0 } , { 0.0 , 1.0 , 0.0 } , { 1.0 , 0.0 , 0.0 } } ) ; DoubleMatrix B = new DoubleMatrix ( new double [ ] [ ] { { 3.0 , 2.0 , 1.0 } , { 6.0 , 5.0 , 4.0 } , { 9.0 , 8.0 , 7.0 } } ) ; check ( "checking matrix multiplication" , A . mmul ( E ) . equals ( B ) ) ; |
public class RelationQueryNode { /** * Sets the relative path to the property in this relation .
* @ param relPath the relative path to a property .
* @ throws IllegalArgumentException if < code > relPath < / code > is absolute . */
public void setRelativePath ( QPath relPath ) { } } | if ( relPath != null && relPath . isAbsolute ( ) ) { throw new IllegalArgumentException ( "relPath must be relative" ) ; } this . relPath = relPath ; |
public class Transloadit { /** * Returns the bill for the month specified .
* @ param month for which bill to retrieve .
* @ param year for which bill to retrieve .
* @ return { @ link Response }
* @ throws RequestException if request to transloadit server fails .
* @ throws LocalOperationException if something goes wrong while running non - http operations . */
public Response getBill ( int month , int year ) throws RequestException , LocalOperationException { } } | Request request = new Request ( this ) ; return new Response ( request . get ( "/bill/" + year + String . format ( "-%02d" , month ) ) ) ; |
public class RegexMatcher { /** * Returns the match result as soon as in accepting state . Is not greedy .
* For a * will match a from aaa .
* @ param cc
* @ return
* @ throws java . lang . NullPointerException If not compiled */
@ Override public Status match ( int cc ) { } } | state = state . transit ( cc ) ; if ( state != null ) { if ( state . isAccepting ( ) ) { matched = state . getToken ( ) ; state = root ; return Status . Match ; } else { return Status . Ok ; } } else { state = root ; return Status . Error ; } |
public class BccClient { /** * Detaching the specified volume from a specified instance .
* You can detach the specified volume from a specified instance only
* when the instance is Running or Stopped ,
* otherwise , it ' s will get < code > 409 < / code > errorCode .
* @ param request The request containing all options for detaching the specified volume from a specified instance . */
public void detachVolume ( DetachVolumeRequest request ) { } } | checkNotNull ( request , "request should not be null." ) ; checkStringNotEmpty ( request . getVolumeId ( ) , "request volumeId should not be empty." ) ; checkStringNotEmpty ( request . getInstanceId ( ) , "request instanceId should not be empty." ) ; InternalRequest internalRequest = this . createRequest ( request , HttpMethodName . PUT , VOLUME_PREFIX , request . getVolumeId ( ) ) ; internalRequest . addParameter ( VolumeAction . detach . name ( ) , null ) ; fillPayload ( internalRequest , request ) ; invokeHttpClient ( internalRequest , AbstractBceResponse . class ) ; |
public class Ifc4PackageImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
@ Override public EEnum getIfcGeometricProjectionEnum ( ) { } } | if ( ifcGeometricProjectionEnumEEnum == null ) { ifcGeometricProjectionEnumEEnum = ( EEnum ) EPackage . Registry . INSTANCE . getEPackage ( Ifc4Package . eNS_URI ) . getEClassifiers ( ) . get ( 998 ) ; } return ifcGeometricProjectionEnumEEnum ; |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.