signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class TaskSendTargets {

    /**
     * Resolves the target task list for a tuple emitted directly to one
     * specific task (direct grouping), recording the send in the task stats
     * and logging the tuple when debugging is enabled for this tuple tree.
     * <p>
     * The legacy grouping validation (checking that the target component
     * expects a direct grouping) was deliberately removed to improve acker
     * performance.
     *
     * @param out_task_id id of the task the tuple is sent to
     * @param stream      stream the tuple is emitted on
     * @param tuple       the tuple values
     * @param anchors     anchor tuples, used for the debug check
     * @param root_id     root id of the tuple tree, used for the debug check
     * @return a mutable list containing only {@code out_task_id}
     */
    public List<Integer> get(Integer out_task_id, String stream, List<Object> tuple, Collection<Tuple> anchors, Object root_id) {
        // Count the outgoing tuple on this stream.
        taskStats.send_tuple(stream, 1);
        if (isDebug(anchors, root_id)) {
            LOG.info(debugIdStr + stream + " to " + out_task_id + ":" + tuple);
        }
        List<Integer> targets = new ArrayList<>();
        targets.add(out_task_id);
        return targets;
    }
}
public class PluginRepositoryUtil { /** * Parses a plugin option XML definition . * @ param option * The plugin option XML definition * @ return The parsed plugin option */ private static PluginOption parsePluginOption ( final Element option ) { } }
PluginOption po = new PluginOption ( ) ; po . setArgName ( option . attributeValue ( "argName" ) ) ; po . setArgsCount ( Integer . valueOf ( option . attributeValue ( "argsCount" , "1" ) ) ) ; po . setArgsOptional ( Boolean . valueOf ( option . attributeValue ( "optionalArgs" , "false" ) ) ) ; po . setDescription ( option . attributeValue ( "description" ) ) ; po . setHasArgs ( Boolean . parseBoolean ( option . attributeValue ( "hasArgs" , "false" ) ) ) ; po . setLongOpt ( option . attributeValue ( "longName" ) ) ; po . setOption ( option . attributeValue ( "shortName" ) ) ; po . setRequired ( Boolean . parseBoolean ( option . attributeValue ( "required" , "false" ) ) ) ; po . setType ( option . attributeValue ( "type" ) ) ; po . setValueSeparator ( option . attributeValue ( "separator" ) ) ; return po ;
public class FulltextIndexerModule { /** * ~ - - - private methods - - - - - */ private static int flushWordBuffer ( final StringBuilder lineBuffer , final StringBuilder wordBuffer , final boolean prepend ) { } }
int wordCount = 0 ; if ( wordBuffer . length ( ) > 0 ) { final String word = wordBuffer . toString ( ) . replaceAll ( "[\\n\\t]+" , " " ) ; if ( StringUtils . isNotBlank ( word ) ) { if ( prepend ) { lineBuffer . insert ( 0 , word ) ; } else { lineBuffer . append ( word ) ; } // increase word count wordCount = 1 ; } wordBuffer . setLength ( 0 ) ; } return wordCount ;
public class Utils {

    /**
     * Truncate the given GregorianCalendar date to the nearest week. This is
     * done by cloning it and rounding the value down to the closest Monday.
     * If the given date already occurs on a Monday, a copy of the same date
     * is returned.
     *
     * @param date A GregorianCalendar object.
     * @return A copy of the same value, truncated to the nearest Monday.
     */
    public static GregorianCalendar truncateToWeek(GregorianCalendar date) {
        GregorianCalendar result = (GregorianCalendar) date.clone();
        // DAY_OF_WEEK constants run SUNDAY=1 .. SATURDAY=7, so
        // (dow + 5) % 7 yields the number of days since the previous Monday
        // (0 for Monday itself, 6 for Sunday). This replaces the original
        // six-case switch with the equivalent arithmetic.
        int daysSinceMonday = (result.get(Calendar.DAY_OF_WEEK) + 5) % 7;
        if (daysSinceMonday > 0) {
            result.add(Calendar.DAY_OF_MONTH, -daysSinceMonday);
        }
        return result;
    }
}
public class Dates {

    /**
     * Returns a copy of the given date with the specified number of
     * milliseconds added (delegates to the generic {@code plus} helper with
     * the {@code MILLIS} field).
     *
     * @param date   the base date
     * @param millis the number of milliseconds to add (may be negative)
     * @return the adjusted date
     */
    public static Date plusMillis(Date date, int millis) {
        return plus(date, millis, DateTimeField.MILLIS);
    }
}
public class ServerCommandClient { /** * Stop the server by issuing a " stop " instruction to the server listener */ public ReturnCode stopServer ( boolean force ) { } }
return write ( force ? FORCE_STOP_COMMAND : STOP_COMMAND , ReturnCode . REDUNDANT_ACTION_STATUS , ReturnCode . ERROR_SERVER_STOP ) ;
public class SpdyHttpEncoder { /** * Checks if the given HTTP message should be considered as a last SPDY frame . * @ param httpMessage check this HTTP message * @ return whether the given HTTP message should generate a < em > last < / em > SPDY frame . */ private static boolean isLast ( HttpMessage httpMessage ) { } }
if ( httpMessage instanceof FullHttpMessage ) { FullHttpMessage fullMessage = ( FullHttpMessage ) httpMessage ; if ( fullMessage . trailingHeaders ( ) . isEmpty ( ) && ! fullMessage . content ( ) . isReadable ( ) ) { return true ; } } return false ;
public class GridFile {

    /**
     * Checks whether the parent directories of {@code path} are present (and
     * are directories). If {@code create_if_absent} is true, missing parent
     * directories are created by inserting DIR metadata entries into the cache.
     *
     * @param path             the file path whose parents are checked
     * @param create_if_absent whether missing parent directories are created
     * @return {@code true} if all parents exist (or were created); {@code false}
     *         if the path cannot be split or a parent is missing and creation
     *         was not requested
     * @throws IOException if a parent path component exists but is a regular file
     */
    protected boolean checkParentDirs(String path, boolean create_if_absent) throws IOException {
        String[] components = Util.components(path, File.separator);
        if (components == null)
            return false;
        if (components.length == 1) // no parent directories to create, e.g. "data.txt"
            return true;

        // Rebuild each ancestor path incrementally, checking/creating as we go.
        StringBuilder sb = new StringBuilder();
        boolean first = true;
        for (int i = 0; i < components.length - 1; i++) {
            String tmp = components[i];
            // A leading separator component is appended as-is (absolute path);
            // otherwise insert a separator between components after the first.
            if (!tmp.equals(File.separator)) {
                if (first)
                    first = false;
                else
                    sb.append(File.separator);
            }
            sb.append(tmp);
            String comp = sb.toString();
            if (exists(comp)) {
                // An existing plain file in the middle of the path makes the
                // requested path impossible to create.
                if (isFile(comp))
                    throw new IOException("cannot create " + path + " as component " + comp + " is a file");
            } else {
                if (create_if_absent)
                    // NOTE(review): creates the directory entry in the
                    // replicated cache with length 0 and DIR flags; presumably
                    // (short)-1 means "replicate everywhere" — confirm against
                    // the cache API.
                    cache.put(comp, new Metadata(0, System.currentTimeMillis(), chunk_size, Metadata.DIR), (short) -1, 0);
                else
                    return false;
            }
        }
        return true;
    }
}
public class PostCodeBuilder {

    /**
     * {@inheritDoc}
     *
     * <p>Creates a new {@link PostCodeImpl} with the given XML qualified-name
     * parts.</p>
     */
    @Override
    public PostCode buildObject(String namespaceURI, String localName, String namespacePrefix) {
        return new PostCodeImpl(namespaceURI, localName, namespacePrefix);
    }
}
public class BindTableGenerator {

    /**
     * Generates the table support class for an entity: a public class named
     * after the entity's table that carries the {@code CREATE TABLE} /
     * {@code DROP TABLE} DDL constants (including indexes, unique constraints
     * and foreign keys derived from the entity's columns and annotations),
     * then writes the generated source to the filer.
     *
     * @see com.abubusoft.kripton.processor.core.ModelElementVisitor#visit(com.abubusoft.kripton.processor.sqlite.model.SQLiteDatabaseSchema,
     *      com.abubusoft.kripton.processor.core.ModelClass)
     */
    @Override
    public void visit(SQLiteDatabaseSchema schema, SQLiteEntity entity) throws Exception {
        int indexCounter = 0;
        // generate the class name that represents the table
        String classTableName = getTableClassName(entity.getSimpleName());

        // Collect index definitions declared via the @BindSqlType annotation.
        FindIndexesVisitor indexVisitor = new FindIndexesVisitor();
        List<? extends AnnotationMirror> annotationMirrors = entity.getElement().getAnnotationMirrors();
        for (AnnotationMirror annotationMirror : annotationMirrors) {
            Map<? extends ExecutableElement, ? extends AnnotationValue> elementValues = annotationMirror.getElementValues();
            if (BindSqlType.class.getName().equals(annotationMirror.getAnnotationType().toString())) {
                for (Map.Entry<? extends ExecutableElement, ? extends AnnotationValue> entry : elementValues.entrySet()) {
                    // The 'entry.getKey()' here is the annotation attribute name.
                    String key = entry.getKey().getSimpleName().toString();
                    entry.getValue().accept(indexVisitor, key);
                }
            }
        }

        PackageElement pkg = elementUtils.getPackageOf(entity.getElement());
        String packageName = pkg.isUnnamed() ? null : pkg.getQualifiedName().toString();
        AnnotationProcessorUtilis.infoOnGeneratedClasses(BindDataSource.class, packageName, classTableName);

        // Start building the generated class (public, implements SQLiteTable).
        classBuilder = TypeSpec.classBuilder(classTableName).addModifiers(Modifier.PUBLIC).addSuperinterface(SQLiteTable.class);
        BindTypeContext context = new BindTypeContext(classBuilder, TypeUtility.typeName(packageName, classTableName), Modifier.STATIC, Modifier.PRIVATE);

        // javadoc for class
        classBuilder.addJavadoc("<p>");
        classBuilder.addJavadoc("\nEntity <code>$L</code> is associated to table <code>$L</code>\n", entity.getSimpleName(), entity.getTableName());
        classBuilder.addJavadoc("This class represents table associated to entity.\n");
        classBuilder.addJavadoc("</p>\n");
        JavadocUtility.generateJavadocGeneratedBy(classBuilder);
        classBuilder.addJavadoc(" @see $T\n", TypeUtility.className(entity.getName()));

        {
            // @formatter:off
            // table_name constant
            FieldSpec fieldSpec = FieldSpec.builder(String.class, "TABLE_NAME", Modifier.PUBLIC, Modifier.STATIC, Modifier.FINAL).initializer("\"$L\"", entity.getTableName()).addJavadoc("Costant represents typeName of table $L\n", entity.getTableName()).build();
            classBuilder.addField(fieldSpec);
            // @formatter:on
        }

        // Buffers accumulating the DDL fragments.
        StringBuilder bufferTable = new StringBuilder();
        StringBuilder bufferForeignKey = new StringBuilder();
        // shared between create table and drop table
        StringBuilder bufferIndexesCreate = new StringBuilder();
        StringBuilder bufferDropTable = new StringBuilder();
        StringBuilder bufferIndexesDrop = new StringBuilder();

        bufferTable.append("CREATE TABLE " + entity.getTableName());
        // define column typeName set
        String separator = "";
        bufferTable.append(" (");
        // for each column, that need to be persisted on table
        for (SQLProperty item : entity.getCollection()) {
            bufferTable.append(separator);
            bufferTable.append(item.columnName);
            bufferTable.append(" " + SQLTransformer.columnTypeAsString(item));
            switch (item.columnType) {
            case PRIMARY_KEY:
            case PRIMARY_KEY_UNMANGED:
                bufferTable.append(" PRIMARY KEY");
                // AUTOINCREMENT only for managed, non-String primary keys.
                if (!item.isType(String.class) && item.columnType == ColumnType.PRIMARY_KEY) {
                    bufferTable.append(" AUTOINCREMENT");
                }
                bufferTable.append(" NOT NULL");
                break;
            case UNIQUE:
                bufferTable.append(" UNIQUE");
                break;
            case INDEXED:
                bufferIndexesCreate.append(String.format(" CREATE INDEX idx_%s_%s ON %s(%s);", entity.getTableName(), item.columnName, entity.getTableName(), item.columnName));
                bufferIndexesDrop.append(String.format(" DROP INDEX IF EXISTS idx_%s_%s;", entity.getTableName(), item.columnName));
                break;
            case STANDARD:
                break;
            }
            boolean nullable = item.isNullable();
            // if it is not primary key and it is not nullable, then add NOT NULL
            if (!nullable && !(item.columnType == ColumnType.PRIMARY_KEY || item.columnType == ColumnType.PRIMARY_KEY_UNMANGED)) {
                bufferTable.append(" NOT NULL");
            }

            // foreign key handling
            String foreignClassName = item.foreignParentClassName;
            if (item.isForeignKey()) {
                SQLiteEntity reference = model.getEntity(foreignClassName);
                if (reference == null) {
                    // check if we have a DAO associated into DataSource definition
                    boolean found = false;
                    for (SQLiteDaoDefinition daoDefinition : schema.getCollection()) {
                        if (daoDefinition.getEntityClassName().equals(foreignClassName)) {
                            found = true;
                        }
                    }
                    if (!found) {
                        throw new NoDaoElementFound(schema, TypeUtility.className(foreignClassName));
                    } else {
                        throw new InvalidBeanTypeException(item, foreignClassName);
                    }
                }
                // foreign key can be used only with column type long/Long/String
                if (!TypeUtility.isTypeIncludedIn(item.getPropertyType().getTypeName(), Long.class, Long.TYPE, String.class)) {
                    throw new InvalidForeignKeyTypeException(item);
                }
                bufferForeignKey.append(", FOREIGN KEY(" + item.columnName + ") REFERENCES " + reference.getTableName() + "(" + reference.getPrimaryKey().columnName + ")");
                if (item.onDeleteAction != ForeignKeyAction.NO_ACTION) {
                    bufferForeignKey.append(" ON DELETE " + item.onDeleteAction.toString().replaceAll("_", " "));
                }
                if (item.onUpdateAction != ForeignKeyAction.NO_ACTION) {
                    bufferForeignKey.append(" ON UPDATE " + item.onUpdateAction.toString().replaceAll("_", " "));
                }
                // INSERT as dependency only if reference is another entity.
                // Same entity can not be own dependency.
                if (!entity.equals(reference)) {
                    entity.referedEntities.add(reference);
                }
            }
            separator = ", ";
        }

        // add foreign key clauses after the column list
        bufferTable.append(bufferForeignKey.toString());

        // add multicolumn indexes (UNIQUE)
        {
            Triple<String, String, String> multiIndexes = buldIndexes(entity, indexVisitor.getUniqueIndexes(), true, indexCounter);
            // add constraint unique (if present)
            bufferTable.append(multiIndexes.value2);
            // close the table definition
            bufferTable.append(");");
            if (!StringUtils.isEmpty(multiIndexes.value0)) {
                bufferTable.append(multiIndexes.value0 + ";");
                bufferIndexesDrop.append(multiIndexes.value1 + ";");
            }
        }

        // add single-column index creation statements
        if (bufferIndexesCreate.length() > 0) {
            bufferTable.append(bufferIndexesCreate.toString());
        }

        // add multicolumn indexes (NOT UNIQUE)
        {
            Pair<String, String> multiIndexes = buldIndexes(entity, indexVisitor.getNotUniqueIndexes(), false, indexCounter);
            if (!StringUtils.isEmpty(multiIndexes.value0)) {
                bufferTable.append(multiIndexes.value0 + ";");
                bufferIndexesDrop.append(multiIndexes.value1 + ";");
            }
        }

        {
            // create table SQL constant
            // @formatter:off
            FieldSpec.Builder fieldSpec = FieldSpec.builder(String.class, "CREATE_TABLE_SQL").addModifiers(Modifier.STATIC, Modifier.FINAL, Modifier.PUBLIC);
            // @formatter:on
            // @formatter:off
            fieldSpec.addJavadoc("<p>\nDDL to create table $L\n</p>\n", entity.getTableName());
            fieldSpec.addJavadoc("\n<pre>$L</pre>\n", bufferTable.toString());
            // @formatter:on
            classBuilder.addField(fieldSpec.initializer("$S", bufferTable.toString()).build());
        }

        // drop table SQL
        // indexes do not need to be dropped explicitly; they are automatically
        // destroyed with their tables — the DROP INDEX statements are kept for
        // indexes created outside the table definition
        if (bufferIndexesDrop.length() > 0) {
            bufferDropTable.append(bufferIndexesDrop.toString());
        }
        bufferDropTable.append("DROP TABLE IF EXISTS " + entity.getTableName() + ";");
        {
            // @formatter:off
            FieldSpec fieldSpec = FieldSpec.builder(String.class, "DROP_TABLE_SQL").addModifiers(Modifier.STATIC, Modifier.FINAL, Modifier.PUBLIC).initializer("$S", bufferDropTable.toString()).addJavadoc("<p>\nDDL to drop table $L\n</p>\n", entity.getTableName()).addJavadoc("\n<pre>$L</pre>\n", bufferDropTable.toString()).build();
            // @formatter:on
            classBuilder.addField(fieldSpec);
        }

        // visit each column to generate its per-column members
        for (ModelProperty item : entity.getCollection()) {
            item.accept(this);
        }
        ManagedPropertyPersistenceHelper.generateFieldPersistance(context, entity.getCollection(), PersistType.BYTE, true, Modifier.STATIC, Modifier.PUBLIC);

        // register the DDL with the model and emit the source file
        model.sqlForCreate.add(bufferTable.toString());
        model.sqlForDrop.add(bufferDropTable.toString());
        generateColumnsArray(entity);
        TypeSpec typeSpec = classBuilder.build();
        JavaWriterHelper.writeJava2File(filer, packageName, typeSpec);
    }
}
public class WebSocketNodeService {

    /**
     * Changes a user's id in the CacheSource: registers this node's SNCP
     * address under the new user id, then removes it from the old user id's
     * entry.
     *
     * @param olduserid Serializable the previous user id
     * @param newuserid Serializable the new user id
     * @param sncpAddr  InetSocketAddress this node's SNCP address
     * @return a future completing when the new-id registration (and the
     *         scheduling of the old-id removal) has finished
     */
    @Override
    public CompletableFuture<Void> changeUserid(Serializable olduserid, Serializable newuserid, InetSocketAddress sncpAddr) {
        tryAcquireSemaphore();
        // Add the address under the new user id first...
        CompletableFuture<Void> future = sncpNodeAddresses.appendSetItemAsync(SOURCE_SNCP_USERID_PREFIX + newuserid, InetSocketAddress.class, sncpAddr);
        // ...then remove it from the old id's set.
        // NOTE(review): the removeSetItemAsync future is not awaited — the
        // returned future completes once the removal has been *started*, not
        // finished; presumably intentional (fire-and-forget) — confirm.
        future = future.thenAccept((a) -> sncpNodeAddresses.removeSetItemAsync(SOURCE_SNCP_USERID_PREFIX + olduserid, InetSocketAddress.class, sncpAddr));
        // Release the semaphore when the chain completes (success or failure).
        if (semaphore != null) future.whenComplete((r, e) -> releaseSemaphore());
        if (logger.isLoggable(Level.FINEST)) logger.finest(WebSocketNodeService.class.getSimpleName() + ".event: " + olduserid + " changeUserid to " + newuserid + " from " + sncpAddr);
        return future;
    }
}
public class Item {

    /**
     * Sets this item to a constant-pool item of the given type whose value is
     * a number (the original comment claimed LONG only, but the method serves
     * any numeric item type; the hash is derived from the int value).
     *
     * @param type   the type of this item.
     * @param number the value of this item.
     * @return this item, for chaining.
     */
    Item set(final int type, final Number number) {
        this.type = type;
        this.number = number;
        // Mask with 0x7FFFFFFF keeps the hash non-negative.
        this.hashCode = 0x7FFFFFFF & (type + number.intValue());
        return this;
    }
}
public class AbstractChronology {

    /**
     * Resolves parsed {@code ChronoField} values into a date during parsing.
     * <p>
     * Most {@code TemporalField} implementations are resolved using the
     * resolve method on the field. By contrast, the {@code ChronoField} class
     * defines fields that only have meaning relative to the chronology, so
     * {@code ChronoField} date fields are resolved here in the context of a
     * specific chronology. They are resolved by this method, which may be
     * overridden in subclasses.
     * <ul>
     * <li>{@code EPOCH_DAY} - If present, this is converted to a date and all
     * other date fields are then cross-checked against the date.
     * <li>{@code PROLEPTIC_MONTH} - If present, it is split into the
     * {@code YEAR} and {@code MONTH_OF_YEAR}. In strict or smart mode the
     * field is validated.
     * <li>{@code YEAR_OF_ERA} and {@code ERA} - If both are present they are
     * combined to form a {@code YEAR}. In lenient mode the
     * {@code YEAR_OF_ERA} range is not validated; in smart and strict mode it
     * is. The {@code ERA} is validated for range in all three modes. If only
     * the {@code YEAR_OF_ERA} is present and the mode is smart or lenient,
     * the last available era is assumed; in strict mode it is left untouched.
     * If only the {@code ERA} is present it is left untouched.
     * <li>{@code YEAR}, {@code MONTH_OF_YEAR} and {@code DAY_OF_MONTH} - If
     * all three are present they are combined to form a date. The
     * {@code YEAR} is always validated. In lenient mode the date is built
     * from the first day of the first month of the year plus the month and
     * day differences; in smart mode an overflowing day-of-month is clamped
     * to the last day of the month; in strict mode the three fields must form
     * a valid date.
     * <li>{@code YEAR} and {@code DAY_OF_YEAR} - Combined to form a date. In
     * lenient mode the day difference is added to the first day of the year;
     * in smart or strict mode the two fields must form a valid date.
     * <li>{@code YEAR}, {@code MONTH_OF_YEAR}, {@code ALIGNED_WEEK_OF_MONTH}
     * and {@code ALIGNED_DAY_OF_WEEK_IN_MONTH} - Combined to form a date. In
     * lenient mode the month, week and day differences are added in turn. In
     * smart or strict mode all four fields are validated to their outer
     * ranges and the weeks/days are added to the first day of the month; in
     * strict mode the result is additionally checked not to have changed the
     * month.
     * <li>{@code YEAR}, {@code MONTH_OF_YEAR}, {@code ALIGNED_WEEK_OF_MONTH}
     * and {@code DAY_OF_WEEK} - As above, with the day-of-week adjusted to
     * the next-or-same matching day-of-week after years, months and weeks are
     * handled.
     * <li>{@code YEAR}, {@code ALIGNED_WEEK_OF_YEAR} and
     * {@code ALIGNED_DAY_OF_WEEK_IN_YEAR} - Combined to form a date in the
     * same manner as the month-based variants, with strict mode checking that
     * the adjustment did not change the year.
     * <li>{@code YEAR}, {@code ALIGNED_WEEK_OF_YEAR} and {@code DAY_OF_WEEK} -
     * As above, with the day-of-week adjusted to the next-or-same matching
     * day-of-week after years and weeks are handled.
     * </ul>
     * <p>
     * The default implementation is suitable for most calendar systems. If
     * {@link java.time.temporal.ChronoField#YEAR_OF_ERA} is found without an
     * {@link java.time.temporal.ChronoField#ERA} then the last era in
     * {@link #eras()} is used. The implementation assumes a 7 day week, that
     * the first day-of-month has the value 1, that the first day-of-year has
     * the value 1, and that the first of the month and year always exists.
     *
     * @param fieldValues the map of fields to values, which can be updated, not null
     * @param resolverStyle the requested type of resolve, not null
     * @return the resolved date, null if insufficient information to create a date
     * @throws java.time.DateTimeException if the date cannot be resolved, typically
     *         because of a conflict in the input data
     */
    @Override
    public ChronoLocalDate resolveDate(Map<TemporalField, Long> fieldValues, ResolverStyle resolverStyle) {
        // check epoch-day before inventing era: it fully determines the date
        if (fieldValues.containsKey(EPOCH_DAY)) {
            return dateEpochDay(fieldValues.remove(EPOCH_DAY));
        }

        // fix proleptic month before inventing era
        resolveProlepticMonth(fieldValues, resolverStyle);

        // invent era if necessary to resolve year-of-era
        ChronoLocalDate resolved = resolveYearOfEra(fieldValues, resolverStyle);
        if (resolved != null) {
            return resolved;
        }

        // build date — the order of these checks encodes the field-priority
        // rules documented above (most specific combination first)
        if (fieldValues.containsKey(YEAR)) {
            if (fieldValues.containsKey(MONTH_OF_YEAR)) {
                if (fieldValues.containsKey(DAY_OF_MONTH)) {
                    return resolveYMD(fieldValues, resolverStyle);
                }
                if (fieldValues.containsKey(ALIGNED_WEEK_OF_MONTH)) {
                    if (fieldValues.containsKey(ALIGNED_DAY_OF_WEEK_IN_MONTH)) {
                        return resolveYMAA(fieldValues, resolverStyle);
                    }
                    if (fieldValues.containsKey(DAY_OF_WEEK)) {
                        return resolveYMAD(fieldValues, resolverStyle);
                    }
                }
            }
            if (fieldValues.containsKey(DAY_OF_YEAR)) {
                return resolveYD(fieldValues, resolverStyle);
            }
            if (fieldValues.containsKey(ALIGNED_WEEK_OF_YEAR)) {
                if (fieldValues.containsKey(ALIGNED_DAY_OF_WEEK_IN_YEAR)) {
                    return resolveYAA(fieldValues, resolverStyle);
                }
                if (fieldValues.containsKey(DAY_OF_WEEK)) {
                    return resolveYAD(fieldValues, resolverStyle);
                }
            }
        }
        // not enough information to build a date
        return null;
    }
}
public class JCufft { /** * Convenience method for { @ link JCufft # cufftExecZ2Z ( cufftHandle , Pointer , Pointer , int ) } . * Accepts arrays for input and output data and automatically performs the host - device * and device - host copies . * @ see jcuda . jcufft . JCufft # cufftExecZ2Z ( cufftHandle , Pointer , Pointer , int ) */ public static int cufftExecZ2Z ( cufftHandle plan , double cIdata [ ] , double cOdata [ ] , int direction ) { } }
int cudaResult = 0 ; boolean inPlace = ( cIdata == cOdata ) ; // Allocate space for the input data on the device Pointer hostCIdata = Pointer . to ( cIdata ) ; Pointer deviceCIdata = new Pointer ( ) ; cudaResult = JCuda . cudaMalloc ( deviceCIdata , cIdata . length * Sizeof . DOUBLE ) ; if ( cudaResult != cudaError . cudaSuccess ) { if ( exceptionsEnabled ) { throw new CudaException ( "JCuda error: " + cudaError . stringFor ( cudaResult ) ) ; } return cufftResult . JCUFFT_INTERNAL_ERROR ; } // Set the output device data to be equal to the input // device data for in - place transforms , or allocate // the output device data if the transform is not // in - place Pointer hostCOdata = null ; Pointer deviceCOdata = null ; if ( inPlace ) { hostCOdata = hostCIdata ; deviceCOdata = deviceCIdata ; } else { hostCOdata = Pointer . to ( cOdata ) ; deviceCOdata = new Pointer ( ) ; cudaResult = JCuda . cudaMalloc ( deviceCOdata , cOdata . length * Sizeof . DOUBLE ) ; if ( cudaResult != cudaError . cudaSuccess ) { JCuda . cudaFree ( deviceCIdata ) ; if ( exceptionsEnabled ) { throw new CudaException ( "JCuda error: " + cudaError . stringFor ( cudaResult ) ) ; } return cufftResult . JCUFFT_INTERNAL_ERROR ; } } // Copy the host input data to the device cudaResult = JCuda . cudaMemcpy ( deviceCIdata , hostCIdata , cIdata . length * Sizeof . DOUBLE , cudaMemcpyKind . cudaMemcpyHostToDevice ) ; if ( cudaResult != cudaError . cudaSuccess ) { JCuda . cudaFree ( deviceCIdata ) ; if ( ! inPlace ) { JCuda . cudaFree ( deviceCOdata ) ; } if ( exceptionsEnabled ) { throw new CudaException ( "JCuda error: " + cudaError . stringFor ( cudaResult ) ) ; } return cufftResult . JCUFFT_INTERNAL_ERROR ; } // Execute the transform int result = cufftResult . CUFFT_SUCCESS ; try { result = JCufft . cufftExecZ2Z ( plan , deviceCIdata , deviceCOdata , direction ) ; } catch ( CudaException e ) { JCuda . cudaFree ( deviceCIdata ) ; if ( ! inPlace ) { JCuda . 
cudaFree ( deviceCOdata ) ; } result = cufftResult . JCUFFT_INTERNAL_ERROR ; } if ( result != cufftResult . CUFFT_SUCCESS ) { if ( exceptionsEnabled ) { throw new CudaException ( cufftResult . stringFor ( cudaResult ) ) ; } return result ; } // Copy the device output data to the host cudaResult = JCuda . cudaMemcpy ( hostCOdata , deviceCOdata , cOdata . length * Sizeof . DOUBLE , cudaMemcpyKind . cudaMemcpyDeviceToHost ) ; if ( cudaResult != cudaError . cudaSuccess ) { JCuda . cudaFree ( deviceCIdata ) ; if ( ! inPlace ) { JCuda . cudaFree ( deviceCOdata ) ; } if ( exceptionsEnabled ) { throw new CudaException ( "JCuda error: " + cudaError . stringFor ( cudaResult ) ) ; } return cufftResult . JCUFFT_INTERNAL_ERROR ; } // Free the device data cudaResult = JCuda . cudaFree ( deviceCIdata ) ; if ( cudaResult != cudaError . cudaSuccess ) { if ( exceptionsEnabled ) { throw new CudaException ( "JCuda error: " + cudaError . stringFor ( cudaResult ) ) ; } return cufftResult . JCUFFT_INTERNAL_ERROR ; } if ( ! inPlace ) { cudaResult = JCuda . cudaFree ( deviceCOdata ) ; if ( cudaResult != cudaError . cudaSuccess ) { if ( exceptionsEnabled ) { throw new CudaException ( "JCuda error: " + cudaError . stringFor ( cudaResult ) ) ; } return cufftResult . JCUFFT_INTERNAL_ERROR ; } } return result ;
public class EndpointMarshaller {

    /**
     * Marshall the given parameter object.
     *
     * @param endpoint           the endpoint to marshall; must not be null
     * @param protocolMarshaller the marshaller receiving the field bindings
     * @throws SdkClientException if {@code endpoint} is null or any field
     *         fails to marshall (the original cause is preserved)
     */
    public void marshall(Endpoint endpoint, ProtocolMarshaller protocolMarshaller) {
        if (endpoint == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Marshall each field against its static binding descriptor.
            protocolMarshaller.marshall(endpoint.getAddress(), ADDRESS_BINDING);
            protocolMarshaller.marshall(endpoint.getCachePeriodInMinutes(), CACHEPERIODINMINUTES_BINDING);
        } catch (Exception e) {
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class JFapByteBuffer { /** * This method releases the ReceivedData instance but does not release any of the WsByteBuffers * associated with this JFapByteBuffer . This can be used instead of the normal * < code > release ( ) < / code > call where the actual buffer has been used as the underlying backing * storage for another object ( such as a message ) and we do not want that to be released . */ public synchronized void releasePreservingBuffers ( ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( this , tc , "releasePreservingBuffers" ) ; released = true ; valid = false ; if ( receivedData != null ) { receivedData . release ( ) ; receivedData = null ; } // Simply null out the received buffer receivedBuffer = null ; dataList . clear ( ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( this , tc , "releasePreservingBuffers" ) ;
public class TaskSlot { /** * Mark the slot as free . A slot can only be marked as free if it ' s empty . * @ return True if the new state is free ; otherwise false */ public boolean markFree ( ) { } }
if ( isEmpty ( ) ) { state = TaskSlotState . FREE ; this . jobId = null ; this . allocationId = null ; return true ; } else { return false ; }
public class VelocityEngineFactory {

    /**
     * Returns the VelocityEngine associated with this factory. If this is the
     * first time we are using the engine, create it and initialise it.
     * <p>
     * Note that velocity engines are hugely resource intensive, so we don't
     * want too many of them. For the time being we have a single instance
     * stored as a static variable. This would only be a problem if the
     * VelocityLayout class ever wanted to use different engine configurations
     * (unlikely).
     *
     * @return the VelocityEngine associated with this factory.
     * @throws SystemException if the engine fails to initialise
     */
    public static synchronized VelocityEngine getVelocityEngine() {
        // Lazily create the singleton; safe because the method is synchronized.
        if (engine == null) {
            String fileTemplates = ConfigurationProperties.getVelocityFileTemplates();
            boolean cacheTemplates = ConfigurationProperties.getVelocityCacheTemplates();
            VelocityEngine newEngine = new VelocityEngine();
            Properties props = new Properties();

            // Configure the velocity template differently according to whether
            // we are in "source mode" (templates loaded from the file system
            // with hot reload) or not (classpath only).
            if (fileTemplates != null && !"".equals(fileTemplates)) {
                // Source mode: file loader first, classpath fallback, no caching
                // so template edits are picked up while running.
                LOG.info("Velocity engine running in source mode from " + fileTemplates);
                props.setProperty("resource.loader", "file,class");
                props.setProperty("file.resource.loader.path", fileTemplates);
                props.setProperty("file.resource.loader.cache", "false");
                props.setProperty("file.resource.loader.modificationCheckInterval", "2");
                props.setProperty("class.resource.loader.cache", "false");
                props.setProperty("class.resource.loader.modificationCheckInterval", "2");
                props.setProperty("class.resource.loader.class", "org.apache.velocity.runtime.resource.loader.ClasspathResourceLoader");
            } else {
                // Classpath-only mode; caching controlled by configuration.
                String cache = String.valueOf(cacheTemplates);
                props.setProperty("class.resource.loader.cache", cache);
                props.setProperty("resource.loader", "class");
                props.setProperty("class.resource.loader.class", "org.apache.velocity.runtime.resource.loader.ClasspathResourceLoader");
            }

            // Setup commons logging for velocity
            props.setProperty(RuntimeConstants.RUNTIME_LOG_LOGSYSTEM_CLASS, "com.github.bordertech.wcomponents.velocity.VelocityLogger");

            // Set up access to the common velocity macros.
            props.setProperty(RuntimeConstants.VM_LIBRARY, ConfigurationProperties.getVelocityMacroLibrary());

            try {
                if (LOG.isInfoEnabled()) {
                    // Dump properties for diagnostics before initialisation
                    StringWriter writer = new StringWriter();
                    props.list(new PrintWriter(writer));
                    LOG.info("Configuring velocity with the following properties...\n" + writer);
                }
                newEngine.init(props);
            } catch (Exception ex) {
                throw new SystemException("Failed to configure VelocityEngine", ex);
            }
            // Publish the fully-initialised engine only after init succeeded.
            engine = newEngine;
        }
        return engine;
    }
}
public class DocumentSubscriptions { /** * It opens a subscription and starts pulling documents since a last processed document for that subscription . * The connection options determine client and server cooperation rules like document batch sizes or a timeout in a matter of which a client * needs to acknowledge that batch has been processed . The acknowledgment is sent after all documents are processed by subscription ' s handlers . * There can be only a single client that is connected to a subscription . * @ param clazz Entity class * @ param subscriptionName The name of subscription * @ param < T > Entity class * @ return Subscription object that allows to add / remove subscription handlers . */ public < T > SubscriptionWorker < Revision < T > > getSubscriptionWorkerForRevisions ( Class < T > clazz , String subscriptionName ) { } }
return getSubscriptionWorkerForRevisions ( clazz , subscriptionName , null ) ;
public class SparkStorageUtils { /** * Save a { @ code JavaRDD < List < Writable > > } to a Hadoop { @ link org . apache . hadoop . io . MapFile } . Each record is * given a < i > unique and contiguous < / i > { @ link LongWritable } key , and values are stored as * { @ link RecordWritable } instances . < br > * < b > Note 1 < / b > : If contiguous keys are not required , using a sequence file instead is preferable from a performance * point of view . Contiguous keys are often only required for non - Spark use cases , such as with * { @ link org . datavec . hadoop . records . reader . mapfile . MapFileRecordReader } < br > * < b > Note 2 < / b > : This use a MapFile interval of { @ link # DEFAULT _ MAP _ FILE _ INTERVAL } , which is usually suitable for * use cases such as { @ link org . datavec . hadoop . records . reader . mapfile . MapFileRecordReader } . Use * { @ link # saveMapFile ( String , JavaRDD , int , Integer ) } or { @ link # saveMapFile ( String , JavaRDD , Configuration , Integer ) } * to customize this . < br > * Use { @ link # restoreMapFile ( String , JavaSparkContext ) } to restore values saved with this method . * @ param path Path to save the MapFile * @ param rdd RDD to save * @ see # saveMapFileSequences ( String , JavaRDD ) * @ see # saveSequenceFile ( String , JavaRDD ) */ public static void saveMapFile ( String path , JavaRDD < List < Writable > > rdd ) { } }
saveMapFile ( path , rdd , DEFAULT_MAP_FILE_INTERVAL , null ) ;
public class CertificateWriter { /** * Write the given { @ link X509Certificate } into the given { @ link File } in the given * { @ link KeyFileFormat } format . * @ param certificate * the certificate * @ param file * the file to write in * @ param fileFormat * the file format to write * @ throws IOException * Signals that an I / O exception has occurred . * @ throws CertificateEncodingException * is thrown if an encoding error occurs . */ public static void write ( final X509Certificate certificate , final @ NonNull File file , KeyFileFormat fileFormat ) throws IOException , CertificateEncodingException { } }
write ( certificate , new FileOutputStream ( file ) , fileFormat ) ;
public class DescribeTrustedAdvisorChecksResult { /** * Information about all available Trusted Advisor checks . * @ param checks * Information about all available Trusted Advisor checks . */ public void setChecks ( java . util . Collection < TrustedAdvisorCheckDescription > checks ) { } }
if ( checks == null ) { this . checks = null ; return ; } this . checks = new com . amazonaws . internal . SdkInternalList < TrustedAdvisorCheckDescription > ( checks ) ;
public class UIUtil { /** * Show keyboard and focus to given EditText * @ param context Context * @ param target EditText to focus */ public static void showKeyboard ( Context context , EditText target ) { } }
if ( context == null || target == null ) { return ; } InputMethodManager imm = getInputMethodManager ( context ) ; imm . showSoftInput ( target , InputMethodManager . SHOW_IMPLICIT ) ;
public class AbstractAuditEventMessageImpl {

    /**
     * Creates and adds an Active Participant block to this audit event message,
     * automatically determining the Network Access Point ID Type Code from the
     * supplied address (IP address vs. hostname).
     *
     * @param userID               the Active Participant's UserID
     * @param altUserID            the Active Participant's Alternate UserID
     * @param userName             the Active Participant's UserName
     * @param userIsRequestor      whether this Active Participant is a requestor
     * @param roleIdCodes          the Active Participant's Role Codes
     * @param networkAccessPointID the Active Participant's Network Access Point ID (IP/hostname)
     * @return the Active Participant block created
     */
    protected ActiveParticipantType addActiveParticipant(String userID, String altUserID, String userName,
            Boolean userIsRequestor, List<CodedValueType> roleIdCodes, String networkAccessPointID) {
        // Looks up whether the access point is an IP address or a hostname,
        // then delegates to the overload that takes the explicit type code.
        return addActiveParticipant(userID, altUserID, userName, userIsRequestor, roleIdCodes,
                networkAccessPointID, getNetworkAccessPointCodeFromAddress(networkAccessPointID));
    }
}
public class Sets { /** * Create a subset using a { @ code Criteria } * @ param set original set * @ param criteria criteria * @ param < T > type * @ return a new subset containing items form the original set that met the criteria */ public static < T > Set < T > subset ( Set < T > set , Criteria < T > criteria ) { } }
Set < T > subset = new HashSet < T > ( ) ; for ( T item : set ) { if ( criteria . meetsCriteria ( item ) ) { subset . add ( item ) ; } } return subset ;
public class JXMapViewer { /** * the method that does the actual painting */ private void doPaintComponent ( Graphics g ) { } }
/* * if ( isOpaque ( ) | | isDesignTime ( ) ) { g . setColor ( getBackground ( ) ) ; g . fillRect ( 0,0 , getWidth ( ) , getHeight ( ) ) ; } */ if ( isDesignTime ( ) ) { // do nothing } else { int z = getZoom ( ) ; Rectangle viewportBounds = getViewportBounds ( ) ; drawMapTiles ( g , z , viewportBounds ) ; drawOverlays ( z , g , viewportBounds ) ; } super . paintBorder ( g ) ;
public class MapOutputCorrectness { /** * Which mapper sent this key sum ? * @ param key Key to check * @ param numReducers Number of reducers * @ param maxKeySpace Max key space * @ return Mapper that send this key sum */ private static int getMapperId ( long key , int numReducers , int maxKeySpace ) { } }
key = key - getFirstSumKey ( numReducers , maxKeySpace ) ; return ( int ) ( key / numReducers ) ;
public class TableUtils { /** * Guava { @ link Function } to transform a cell to its column key . */ public static < R , C , V > Function < Table . Cell < R , C , V > , C > toColumnKeyFunction ( ) { } }
return new Function < Table . Cell < R , C , V > , C > ( ) { @ Override public C apply ( final Table . Cell < R , C , V > input ) { return input . getColumnKey ( ) ; } } ;
public class Secp256k1Loader { /** * Deleted old native libraries e . g . on Windows the DLL file is not removed * on VM - Exit ( bug # 80) */ static void cleanup ( ) { } }
String tempFolder = getTempDir ( ) . getAbsolutePath ( ) ; File dir = new File ( tempFolder ) ; File [ ] nativeLibFiles = dir . listFiles ( new FilenameFilter ( ) { private final String searchPattern = "secp256k1-" ; public boolean accept ( File dir , String name ) { return name . startsWith ( searchPattern ) && ! name . endsWith ( ".lck" ) ; } } ) ; if ( nativeLibFiles != null ) { for ( File nativeLibFile : nativeLibFiles ) { File lckFile = new File ( nativeLibFile . getAbsolutePath ( ) + ".lck" ) ; if ( ! lckFile . exists ( ) ) { try { nativeLibFile . delete ( ) ; } catch ( SecurityException e ) { System . err . println ( "Failed to delete old native lib" + e . getMessage ( ) ) ; } } } }
public class ContentsDao { /** * Delete the collection of Contents , cascading optionally including the * user table * @ param contentsCollection * contents collection * @ param userTable * true if a user table * @ return deleted count * @ throws SQLException * upon deletion error */ public int deleteCascade ( Collection < Contents > contentsCollection , boolean userTable ) throws SQLException { } }
int count = 0 ; if ( contentsCollection != null ) { for ( Contents contents : contentsCollection ) { count += deleteCascade ( contents , userTable ) ; } } return count ;
public class ApiClientTransportFactory { /** * < p > newTransport . < / p > * @ param apitraryApi a { @ link com . apitrary . api . ApitraryApi } object . * @ return a { @ link com . apitrary . api . transport . Transport } object . */ public Transport newTransport ( ApitraryApi apitraryApi ) { } }
List < Class < Transport > > knownTransports = getAvailableTransports ( ) ; if ( knownTransports . isEmpty ( ) ) { throw new ApiTransportException ( "No transport provider available. Is there one on the classpath?" ) ; } return newTransport ( apitraryApi , knownTransports . get ( knownTransports . size ( ) - 1 ) ) ;
public class Decoder { /** * Reads a code of length 8 in an array of bits , padding with zeros */ private static byte readByte ( boolean [ ] rawbits , int startIndex ) { } }
int n = rawbits . length - startIndex ; if ( n >= 8 ) { return ( byte ) readCode ( rawbits , startIndex , 8 ) ; } return ( byte ) ( readCode ( rawbits , startIndex , n ) << ( 8 - n ) ) ;
public class TrainingsImpl { /** * Queues project for training . * @ param projectId The project id * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the Iteration object */ public Observable < Iteration > trainProjectAsync ( UUID projectId ) { } }
return trainProjectWithServiceResponseAsync ( projectId ) . map ( new Func1 < ServiceResponse < Iteration > , Iteration > ( ) { @ Override public Iteration call ( ServiceResponse < Iteration > response ) { return response . body ( ) ; } } ) ;
public class WebApp { /** * ServletContextFactories */ public boolean isFeatureEnabled ( com . ibm . websphere . servlet . container . WebContainer . Feature feature ) { } }
return this . features . contains ( feature ) ;
public class RequestHandler { /** * Add the service to the node . * @ param request the request which contains infos about the service and node to add . * @ return an observable which contains the newly created service . */ public Observable < Service > addService ( final AddServiceRequest request ) { } }
LOGGER . debug ( "Got instructed to add Service {}, to Node {}" , request . type ( ) , request . hostname ( ) ) ; return nodeBy ( request . hostname ( ) ) . addService ( request ) ;
public class DateTime { /** * Returns a copy of this datetime minus the specified number of hours . * The calculation will subtract a duration equivalent to the number of * hours expressed in milliseconds . * For example , if a spring daylight savings cutover is from 01:59 to 03:00 * then subtracting one hour from 03:30 will result in 01:30 . This is a * duration of one hour earlier , even though the hour field value changed * from 3 to 1. * The following three lines are identical in effect : * < pre > * DateTime subtracted = dt . minusHours ( 6 ) ; * DateTime subtracted = dt . minus ( Period . hours ( 6 ) ) ; * DateTime subtracted = dt . withFieldAdded ( DurationFieldType . hours ( ) , - 6 ) ; * < / pre > * This datetime instance is immutable and unaffected by this method call . * @ param hours the amount of hours to subtract , may be negative * @ return the new datetime minus the increased hours * @ since 1.1 */ public DateTime minusHours ( int hours ) { } }
if ( hours == 0 ) { return this ; } long instant = getChronology ( ) . hours ( ) . subtract ( getMillis ( ) , hours ) ; return withMillis ( instant ) ;
public class ClassProject { /** * GetFullPackage Method . */ public String getFullPackage ( CodeType codeType , String packageName ) { } }
if ( packageName == null ) packageName = DBConstants . BLANK ; if ( packageName . length ( ) > 0 ) if ( ! packageName . startsWith ( "." ) ) return packageName ; Record programControl = null ; if ( this . getRecordOwner ( ) != null ) programControl = ( Record ) this . getRecordOwner ( ) . getRecord ( ProgramControl . PROGRAM_CONTROL_FILE ) ; Record programControlTemp = null ; if ( programControl == null ) programControl = programControlTemp = new ProgramControl ( this . findRecordOwner ( ) ) ; String startPackage = DBConstants . ROOT_PACKAGE . substring ( 0 , DBConstants . ROOT_PACKAGE . length ( ) - 1 ) ; if ( codeType == CodeType . THICK ) if ( ! programControl . getField ( ProgramControl . PACKAGE_NAME ) . isNull ( ) ) { if ( programControl . getField ( ProgramControl . PACKAGE_NAME ) . toString ( ) . startsWith ( "." ) ) startPackage = startPackage + programControl . getField ( ProgramControl . PACKAGE_NAME ) . toString ( ) ; else startPackage = programControl . getField ( ProgramControl . PACKAGE_NAME ) . toString ( ) ; } if ( codeType == CodeType . THIN ) { if ( ! programControl . getField ( ProgramControl . THIN_PACKAGE ) . isNull ( ) ) { if ( programControl . getField ( ProgramControl . THIN_PACKAGE ) . toString ( ) . startsWith ( "." ) ) startPackage = startPackage + programControl . getField ( ProgramControl . THIN_PACKAGE ) . toString ( ) ; else startPackage = programControl . getField ( ProgramControl . THIN_PACKAGE ) . toString ( ) ; } else startPackage = startPackage + ".thin" ; } if ( ( codeType == CodeType . RESOURCE_CODE ) || ( codeType == CodeType . RESOURCE_PROPERTIES ) ) { if ( ! programControl . getField ( ProgramControl . RESOURCE_PACKAGE ) . isNull ( ) ) { if ( programControl . getField ( ProgramControl . RESOURCE_PACKAGE ) . toString ( ) . startsWith ( "." ) ) startPackage = startPackage + programControl . getField ( ProgramControl . RESOURCE_PACKAGE ) . toString ( ) ; else startPackage = programControl . getField ( ProgramControl . 
RESOURCE_PACKAGE ) . toString ( ) ; } else startPackage = startPackage + ".res" ; } if ( codeType == CodeType . INTERFACE ) { if ( ! programControl . getField ( ProgramControl . INTERFACE_PACKAGE ) . isNull ( ) ) { if ( programControl . getField ( ProgramControl . INTERFACE_PACKAGE ) . toString ( ) . startsWith ( "." ) ) startPackage = startPackage + programControl . getField ( ProgramControl . INTERFACE_PACKAGE ) . toString ( ) ; else startPackage = programControl . getField ( ProgramControl . INTERFACE_PACKAGE ) . toString ( ) ; } else startPackage = startPackage + ".model" ; } String fullPackage = this . getPath ( codeType , true ) ; if ( fullPackage == null ) fullPackage = DBConstants . BLANK ; if ( ( fullPackage . length ( ) == 0 ) || ( fullPackage . startsWith ( "." ) ) ) fullPackage = startPackage + fullPackage ; if ( fullPackage . endsWith ( "." ) ) fullPackage = fullPackage . substring ( 0 , fullPackage . length ( ) - 1 ) ; if ( packageName . startsWith ( "." ) ) packageName = packageName . substring ( 1 ) ; if ( programControlTemp != null ) programControlTemp . free ( ) ; if ( fullPackage . length ( ) == 0 ) return packageName ; else if ( packageName . length ( ) == 0 ) return fullPackage ; return fullPackage + '.' + packageName ;
public class DecodedBitStreamParser { /** * Text Compaction mode ( see 5.4.1.5 ) permits all printable ASCII characters to be * encoded , i . e . values 32 - 126 inclusive in accordance with ISO / IEC 646 ( IRV ) , as * well as selected control characters . * @ param codewords The array of codewords ( data + error ) * @ param codeIndex The current index into the codeword array . * @ param result The decoded data is appended to the result . * @ return The next index into the codeword array . */ private static int textCompaction ( int [ ] codewords , int codeIndex , StringBuilder result ) { } }
// 2 character per codeword int [ ] textCompactionData = new int [ ( codewords [ 0 ] - codeIndex ) * 2 ] ; // Used to hold the byte compaction value if there is a mode shift int [ ] byteCompactionData = new int [ ( codewords [ 0 ] - codeIndex ) * 2 ] ; int index = 0 ; boolean end = false ; while ( ( codeIndex < codewords [ 0 ] ) && ! end ) { int code = codewords [ codeIndex ++ ] ; if ( code < TEXT_COMPACTION_MODE_LATCH ) { textCompactionData [ index ] = code / 30 ; textCompactionData [ index + 1 ] = code % 30 ; index += 2 ; } else { switch ( code ) { case TEXT_COMPACTION_MODE_LATCH : // reinitialize text compaction mode to alpha sub mode textCompactionData [ index ++ ] = TEXT_COMPACTION_MODE_LATCH ; break ; case BYTE_COMPACTION_MODE_LATCH : case BYTE_COMPACTION_MODE_LATCH_6 : case NUMERIC_COMPACTION_MODE_LATCH : case BEGIN_MACRO_PDF417_CONTROL_BLOCK : case BEGIN_MACRO_PDF417_OPTIONAL_FIELD : case MACRO_PDF417_TERMINATOR : codeIndex -- ; end = true ; break ; case MODE_SHIFT_TO_BYTE_COMPACTION_MODE : // The Mode Shift codeword 913 shall cause a temporary // switch from Text Compaction mode to Byte Compaction mode . // This switch shall be in effect for only the next codeword , // after which the mode shall revert to the prevailing sub - mode // of the Text Compaction mode . Codeword 913 is only available // in Text Compaction mode ; its use is described in 5.4.2.4. textCompactionData [ index ] = MODE_SHIFT_TO_BYTE_COMPACTION_MODE ; code = codewords [ codeIndex ++ ] ; byteCompactionData [ index ] = code ; index ++ ; break ; } } } decodeTextCompaction ( textCompactionData , byteCompactionData , index , result ) ; return codeIndex ;
public class ExceptionWrapper { /** * Simulates a printing of a stack trace by printing the string * stack trace */ public void printStackTrace ( PrintStream printStream ) { } }
if ( mStackTrace == null ) { printStream . print ( getMessage ( ) ) ; } else { printStream . print ( mStackTrace ) ; }
public class WSHelper { /** * Returns required attachment value from webservice deployment . * @ param < A > expected value * @ param dep webservice deployment * @ param key attachment key * @ return required attachment * @ throws IllegalStateException if attachment value is null */ public static < A > A getRequiredAttachment ( final Deployment dep , final Class < A > key ) { } }
final A value = dep . getAttachment ( key ) ; if ( value == null ) { throw Messages . MESSAGES . cannotFindAttachmentInDeployment ( key , dep . getSimpleName ( ) ) ; } return value ;
public class Color { /** * Converts HSL ( hue , saturation , lightness ) to RGB . * HSL values should already be normalized to [ 0,1] * @ param h in [ 0,1] * @ param s in [ 0,1] * @ param l in [ 0,1] * @ return Color with RGB values */ public static final Color fromNormalizedHSL ( final double h , final double s , final double l ) { } }
// see http : / / www . w3 . org / TR / css3 - color / // HOW TO RETURN hsl . to . rgb ( h , s , l ) : // SELECT : // l < = 0.5 : PUT l * ( s + 1 ) IN m2 // ELSE : PUT l + s - l * s IN m2 // PUT l * 2 - m2 IN m1 // PUT hue . to . rgb ( m1 , m2 , h + 1/3 ) IN r // PUT hue . to . rgb ( m1 , m2 , h ) IN g // PUT hue . to . rgb ( m1 , m2 , h - 1/3 ) IN b // RETURN ( r , g , b ) final double m2 = ( l <= 0.5 ) ? ( l * ( s + 1 ) ) : ( ( l + s ) - ( l * s ) ) ; final double m1 = ( l * 2 ) - m2 ; return new Color ( fixRGB ( ( int ) Math . round ( 255 * hueToRGB ( m1 , m2 , h + ( 1.0 / 3 ) ) ) ) , fixRGB ( ( int ) Math . round ( 255 * hueToRGB ( m1 , m2 , h ) ) ) , fixRGB ( ( int ) Math . round ( 255 * hueToRGB ( m1 , m2 , h - ( 1.0 / 3 ) ) ) ) ) ;
public class KernelPoints { /** * Adds a new Kernel Point to the internal list this object represents . The * new Kernel Point will be equivalent to creating a new KernelPoint * directly . */ public void addNewKernelPoint ( ) { } }
KernelPoint source = points . get ( 0 ) ; KernelPoint toAdd = new KernelPoint ( k , errorTolerance ) ; toAdd . setMaxBudget ( maxBudget ) ; toAdd . setBudgetStrategy ( budgetStrategy ) ; standardMove ( toAdd , source ) ; toAdd . kernelAccel = source . kernelAccel ; toAdd . vecs = source . vecs ; toAdd . alpha = new DoubleList ( source . alpha . size ( ) ) ; for ( int i = 0 ; i < source . alpha . size ( ) ; i ++ ) toAdd . alpha . add ( 0.0 ) ; points . add ( toAdd ) ;
public class ContentKeyPoliciesInner { /** * Update a Content Key Policy . * Updates an existing Content Key Policy in the Media Services account . * @ param resourceGroupName The name of the resource group within the Azure subscription . * @ param accountName The Media Services account name . * @ param contentKeyPolicyName The Content Key Policy name . * @ param parameters The request parameters * @ param serviceCallback the async ServiceCallback to handle successful and failed responses . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the { @ link ServiceFuture } object */ public ServiceFuture < ContentKeyPolicyInner > updateAsync ( String resourceGroupName , String accountName , String contentKeyPolicyName , ContentKeyPolicyInner parameters , final ServiceCallback < ContentKeyPolicyInner > serviceCallback ) { } }
return ServiceFuture . fromResponse ( updateWithServiceResponseAsync ( resourceGroupName , accountName , contentKeyPolicyName , parameters ) , serviceCallback ) ;
public class FieldAnnotationParser { /** * Parse { @ link ViewId } annotation and try to assign the view with that id to the annotated field . * It will throw a { @ link ClassCastException } if the field and the view with the given ID have different types . * @ param object object where the annotation is . * @ param view parent view that contains a view with the viewId given in the annotation . */ public static void setViewFields ( final Object object , final View view ) { } }
setViewFields ( object , new ViewFinder ( ) { @ Override public View findViewById ( int viewId ) { return view . findViewById ( viewId ) ; } } ) ;
public class SimpleBase { /** * Sets all the elements in this matrix equal to the specified value . < br > * < br > * a < sub > ij < / sub > = val < br > * @ see CommonOps _ DDRM # fill ( DMatrixD1 , double ) * @ param val The value each element is set to . */ public void fill ( double val ) { } }
try { ops . fill ( mat , val ) ; } catch ( ConvertToDenseException e ) { convertToDense ( ) ; fill ( val ) ; }
public class SwingGUIListener {

    /**
     * Processes the IOSetting by presenting its question in a Swing dialog and
     * recording the user's answer. Questions above the configured level are
     * skipped. If a dialog is cancelled/closed, the setting's current value is
     * kept as the answer.
     */
    @Override
    public void processIOSettingQuestion(IOSetting setting) {
        // Only ask questions at or below the configured verbosity level.
        if (setting.getLevel().ordinal() <= this.level.ordinal()) {
            // Default to the current value; only overwritten by an explicit answer.
            String answer = setting.getSetting();
            if (setting instanceof BooleanIOSetting) {
                int n = JOptionPane.showConfirmDialog(frame, setting.getQuestion(),
                        setting.getName(), JOptionPane.YES_NO_OPTION);
                if (n == JOptionPane.YES_OPTION) {
                    answer = "true";
                } else if (n == JOptionPane.NO_OPTION) {
                    answer = "false";
                }
                // else: dialog closed -> keep the setting's default
            } else if (setting instanceof OptionIOSetting) {
                OptionIOSetting optionSetting = (OptionIOSetting) setting;
                List<String> settings = optionSetting.getOptions();
                Iterator<String> elements = settings.iterator();
                Object[] options = new Object[settings.size()];
                for (int i = 0; i < options.length; i++) {
                    options[i] = elements.next();
                }
                int n = JOptionPane.showOptionDialog(frame, setting.getQuestion(), setting.getName(),
                        JOptionPane.DEFAULT_OPTION, JOptionPane.QUESTION_MESSAGE, null,
                        options, setting.getSetting());
                // n == CLOSED_OPTION (-1) when the dialog is dismissed; guard
                // against the ArrayIndexOutOfBoundsException that would follow.
                if (n >= 0) {
                    answer = (String) options[n];
                }
            } else {
                // StringIOSetting and any other type: free-form input dialog.
                Object response = JOptionPane.showInputDialog(frame, setting.getQuestion(),
                        setting.getName(), JOptionPane.QUESTION_MESSAGE, null, null,
                        setting.getSetting());
                // showInputDialog returns null on cancel; previously this NPE'd.
                if (response != null) {
                    answer = response.toString();
                }
            }
            try {
                setting.setSetting(answer);
            } catch (CDKException exception) {
                // Invalid answer: deliberately ignored, the setting keeps its old value.
            }
        }
        // else: question level too detailed -> skip silently
    }
}
public class S3Utils { /** * { @ inheritDoc } */ @ Override public void delete ( String path ) throws IOException , URISyntaxException { } }
URI uri = new URI ( path ) ; String bucketName = uri . getHost ( ) ; String key = uri . getPath ( ) . substring ( 1 , uri . getPath ( ) . length ( ) ) ; List < DeleteObjectsRequest . KeyVersion > keys = new ArrayList < DeleteObjectsRequest . KeyVersion > ( ) ; String marker = null ; for ( ; ; ) { ObjectListing ol = s3 . listObjects ( new ListObjectsRequest ( ) . withBucketName ( bucketName ) . withPrefix ( key ) . withMarker ( marker ) ) ; for ( S3ObjectSummary objectSummary : ol . getObjectSummaries ( ) ) { keys . add ( new DeleteObjectsRequest . KeyVersion ( objectSummary . getKey ( ) ) ) ; } marker = ol . getNextMarker ( ) ; if ( marker == null ) { break ; } } s3 . deleteObjects ( new DeleteObjectsRequest ( bucketName ) . withKeys ( keys ) ) ; s3 . deleteObject ( bucketName , key ) ;
public class Director { /** * Clean up the downloaded install assets ; * reset installAssets and uninstallAssets . */ public void cleanUp ( ) { } }
fireProgressEvent ( InstallProgressEvent . CLEAN_UP , 98 , Messages . INSTALL_KERNEL_MESSAGES . getLogMessage ( "STATE_CLEANING" ) ) ; if ( installAssets != null ) { for ( List < InstallAsset > iaList : installAssets ) { for ( InstallAsset asset : iaList ) { asset . delete ( ) ; } } } boolean del = InstallUtils . deleteDirectory ( this . product . getInstallTempDir ( ) ) ; if ( ! del ) this . product . getInstallTempDir ( ) . deleteOnExit ( ) ; installAssets = null ; setScriptsPermission = false ; if ( resolveDirector != null ) resolveDirector . cleanUp ( ) ; if ( uninstallDirector != null ) uninstallDirector . cleanUp ( ) ; log ( Level . FINE , Messages . INSTALL_KERNEL_MESSAGES . getLogMessage ( "MSG_CLEANUP_SUCCESS" ) ) ;
public class RuleBasedBreakIterator { /** * The State Machine Engine for moving forward is here . * This function is the heart of the RBBI run time engine . * @ param stateTable * @ return the new iterator position * A note on supplementary characters and the position of underlying * Java CharacterIterator : Normally , a character iterator is positioned at * the char most recently returned by next ( ) . Within this function , when * a supplementary char is being processed , the char iterator is left * sitting on the trail surrogate , in the middle of the code point . * This is different from everywhere else , where an iterator always * points at the lead surrogate of a supplementary . */ private int handleNext ( short stateTable [ ] ) { } }
if ( TRACE ) { System . out . println ( "Handle Next pos char state category" ) ; } // No matter what , handleNext alway correctly sets the break tag value . fLastStatusIndexValid = true ; fLastRuleStatusIndex = 0 ; // caches for quicker access CharacterIterator text = fText ; CharTrie trie = fRData . fTrie ; // Set up the starting char int c = text . current ( ) ; if ( c >= UTF16 . LEAD_SURROGATE_MIN_VALUE ) { c = nextTrail32 ( text , c ) ; if ( c == DONE32 ) { return BreakIterator . DONE ; } } int initialPosition = text . getIndex ( ) ; int result = initialPosition ; // Set the initial state for the state machine int state = START_STATE ; int row = fRData . getRowIndex ( state ) ; short category = 3 ; int flagsState = fRData . getStateTableFlags ( stateTable ) ; int mode = RBBI_RUN ; if ( ( flagsState & RBBIDataWrapper . RBBI_BOF_REQUIRED ) != 0 ) { category = 2 ; mode = RBBI_START ; if ( TRACE ) { System . out . print ( " " + RBBIDataWrapper . intToString ( text . getIndex ( ) , 5 ) ) ; System . out . print ( RBBIDataWrapper . intToHexString ( c , 10 ) ) ; System . out . println ( RBBIDataWrapper . intToString ( state , 7 ) + RBBIDataWrapper . intToString ( category , 6 ) ) ; } } fLookAheadMatches . reset ( ) ; // loop until we reach the end of the text or transition to state 0 while ( state != STOP_STATE ) { if ( c == DONE32 ) { // Reached end of input string . if ( mode == RBBI_END ) { // We have already run the loop one last time with the // character set to the pseudo { eof } value . Now it is time // to unconditionally bail out . break ; } // Run the loop one last time with the fake end - of - input character category mode = RBBI_END ; category = 1 ; } else if ( mode == RBBI_RUN ) { // Get the char category . An incoming category of 1 or 2 mens that // we are preset for doing the beginning or end of input , and // that we shouldn ' t get a category from an actual text input character . 
// look up the current character ' s character category , which tells us // which column in the state table to look at . category = ( short ) trie . getCodePointValue ( c ) ; // Check the dictionary bit in the character ' s category . // Counter is only used by dictionary based iterators ( subclasses ) . // Chars that need to be handled by a dictionary have a flag bit set // in their category values . if ( ( category & 0x4000 ) != 0 ) { fDictionaryCharCount ++ ; // And off the dictionary flag bit . category &= ~ 0x4000 ; } if ( TRACE ) { System . out . print ( " " + RBBIDataWrapper . intToString ( text . getIndex ( ) , 5 ) ) ; System . out . print ( RBBIDataWrapper . intToHexString ( c , 10 ) ) ; System . out . println ( RBBIDataWrapper . intToString ( state , 7 ) + RBBIDataWrapper . intToString ( category , 6 ) ) ; } // Advance to the next character . // If this is a beginning - of - input loop iteration , don ' t advance . // The next iteration will be processing the first real input character . c = text . next ( ) ; if ( c >= UTF16 . LEAD_SURROGATE_MIN_VALUE ) { c = nextTrail32 ( text , c ) ; } } else { mode = RBBI_RUN ; } // look up a state transition in the state table state = stateTable [ row + RBBIDataWrapper . NEXTSTATES + category ] ; row = fRData . getRowIndex ( state ) ; if ( stateTable [ row + RBBIDataWrapper . ACCEPTING ] == - 1 ) { // Match found , common case result = text . getIndex ( ) ; if ( c >= UTF16 . SUPPLEMENTARY_MIN_VALUE && c <= UTF16 . CODEPOINT_MAX_VALUE ) { // The iterator has been left in the middle of a surrogate pair . // We want the start of it . result -- ; } // Remember the break status ( tag ) values . fLastRuleStatusIndex = stateTable [ row + RBBIDataWrapper . TAGIDX ] ; } int completedRule = stateTable [ row + RBBIDataWrapper . ACCEPTING ] ; if ( completedRule > 0 ) { // Lookahead match is completed int lookaheadResult = fLookAheadMatches . 
getPosition ( completedRule ) ; if ( lookaheadResult >= 0 ) { fLastRuleStatusIndex = stateTable [ row + RBBIDataWrapper . TAGIDX ] ; text . setIndex ( lookaheadResult ) ; return lookaheadResult ; } } int rule = stateTable [ row + RBBIDataWrapper . LOOKAHEAD ] ; if ( rule != 0 ) { // At the position of a ' / ' in a look - ahead match . Record it . int pos = text . getIndex ( ) ; if ( c >= UTF16 . SUPPLEMENTARY_MIN_VALUE && c <= UTF16 . CODEPOINT_MAX_VALUE ) { // The iterator has been left in the middle of a surrogate pair . // We want the beginning of it . pos -- ; } fLookAheadMatches . setPosition ( rule , pos ) ; } } // End of state machine main loop // The state machine is done . Check whether it found a match . . . // If the iterator failed to advance in the match engine force it ahead by one . // This indicates a defect in the break rules , which should always match // at least one character . if ( result == initialPosition ) { if ( TRACE ) { System . out . println ( "Iterator did not move. Advancing by 1." ) ; } text . setIndex ( initialPosition ) ; next32 ( text ) ; result = text . getIndex ( ) ; } else { // Leave the iterator at our result position . // ( we may have advanced beyond the last accepting position chasing after // longer matches that never completed . ) text . setIndex ( result ) ; } if ( TRACE ) { System . out . println ( "result = " + result ) ; } return result ;
public class Counters { /** * Returns the value of the smallest entry in this counter . * @ param c * The Counter ( not modified ) * @ return The minimum value in the Counter */ public static < E > double min ( Counter < E > c ) { } }
double min = Double . POSITIVE_INFINITY ; for ( double v : c . values ( ) ) { min = Math . min ( min , v ) ; } return min ;
public class Directories { /** * The snapshot must exist */ public long snapshotCreationTime ( String snapshotName ) { } }
for ( File dir : dataPaths ) { File snapshotDir = new File ( dir , join ( SNAPSHOT_SUBDIR , snapshotName ) ) ; if ( snapshotDir . exists ( ) ) return snapshotDir . lastModified ( ) ; } throw new RuntimeException ( "Snapshot " + snapshotName + " doesn't exist" ) ;
public class JsonSerializer { /** * Serializes the given object in JSON and returns the resulting string . In * case of errors , null is returned . In particular , this happens if the * object is not based on a Jackson - annotated class . An error is logged in * this case . * @ param object * object to serialize * @ return JSON serialization or null */ protected static String jacksonObjectToString ( Object object ) { } }
try { return mapper . writeValueAsString ( object ) ; } catch ( JsonProcessingException e ) { logger . error ( "Failed to serialize JSON data: " + e . toString ( ) ) ; return null ; }
public class CommerceShippingFixedOptionRelUtil {
    /**
     * Returns an ordered range of all the commerce shipping fixed option rels
     * where commerceShippingMethodId = &#63;. Useful when paginating:
     * {@code start}/{@code end} are result-set indexes (not primary keys);
     * passing {@code QueryUtil#ALL_POS} for both returns the full set.
     *
     * @param commerceShippingMethodId the commerce shipping method ID
     * @param start lower bound of the range (inclusive)
     * @param end upper bound of the range (exclusive)
     * @param orderByComparator comparator to order the results by (optionally null)
     * @return the ordered range of matching commerce shipping fixed option rels
     */
    public static List<CommerceShippingFixedOptionRel> findByCommerceShippingMethodId(long commerceShippingMethodId, int start, int end, OrderByComparator<CommerceShippingFixedOptionRel> orderByComparator) {
        // Pure delegation to the persistence implementation.
        return getPersistence().findByCommerceShippingMethodId(commerceShippingMethodId, start, end, orderByComparator);
    }
}
public class BuildableType_Builder { /** * Sets the value to be returned by { @ link BuildableType # partialToBuilder ( ) } . * @ return this { @ code Builder } object * @ throws NullPointerException if { @ code partialToBuilder } is null */ public BuildableType . Builder partialToBuilder ( PartialToBuilderMethod partialToBuilder ) { } }
this . partialToBuilder = Objects . requireNonNull ( partialToBuilder ) ; _unsetProperties . remove ( Property . PARTIAL_TO_BUILDER ) ; return ( BuildableType . Builder ) this ;
public class BplusTree {
    /**
     * Puts the key/value pair into the subtree rooted at {@code nodeid}
     * (recursive). If the visited node was split by this operation, the newly
     * created sibling node is returned so the caller can link it in.
     *
     * @param key the key to add
     * @param value the value to add
     * @param nodeid id of the subtree root
     * @return the node created when this node was split, or null if no split occurred
     * @throws DuplicateKeyException if the key already exists in a leaf
     */
    protected Node<K, V> putRecursive(K key, final V value, final int nodeid) throws DuplicateKeyException {
        final Node<K, V> nodeFind = getNode(nodeid);
        if (nodeFind == null) {
            // Should not happen for a consistent tree; logged for diagnosis
            // (the following dereference would then NPE).
            if (log.isDebugEnabled()) {
                log.debug(this.getClass().getName() + "::putRecursive getNode(" + nodeid + ")=null");
            }
        }
        int slot = nodeFind.findSlotByKey(key);
        if (slot >= 0) {
            // Exact key match.
            if (nodeFind.isLeaf()) {
                // Leaf node: overwrite the value, persist, then report the duplicate.
                final LeafNode<K, V> node = (LeafNode<K, V>) nodeFind;
                node.set(slot, value);
                putNode(node);
                throw new DuplicateKeyException();
            }
        }
        // A negative slot encodes the insertion point as -(pos)-1; an exact
        // match in an internal node descends into the right child (slot+1).
        slot = ((slot < 0) ? (-slot) - 1 : slot + 1);
        if (nodeFind.isLeaf()) {
            // Leaf node: insert the new entry and persist.
            final LeafNode<K, V> node = (LeafNode<K, V>) nodeFind;
            node.add(slot, key, value);
            putNode(node);
        } else {
            // Internal node: recurse into the chosen child.
            final InternalNode<K, V> node = (InternalNode<K, V>) nodeFind;
            final Node<K, V> splitedNode = putRecursive(key, value, node.childs[slot]);
            if (splitedNode != null) {
                // The child was split: hoist its separator key into this node
                // and link the new sibling.
                key = splitedNode.splitShiftKeysLeft();
                putNode(splitedNode);
                node.add(slot, key, splitedNode.id);
                putNode(node);
            }
        }
        // Split this node too when it overflowed.
        return nodeFind.isFull() ? nodeFind.split() : null;
    }
}
public class WidgetUtil {
    /**
     * Gets the pre-made HTML Widget for the specified guild using the
     * specified settings. The widget will only display correctly if the guild
     * in question has the Widget enabled.
     *
     * @param guild the guild
     * @param theme the theme, light or dark
     * @param width the width of the widget
     * @param height the height of the widget
     * @return a String containing the pre-made widget with the supplied settings
     */
    public static String getPremadeWidgetHtml(Guild guild, WidgetTheme theme, int width, int height) {
        Checks.notNull(guild, "Guild");
        // Delegate to the id-based overload.
        return getPremadeWidgetHtml(guild.getId(), theme, width, height);
    }
}
public class WorkflowClient {
    /**
     * Skips a given task from a currently RUNNING workflow.
     *
     * @param workflowId the id of the workflow instance
     * @param taskReferenceName the reference name of the task to be skipped
     */
    public void skipTaskFromWorkflow(String workflowId, String taskReferenceName) {
        Preconditions.checkArgument(StringUtils.isNotBlank(workflowId), "workflow id cannot be blank");
        Preconditions.checkArgument(StringUtils.isNotBlank(taskReferenceName), "Task reference name cannot be blank");
        // PUT with no body; the path template is filled with the two ids.
        put("workflow/{workflowId}/skiptask/{taskReferenceName}", null, workflowId, taskReferenceName);
    }
}
public class SwaggerController { /** * Serves the Swagger UI which allows you to try out the documented endpoints . Sets the url * parameter to the swagger yaml that describes the REST API . Creates an apiKey token for the * current user . */ @ GetMapping public String init ( Model model ) { } }
final UriComponents uriComponents = ServletUriComponentsBuilder . fromCurrentContextPath ( ) . build ( ) ; model . addAttribute ( "molgenisUrl" , uriComponents . toUriString ( ) + URI + "/swagger.yml" ) ; model . addAttribute ( "baseUrl" , uriComponents . toUriString ( ) ) ; final String currentUsername = SecurityUtils . getCurrentUsername ( ) ; if ( currentUsername != null ) { model . addAttribute ( "token" , tokenService . generateAndStoreToken ( currentUsername , "For Swagger UI" ) ) ; } return "view-swagger-ui" ;
public class BasePersistence {
    /**
     * {@inheritDoc}
     * <p>
     * Delegates the paged lookup to the configured persistence provider.
     */
    @Override
    public List<T> findAll(int firstResult, int maxResults) {
        return getPersistenceProvider().findAll(persistenceClass, firstResult, maxResults);
    }
}
public class Proxies {
    /**
     * Create a proxy for the given {@link Class} type and {@link ForgeProxy} handler.
     *
     * @param type the type to proxy (an interface, or a class with a default constructor)
     * @param handler receives every invocation made on the returned proxy
     * @return a new proxy instance of the requested type
     */
    @SuppressWarnings("unchecked")
    public static <T> T enhance(Class<T> type, ForgeProxy handler) {
        Assert.notNull(type, "Class type to proxy must not be null");
        Assert.notNull(handler, "ForgeProxy handler must not be null");
        Object result = null;
        // Generated proxy classes are cached per (class loader, type).
        Class<?> proxyType = getCachedProxyType(type.getClassLoader(), type);
        if (proxyType == null) {
            Class<?>[] hierarchy = null;
            Class<?> superclass = null;
            // Interfaces are proxied by implementing them (plus ForgeProxy);
            // concrete classes are proxied by subclassing.
            if (type.isInterface() && !ForgeProxy.class.isAssignableFrom(type))
                hierarchy = new Class<?>[] { type, ForgeProxy.class };
            else if (type.isInterface())
                hierarchy = new Class<?>[] { type };
            else {
                // Avoid proxying a proxy: unwrap back to the real superclass.
                if (Proxies.isProxyType(type))
                    superclass = unwrapProxyTypes(type);
                else {
                    superclass = type;
                    hierarchy = new Class<?>[] { ForgeProxy.class };
                }
            }
            ProxyFactory f = new ProxyFactory();
            f.setFilter(filter);
            f.setInterfaces(hierarchy);
            f.setSuperclass(superclass);
            proxyType = f.createClass();
            setCachedProxyType(type.getClassLoader(), type, proxyType);
        }
        try {
            result = proxyType.newInstance();
        } catch (InstantiationException e) {
            throw new IllegalStateException("Could not instantiate proxy for type [" + type + "]. For optimal proxy compatibility, ensure " + "that this type is an interface, or a class with a default constructor.", e);
        } catch (IllegalAccessException e) {
            throw new IllegalStateException(e);
        }
        // Wire the handler into whichever proxy API produced the class.
        if (result instanceof Proxy)
            ((Proxy) result).setHandler(handler);
        else if (result instanceof ProxyObject)
            ((ProxyObject) result).setHandler(handler);
        else
            throw new IllegalStateException("Could not set proxy handler [" + handler + "] for proxy object [" + result + "] for proxy of type [" + type + "]");
        return (T) result;
    }
}
public class Settings { /** * Gets setting or throws an IllegalArgumentException if not found * @ param name of setting to search for * @ return found value * @ throws IllegalArgumentException in case setting is not present */ private Object get ( String name ) { } }
Object value = find ( name ) ; if ( value == null ) { throw new IllegalArgumentException ( "Setting: '" + name + "', not found!" ) ; } return value ;
public class DateTimeExpression {
    /**
     * Create a month expression (range 1-12 / JAN-DEC).
     *
     * @return month
     */
    public NumberExpression<Integer> month() {
        // Lazily build and cache the month expression on first use.
        // NOTE(review): this lazy init is not synchronized — presumably these
        // expression objects are thread-confined; confirm with callers.
        if (month == null) {
            month = Expressions.numberOperation(Integer.class, Ops.DateTimeOps.MONTH, mixin);
        }
        return month;
    }
}
public class RaftServiceManager { /** * Compacts logs up to the given index . * @ param compactIndex the index to which to compact logs */ private void compactLogs ( long compactIndex ) { } }
raft . getThreadContext ( ) . execute ( ( ) -> { logger . debug ( "Compacting logs up to index {}" , compactIndex ) ; try { raft . getLog ( ) . compact ( compactIndex ) ; } catch ( Exception e ) { logger . error ( "An exception occurred during log compaction: {}" , e ) ; } finally { this . compactFuture . complete ( null ) ; this . compactFuture = null ; // Immediately attempt to take new snapshots since compaction is already run after a time interval . takeSnapshots ( false , false ) ; } } ) ;
public class LinkedList { /** * Removes the element from the list and saves the element data structure for later reuse . * @ param element The item which is to be removed from the list */ public void remove ( Element < T > element ) { } }
if ( element . next == null ) { last = element . previous ; } else { element . next . previous = element . previous ; } if ( element . previous == null ) { first = element . next ; } else { element . previous . next = element . next ; } size -- ; element . clear ( ) ; available . push ( element ) ;
public class ClassLoaderUtils { /** * Locates the resource stream with the specified name . For Java 2 callers , * the current thread ' s context class loader is preferred , falling back on * the class loader of the caller when the current thread ' s context is not * set , or the caller is pre Java 2 . If the callerClassLoader is null , then * fall back on the system class loader . * @ param name the name of the resource * @ param callerClassLoader the calling class loader context * @ return the resulting < code > InputStream < / code > object */ public static InputStream getResourceAsStream ( String name , ClassLoader callerClassLoader ) { } }
_checkResourceName ( name ) ; InputStream stream = null ; ClassLoader loader = getContextClassLoader ( ) ; if ( loader != null ) { stream = loader . getResourceAsStream ( name ) ; } if ( stream == null ) { if ( callerClassLoader != null ) { stream = callerClassLoader . getResourceAsStream ( name ) ; } else { stream = ClassLoader . getSystemResourceAsStream ( name ) ; } } return stream ;
public class ResourceLocator {
    /**
     * Locates a resource. First tries to construct a URL from the given
     * class's load path; if that fails, or the constructed URL cannot
     * actually be read, falls back on the class's own ClassLoader
     * CLASSPATH lookup.
     */
    public static URL getResource(Class<?> c, String resName) {
        URL url = constructResourceURL(c, resName);
        if (url == null) return c.getResource(resName);
        // Probe the URL by opening it and reading one byte; if the probe
        // fails the URL is considered unusable and the ClassLoader wins.
        InputStream is = null;
        try {
            is = url.openStream();
            is.read();
        } catch (Exception e) {
            return c.getResource(resName);
        } finally {
            try {
                if (is != null) is.close();
            } catch (Exception e) {
                // best-effort close; deliberately ignored
            }
        }
        return url;
    }
}
public class ApacheHttpClientDelegate {
    // Adds the default Accept and JSON Content-Type headers to the request
    // and returns the same (mutated) request for call chaining.
    private HttpUriRequest addDefaultHeaders(HttpUriRequest req) {
        req.addHeader(HttpHeaders.ACCEPT, "*/*");
        req.addHeader(HttpHeaders.CONTENT_TYPE, "application/json");
        return req;
    }
}
public class MyViewsTabBar {
    /**
     * Returns all the registered {@link MyViewsTabBar} descriptors.
     */
    public static DescriptorExtensionList<MyViewsTabBar, Descriptor<MyViewsTabBar>> all() {
        // NOTE(review): Jenkins.getInstance() can be null very early or late
        // in the lifecycle, which would NPE here — confirm callers only run
        // while Jenkins is fully up.
        return Jenkins.getInstance().<MyViewsTabBar, Descriptor<MyViewsTabBar>>getDescriptorList(MyViewsTabBar.class);
    }
}
public class CoreServiceImpl {
    /**
     * Injects a ServiceReference for the required/dynamic WsLocationAdmin
     * service. Stored via an atomic holder because the DS reference policy
     * is dynamic (the reference can be swapped at runtime).
     */
    @Reference(policy = ReferencePolicy.DYNAMIC)
    protected void setLocation(WsLocationAdmin locRef) {
        locServiceRef.set(locRef);
    }
}
public class JmxServer { /** * Register the object parameter for exposure with JMX . The object passed in must have a { @ link JmxResource } * annotation or must implement { @ link JmxSelfNaming } . */ public synchronized ObjectName register ( Object obj ) throws JMException { } }
if ( mbeanServer == null ) { throw new JMException ( "JmxServer has not be started" ) ; } ObjectName objectName = ObjectNameUtil . makeObjectName ( obj ) ; ReflectionMbean mbean ; try { mbean = new ReflectionMbean ( obj , getObjectDescription ( obj ) ) ; } catch ( Exception e ) { throw createJmException ( "Could not build MBean object for: " + obj , e ) ; } doRegister ( objectName , mbean ) ; return objectName ;
public class DFA { /** * Calculates the maximum length of accepted string . Returns Integer . MAX _ VALUE * if length is infinite . For " if | while " returns 5 . For " a + " returns Integer . MAX _ VALUE . * @ return */ public int maxDepth ( ) { } }
Map < DFAState < T > , Integer > indexOf = new NumMap < > ( ) ; Map < DFAState < T > , Long > depth = new NumMap < > ( ) ; Deque < DFAState < T > > stack = new ArrayDeque < > ( ) ; maxDepth ( root , indexOf , stack , depth ) ; long d = depth . get ( root ) ; assert d >= 0 ; if ( d >= Integer . MAX_VALUE ) { return Integer . MAX_VALUE ; } else { return ( int ) d ; }
public class Security {
    /**
     * Returns true if the requested attribute value is supported;
     * otherwise, returns false.
     *
     * @param attribute the attribute name ("KeySize" or "ImplementedIn")
     * @param value the requested value
     * @param prop the value the provider advertises
     */
    private static boolean isConstraintSatisfied(String attribute, String value, String prop) {
        // For KeySize, prop is the max key size the provider supports for a
        // specific <crypto_service>.<algorithm>.
        if (attribute.equalsIgnoreCase("KeySize")) {
            // Return the comparison directly rather than via an if/else that
            // returns boolean constants (idiomatic simplification).
            return Integer.parseInt(value) <= Integer.parseInt(prop);
        }
        // For ImplementedIn, prop is the implementation type for a specific
        // <crypto service>.<algorithm>.
        if (attribute.equalsIgnoreCase("ImplementedIn")) {
            return value.equalsIgnoreCase(prop);
        }
        // Unknown attributes are not satisfied.
        return false;
    }
}
public class LogLevelConverter { /** * Converts log level to a number . * @ param level a log level to convert . * @ return log level number value . */ public static int toInteger ( LogLevel level ) { } }
if ( level == LogLevel . Fatal ) return 1 ; if ( level == LogLevel . Error ) return 2 ; if ( level == LogLevel . Warn ) return 3 ; if ( level == LogLevel . Info ) return 4 ; if ( level == LogLevel . Debug ) return 5 ; if ( level == LogLevel . Trace ) return 6 ; return 0 ;
public class Nodes {
    /**
     * Extracts a Node's name, namespace URI (if any) and prefix as a QName.
     *
     * @param n the DOM node to describe
     * @return a QName built from local name/namespace/prefix when the node
     *         has a local name, otherwise one built from the node name only
     */
    public static QName getQName(Node n) {
        String localName = n.getLocalName();
        if (localName == null) {
            // Level-1 (non-namespace-aware) nodes only have a node name.
            return new QName(n.getNodeName());
        }
        String prefix = n.getPrefix();
        if (prefix == null) {
            prefix = XMLConstants.DEFAULT_NS_PREFIX;
        }
        return new QName(n.getNamespaceURI(), localName, prefix);
    }
}
public class OptionSet {
    /**
     * Add a value option with the given key and separator, no details, and
     * the default prefix and multiplicity.
     *
     * @param key the key for the option
     * @param separator the separator for the option
     * @return the set instance itself (to support invocation chaining for
     *         {@code addOption()} methods)
     * @throws IllegalArgumentException if {@code key} is null or already
     *         defined, or if {@code separator} is null
     */
    public OptionSet addOption(String key, Options.Separator separator) {
        // Delegates to the full overload with flags false/true and the
        // default multiplicity.
        // NOTE(review): the meaning of the two boolean positions is taken
        // from the overload's parameter order — confirm against that method.
        return addOption(key, false, separator, true, defaultMultiplicity);
    }
}
public class DiscretizedCircle {
    /**
     * Computes pixel offsets describing a discretized circle of the given
     * radius for an image with the given row stride.
     *
     * @param radius the circle radius in pixels
     * @param imgWidth the row stride of the image
     * @return offsets (y * imgWidth + x) of the circle's pixels, with
     *         consecutive duplicate samples removed
     */
    public static int[] imageOffsets(double radius, int imgWidth) {
        final double twoPi = Math.PI * 2.0;
        final double circumference = twoPi * radius;
        // Sample roughly once per circumference pixel, rounded down to a
        // multiple of 4 so the four axis points land exactly on samples.
        int sampleCount = (int) Math.ceil(circumference);
        sampleCount = sampleCount - sampleCount % 4;
        // NOTE(review): for radius < ~0.64 sampleCount is 0 and the step
        // becomes infinite; the loop then runs a single iteration and an
        // empty array is returned, exactly as the previous implementation.
        final double step = twoPi / sampleCount;
        final int[] buffer = new int[(int) Math.ceil(circumference)];
        int used = 0;
        int lastOffset = 0;
        for (double angle = 0; angle < twoPi; angle += step) {
            int x = (int) Math.round(Math.cos(angle) * radius);
            int y = (int) Math.round(Math.sin(angle) * radius);
            int offset = y * imgWidth + x;
            // Skip consecutive samples that fall on the same pixel.
            if (offset != lastOffset) {
                buffer[used++] = offset;
            }
            lastOffset = offset;
        }
        if (used == buffer.length) {
            return buffer;
        }
        int[] trimmed = new int[used];
        System.arraycopy(buffer, 0, trimmed, 0, used);
        return trimmed;
    }
}
public class PerfStats {
    /**
     * Formats all collected stats in the requested output format.
     *
     * @param format serialization format (JSON, or the default bracketed list)
     * @return serialized stats
     */
    public static String getAllStats(OutputFormat format) {
        StringBuilder sb = new StringBuilder();
        boolean first = true;
        Collection<PerfStatEntry> stats = perfStats.get().values();
        switch (format) {
            case JSON:
                sb.append('{');
                for (PerfStatEntry entry : stats) {
                    // Entries that were never hit are omitted.
                    if (entry.count > 0) {
                        if (first) {
                            first = false;
                        } else {
                            sb.append(',');
                        }
                        sb.append('"').append(entry.name).append("\":");
                        // Errored entries serialize as JSON null.
                        if (entry.isErr)
                            sb.append("null");
                        else
                            sb.append(entry.total);
                    }
                }
                sb.append('}');
                break;
            default:
                // Default: "[a, b, c]" using each entry's toString().
                sb.append("[");
                for (PerfStatEntry entry : stats) {
                    if (entry.count > 0) {
                        if (first) {
                            first = false;
                        } else {
                            sb.append(", ");
                        }
                        sb.append(entry.toString());
                    }
                }
                sb.append("]");
                break;
        }
        return sb.toString();
    }
}
public class CmsSearchTab { /** * Handles the change event of the locale select box . < p > * @ param event the change event */ @ UiHandler ( "m_localeSelection" ) protected void onLocaleChange ( ValueChangeEvent < String > event ) { } }
String value = event . getValue ( ) ; if ( CmsStringUtil . isEmptyOrWhitespaceOnly ( value ) || value . equals ( NOT_SET_OPTION_VALUE ) ) { value = m_currentLocale ; } m_tabHandler . setLocale ( value ) ;
public class UnitResponse {
    /**
     * Converts the payload held in {@code data} to a {@link Page}.
     *
     * @return the page parsed from a String/JSON payload, or the payload
     *         itself when it already is a Page
     * @throws RuntimeException when the payload cannot represent a page
     */
    @SuppressWarnings("unused")
    public Page dataToPage() {
        if (data instanceof String) {
            return Page.create(data.toString());
        }
        if (data instanceof JSONObject) {
            return Page.create((JSONObject) data);
        }
        if (data instanceof Page) {
            return (Page) data;
        }
        // User-facing message text intentionally left unchanged.
        throw new RuntimeException(toString() + " 不是page!");
    }
}
public class SingleSessionIoHandlerDelegate {
    /**
     * Delegates the method call to the
     * {@link SingleSessionIoHandler#sessionClosed()} method of the handler
     * assigned to this session.
     */
    public void sessionClosed(IoSession session) throws Exception {
        // The per-session handler was stashed under the HANDLER attribute; a
        // missing attribute would surface here as a NullPointerException.
        SingleSessionIoHandler handler = (SingleSessionIoHandler) session.getAttribute(HANDLER);
        handler.sessionClosed();
    }
}
public class AbstractHttpFileBuilder {
    /**
     * Sets the {@code "cache-control"} header. This method is a shortcut of:
     * <pre>{@code
     * builder.setHeader(HttpHeaderNames.CACHE_CONTROL, cacheControl);
     * }</pre>
     *
     * @throws NullPointerException if {@code cacheControl} is null
     */
    public final B cacheControl(CacheControl cacheControl) {
        requireNonNull(cacheControl, "cacheControl");
        return setHeader(HttpHeaderNames.CACHE_CONTROL, cacheControl);
    }
}
public class OSGiUtil {
    /**
     * Installs a bundle from a stream. The stream is first copied to a
     * temporary file and installed from there; the temp file is always
     * deleted afterwards, so the bundle is not stored physically on the
     * system. When {@code checkExistence} is set, an existing bundle is
     * unloaded first (handled by the delegate overload).
     *
     * @param context the OSGi bundle context
     * @param bundleIS stream containing the bundle bytes
     * @param closeStream whether to close {@code bundleIS} after copying
     * @param checkExistence when true, check for and replace an existing bundle
     * @return the installed bundle
     * @throws IOException if the stream cannot be copied
     * @throws BundleException if installation fails
     */
    public static Bundle installBundle(BundleContext context, InputStream bundleIS, boolean closeStream, boolean checkExistence) throws IOException, BundleException {
        // store locally to test the bundle
        String name = System.currentTimeMillis() + ".tmp";
        Resource dir = SystemUtil.getTempDirectory();
        Resource tmp = dir.getRealResource(name);
        // Avoid clobbering an existing temp file by prefixing a counter.
        int count = 0;
        while (tmp.exists())
            tmp = dir.getRealResource((count++) + "_" + name);
        IOUtil.copy(bundleIS, tmp, closeStream);
        try {
            return installBundle(context, tmp, checkExistence);
        } finally {
            // Best-effort cleanup of the temporary copy.
            tmp.delete();
        }
    }
}
public class GetPlaceTrends {
    /**
     * Usage: java twitter4j.examples.trends.GetPlaceTrends [WOEID=1]
     * <p>
     * Defaults to WOEID 1 (worldwide) when no argument is given.
     * (The previous doc said the default was 0, but the code uses 1.)
     *
     * @param args optional WOEID
     */
    public static void main(String[] args) {
        try {
            // Default to WOEID 1 (worldwide) when no argument is supplied.
            int woeid = args.length > 0 ? Integer.parseInt(args[0]) : 1;
            Twitter twitter = new TwitterFactory().getInstance();
            Trends trends = twitter.getPlaceTrends(woeid);
            System.out.println("Showing trends for " + trends.getLocation().getName());
            for (Trend trend : trends.getTrends()) {
                System.out.println(String.format("%s (tweet_volume: %d)", trend.getName(), trend.getTweetVolume()));
            }
            System.out.println("done.");
            System.exit(0);
        } catch (TwitterException te) {
            te.printStackTrace();
            System.out.println("Failed to get trends: " + te.getMessage());
            System.exit(-1);
        } catch (NumberFormatException nfe) {
            // Thrown by Integer.parseInt above when args[0] is not numeric.
            nfe.printStackTrace();
            System.out.println("WOEID must be number");
            System.exit(-1);
        }
    }
}
public class ByteBuddyExtension { /** * Returns the method name transformer to use . * @ return The method name transformer to use . */ public MethodNameTransformer getMethodNameTransformer ( ) { } }
return suffix == null || suffix . length ( ) == 0 ? MethodNameTransformer . Suffixing . withRandomSuffix ( ) : new MethodNameTransformer . Suffixing ( suffix ) ;
public class CmsEditSiteForm {
    /**
     * Checks if an alias was entered twice.<p>
     *
     * @param aliasName alias to check
     * @return true if it occurs more than once among the alias rows
     */
    boolean isDoubleAlias(String aliasName) {
        CmsSiteMatcher testAlias = new CmsSiteMatcher(aliasName);
        int count = 0;
        // Count how many alias form rows normalize to the same site matcher;
        // comparing matchers (not raw strings) makes equivalent spellings match.
        for (Component c : m_aliases) {
            if (c instanceof CmsRemovableFormRow<?>) {
                String alName = (String) ((CmsRemovableFormRow<? extends AbstractField<?>>) c).getInput().getValue();
                if (testAlias.equals(new CmsSiteMatcher(alName))) {
                    count++;
                }
            }
        }
        // The alias itself is one occurrence; more than one means duplicate.
        return count > 1;
    }
}
public class Sanitizers {
    /**
     * Converts the input to HTML by entity escaping, stripping tags in
     * already-sanitized HTML content, so the result can safely be embedded
     * in an HTML attribute value.
     */
    public static String escapeHtmlAttribute(SoyValue value) {
        value = normalizeNull(value);
        if (isSanitizedContentOfKind(value, SanitizedContent.ContentKind.HTML)) {
            // |escapeHtmlAttribute should only be used on attribute values that cannot have tags.
            return stripHtmlTags(value.coerceToString(), null, true);
        }
        // Plain values go through the string-based escaping overload.
        return escapeHtmlAttribute(value.coerceToString());
    }
}
public class FlowCatalog {
    /**
     * Persists the {@link Spec} into the {@link SpecStore} and, when
     * {@code triggerListener} is true, notifies every
     * {@link SpecCatalogListener}.
     *
     * @param spec the spec to add (must be a FlowSpec; see the cast below)
     * @param triggerListener true if listeners should be notified
     * @return map from listener name to that listener's AddSpecResponse
     */
    public Map<String, AddSpecResponse> put(Spec spec, boolean triggerListener) {
        Map<String, AddSpecResponse> responseMap = new HashMap<>();
        try {
            Preconditions.checkState(state() == State.RUNNING, String.format("%s is not running.", this.getClass().getName()));
            Preconditions.checkNotNull(spec);
            long startTime = System.currentTimeMillis();
            log.info(String.format("Adding FlowSpec with URI: %s and Config: %s", spec.getUri(), ((FlowSpec) spec).getConfigAsProperties()));
            specStore.addSpec(spec);
            metrics.updatePutSpecTime(startTime);
            if (triggerListener) {
                // Fan the spec out to all listeners and collect each
                // successful listener's response keyed by listener name;
                // failed callbacks are not included in the result.
                AddSpecResponse<CallbacksDispatcher.CallbackResults<SpecCatalogListener, AddSpecResponse>> response = this.listeners.onAddSpec(spec);
                for (Map.Entry<SpecCatalogListener, CallbackResult<AddSpecResponse>> entry : response.getValue().getSuccesses().entrySet()) {
                    responseMap.put(entry.getKey().getName(), entry.getValue().getResult());
                }
            }
        } catch (IOException e) {
            throw new RuntimeException("Cannot add Spec to Spec store: " + spec, e);
        }
        return responseMap;
    }
}
public class UpdateCompanyNetworkConfigurationRequestMarshaller {
    /**
     * Marshalls the given request object field-by-field into the protocol
     * marshaller.
     *
     * @throws SdkClientException if the request is null or marshalling fails
     */
    public void marshall(UpdateCompanyNetworkConfigurationRequest updateCompanyNetworkConfigurationRequest, ProtocolMarshaller protocolMarshaller) {
        if (updateCompanyNetworkConfigurationRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(updateCompanyNetworkConfigurationRequest.getFleetArn(), FLEETARN_BINDING);
            protocolMarshaller.marshall(updateCompanyNetworkConfigurationRequest.getVpcId(), VPCID_BINDING);
            protocolMarshaller.marshall(updateCompanyNetworkConfigurationRequest.getSubnetIds(), SUBNETIDS_BINDING);
            protocolMarshaller.marshall(updateCompanyNetworkConfigurationRequest.getSecurityGroupIds(), SECURITYGROUPIDS_BINDING);
        } catch (Exception e) {
            // Wrap any failure in the SDK's client exception, keeping the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class WebSocketNode {
    /**
     * Gets the total number of online users.
     *
     * @return a future completing with the user count
     */
    public CompletableFuture<Integer> getUserSize() {
        // Purely local node (no cluster address source): answer directly
        // from the local engine.
        if (this.localEngine != null && this.sncpNodeAddresses == null) {
            return CompletableFuture.completedFuture(this.localEngine.getLocalUserSize());
        }
        // Cluster mode: count the distinct user keys in the shared source.
        tryAcquireSemaphore();
        CompletableFuture<Integer> rs = this.sncpNodeAddresses.queryKeysStartsWithAsync(SOURCE_SNCP_USERID_PREFIX).thenApply(v -> v.size());
        // NOTE(review): if queryKeysStartsWithAsync throws synchronously, an
        // acquired semaphore is never released — confirm whether that can happen.
        if (semaphore != null) rs.whenComplete((r, e) -> releaseSemaphore());
        return rs;
    }
}
public class AmazonEC2Client {
    /**
     * Deletes a route from a Client VPN endpoint. You can only delete routes
     * that you manually added using the <b>CreateClientVpnRoute</b> action;
     * routes added automatically when associating a subnet must be removed
     * by disassociating the target subnet from the Client VPN endpoint.
     *
     * @param request the DeleteClientVpnRoute request
     * @return result of the DeleteClientVpnRoute operation returned by the service
     * @sample AmazonEC2.DeleteClientVpnRoute
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/DeleteClientVpnRoute" target="_top">AWS API Documentation</a>
     */
    @Override
    public DeleteClientVpnRouteResult deleteClientVpnRoute(DeleteClientVpnRouteRequest request) {
        // Run the standard pre-execution hooks, then dispatch.
        request = beforeClientExecution(request);
        return executeDeleteClientVpnRoute(request);
    }
}
public class DirContextAdapter {
    /**
     * {@inheritDoc}
     * <p>
     * Builds the list of LDAP modifications by diffing the updated attributes
     * against the originals. Returns an empty array outside update mode.
     */
    @Override
    public ModificationItem[] getModificationItems() {
        if (!updateMode) {
            return new ModificationItem[0];
        }
        List<ModificationItem> tmpList = new LinkedList<ModificationItem>();
        NamingEnumeration<? extends Attribute> attributesEnumeration = null;
        try {
            attributesEnumeration = updatedAttrs.getAll();
            // find attributes that have been changed, removed or added
            while (attributesEnumeration.hasMore()) {
                NameAwareAttribute oneAttr = (NameAwareAttribute) attributesEnumeration.next();
                collectModifications(oneAttr, tmpList);
            }
        } catch (NamingException e) {
            throw LdapUtils.convertLdapException(e);
        } finally {
            // Always release the enumeration, even on failure.
            closeNamingEnumeration(attributesEnumeration);
        }
        if (log.isDebugEnabled()) {
            log.debug("Number of modifications:" + tmpList.size());
        }
        return tmpList.toArray(new ModificationItem[tmpList.size()]);
    }
}
public class VoiceApi {
    /**
     * Performs a single-step conference to the specified destination. This
     * adds the destination to the existing call, creating a conference if
     * necessary.
     *
     * @param connId the connection ID of the call to conference
     * @param destination the number to add to the call
     * @param userData key/value data to include with the call (optional)
     */
    public void singleStepConference(String connId, String destination, KeyValueCollection userData) throws WorkspaceApiException {
        // Delegate to the full overload with all optional parameters unset.
        this.singleStepConference(connId, destination, null, userData, null, null);
    }
}
public class WebSink { /** * Helper to prefix metric names , convert metric value to double and return as map * @ param prefix * @ param metrics * @ return Map of metric name to metric value */ static Map < String , Double > processMetrics ( String prefix , Iterable < MetricsInfo > metrics ) { } }
Map < String , Double > map = new HashMap < > ( ) ; for ( MetricsInfo r : metrics ) { try { map . put ( prefix + r . getName ( ) , Double . valueOf ( r . getValue ( ) ) ) ; } catch ( NumberFormatException ne ) { LOG . log ( Level . SEVERE , "Could not parse metric, Name: " + r . getName ( ) + " Value: " + r . getValue ( ) , ne ) ; } } return map ;
public class SuppressCode { /** * Suppress all fields for these classes . */ public static synchronized void suppressField ( Class < ? > [ ] classes ) { } }
if ( classes == null || classes . length == 0 ) { throw new IllegalArgumentException ( "You must supply at least one class." ) ; } for ( Class < ? > clazz : classes ) { suppressField ( clazz . getDeclaredFields ( ) ) ; }
public class DevicesInner {
    /**
     * Scans for updates on a data box edge/gateway device.
     *
     * @param deviceName The device name.
     * @param resourceGroupName The resource group name.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<Void> scanForUpdatesAsync(String deviceName, String resourceGroupName, final ServiceCallback<Void> serviceCallback) {
        // Thin async adapter: the ...WithServiceResponseAsync variant performs
        // the actual request (and parameter validation); this wrapper only
        // bridges its response stream to the callback-based ServiceFuture API.
        return ServiceFuture.fromResponse(scanForUpdatesWithServiceResponseAsync(deviceName, resourceGroupName), serviceCallback);
    }
}
public class Scope { /** * If { @ code key } is not already associated with a value , associates it with { @ code value } . * @ return the original value , or { @ code null } if there was no value associated */ public < V > V putIfAbsent ( Key < V > key , V value ) { } }
requireNonNull ( key ) ; requireNonNull ( value ) ; if ( canStore ( key ) ) { @ SuppressWarnings ( "unchecked" ) V existingValue = ( V ) entries . get ( key ) ; if ( value == RECURSION_SENTINEL ) { throw new ConcurrentModificationException ( "Cannot access scope key " + key + " while computing its value" ) ; } else if ( existingValue == null ) { entries . put ( key , value ) ; } return existingValue ; } else if ( parent != null ) { return parent . putIfAbsent ( key , value ) ; } else { throw new IllegalStateException ( "Not in " + key . level ( ) . toString ( ) . toLowerCase ( ) + " scope" ) ; }
public class DFA2ETFWriter {
    /**
     * Write DFA specific parts in the ETF:
     * - initial state,
     * - the valuations for the state 'id',
     * - the letters in the alphabet,
     * - the transitions,
     * - the state labels (rejecting/accepting),
     * - the mapping from states to state labels.
     *
     * @param pw the Writer.
     * @param dfa the DFA to write.
     * @param inputs the alphabet.
     */
    @Override
    protected void writeETF(PrintWriter pw, DFA<?, I> dfa, Alphabet<I> inputs) {
        // Pure delegation: the actual ETF emission for all of the parts listed
        // above is implemented in writeETFInternal.
        writeETFInternal(pw, dfa, inputs);
    }
}
public class CmsXmlContentEditor {
    /**
     * Makes sure the requested locale node is present in the content document
     * by either copying an existing locale node or creating an empty one.<p>
     *
     * If the locale still cannot be provided (e.g. the content has no locales
     * at all and creation failed), the content's first available locale is
     * returned instead of the requested one.
     *
     * @param locale the requested locale
     * @return the locale actually available in the content (may differ from the requested one)
     */
    protected Locale ensureLocale(Locale locale) {
        // get the default locale for the resource
        List<Locale> locales = OpenCms.getLocaleManager().getDefaultLocales(getCms(), getParamResource());
        if (m_content != null) {
            if (!m_content.hasLocale(locale)) {
                try {
                    // to copy anything we need at least one locale
                    if ((m_content.getLocales().size() > 0)) {
                        // required locale not available, check if an existing default locale should be copied as "template"
                        try {
                            // a list of possible default locales has been set as property, try to find a match
                            m_content.copyLocale(locales, locale);
                        } catch (CmsException e) {
                            // no matching default locale to copy from: fall back to an empty locale node
                            m_content.addLocale(getCms(), locale);
                        }
                    } else {
                        // content has no locales yet: create an empty node for the requested locale
                        m_content.addLocale(getCms(), locale);
                    }
                    // persist the modified content
                    writeContent();
                } catch (CmsException e) {
                    // creation/persist failed; log and fall through to the fallback below
                    LOG.error(e.getMessageContainer(), e);
                }
            }
            if (!m_content.hasLocale(locale)) {
                // value may have changed because of the copy operation
                locale = m_content.getLocales().get(0);
            }
        }
        return locale;
    }
}
public class PTBConstituent {
    /**
     * setter for nullElement - sets the value of the {@code nullElement}
     * String feature on this annotation via the low-level CAS API.
     *
     * <p>UIMA JCas generated code — keep token-identical with the generator output.
     *
     * @generated
     * @param v value to set into the feature
     */
    public void setNullElement(String v) {
        // Guard only active when feature-OK testing is enabled; reports a
        // missing type-system feature instead of failing with an NPE below.
        if (PTBConstituent_Type.featOkTst && ((PTBConstituent_Type) jcasType).casFeat_nullElement == null)
            jcasType.jcas.throwFeatMissing("nullElement", "de.julielab.jules.types.PTBConstituent");
        jcasType.ll_cas.ll_setStringValue(addr, ((PTBConstituent_Type) jcasType).casFeatCode_nullElement, v);
    }
}
public class AndroidMobileCommandHelper { /** * This method forms a { @ link Map } of parameters for the element * value replacement . It is used against input elements * @ param remoteWebElement an instance which contains an element ID * @ param value a new value * @ return a key - value pair . The key is the command name . The value is a { @ link Map } command arguments . */ public static Map . Entry < String , Map < String , ? > > replaceElementValueCommand ( RemoteWebElement remoteWebElement , String value ) { } }
String [ ] parameters = new String [ ] { "id" , "value" } ; Object [ ] values = new Object [ ] { remoteWebElement . getId ( ) , value } ; return new AbstractMap . SimpleEntry < > ( REPLACE_VALUE , prepareArguments ( parameters , values ) ) ;