signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class HttpSerializer { /** * Format the list of implemented aggregators * @ param aggregators The list of aggregation functions * @ return A ChannelBuffer object to pass on to the caller * @ throws BadRequestException if the plugin has not implemented this method */ public ChannelBuffer formatAggregatorsV1 ( final Set < String > aggregators ) { } }
throw new BadRequestException ( HttpResponseStatus . NOT_IMPLEMENTED , "The requested API endpoint has not been implemented" , this . getClass ( ) . getCanonicalName ( ) + " has not implemented formatAggregatorsV1" ) ;
public class SystemPropertyUtils { /** * Search the System properties and environment variables for a value with the * provided key . Environment variables in { @ code UPPER _ CASE } style are allowed where * System properties would normally be { @ code lower . case } . * @ param key the key to resolve * @ param defaultValue the default value * @ param text optional extra context for an error message if the key resolution fails * ( e . g . if System properties are not accessible ) * @ return a static property value or null of not found */ public static String getProperty ( String key , String defaultValue , String text ) { } }
try { String propVal = System . getProperty ( key ) ; if ( propVal == null ) { // Fall back to searching the system environment . propVal = System . getenv ( key ) ; } if ( propVal == null ) { // Try with underscores . String name = key . replace ( '.' , '_' ) ; propVal = System . getenv ( name ) ; } if ( propVal == null ) { // Try uppercase with underscores as well . String name = key . toUpperCase ( Locale . ENGLISH ) . replace ( '.' , '_' ) ; propVal = System . getenv ( name ) ; } if ( propVal != null ) { return propVal ; } } catch ( Throwable ex ) { System . err . println ( "Could not resolve key '" + key + "' in '" + text + "' as system property or in environment: " + ex ) ; } return defaultValue ;
public class RestService { /** * For audit logging . */ protected UserAction getUserAction ( User user , String path , Object content , Map < String , String > headers ) { } }
Action action = getAction ( path , content , headers ) ; Entity entity = getEntity ( path , content , headers ) ; Long entityId = getEntityId ( path , content , headers ) ; String descrip = getEntityDescription ( path , content , headers ) ; if ( descrip . length ( ) > 1000 ) descrip = descrip . substring ( 0 , 999 ) ; UserAction userAction = new UserAction ( user . getCuid ( ) , action , entity , entityId , descrip ) ; userAction . setSource ( getSource ( ) ) ; return userAction ;
public class LegacySpy { /** * Alias for { @ link # expectBetween ( int , int , Threads , Query ) } with arguments 0 , 1 , { @ code threads } , { @ code queryType } * @ since 2.2 */ @ Deprecated public C expectAtMostOnce ( Threads threadMatcher , Query query ) { } }
return expect ( SqlQueries . atMostOneQuery ( ) . threads ( threadMatcher ) . type ( adapter ( query ) ) ) ;
public class KuznechikCipher { /** * Converting binary representation of a key to internal format * @ param key raw key * @ return key in internal format */ static KuzIntKey convertKey ( byte [ ] key ) { } }
if ( key . length != 32 ) { throw new RuntimeException ( "Key might be 32 bytes length" ) ; } KuzIntKey kuz = new KuzIntKey ( ) ; // w128 _ t c , x , y , z ; Kuz128 c = new Kuz128 ( ) , x = new Kuz128 ( ) , y = new Kuz128 ( ) , z = new Kuz128 ( ) ; for ( int i = 0 ; i < 16 ; i ++ ) { // this will be have to changed for little - endian systems // x . b [ i ] = key [ i ] ; // y . b [ i ] = key [ i + 16 ] ; x . getB ( ) [ i ] = key [ i ] ; y . getB ( ) [ i ] = key [ i + 16 ] ; } // kuz - > k [ 0 ] . q [ 0 ] = x . q [ 0 ] ; // kuz - > k [ 0 ] . q [ 1 ] = x . q [ 1 ] ; // kuz - > k [ 1 ] . q [ 0 ] = y . q [ 0 ] ; // kuz - > k [ 1 ] . q [ 1 ] = y . q [ 1 ] ; kuz . getK ( ) [ 0 ] . set ( x ) ; kuz . getK ( ) [ 1 ] . set ( y ) ; for ( int i = 1 ; i <= 32 ; i ++ ) { // C Value // c . q [ 0 ] = 0; // c . q [ 1 ] = 0; // c . b [ 15 ] = i ; / / load round in lsb // kuz _ l ( & c ) ; c . setQ ( 0 , 0 ) ; c . setQ ( 1 , 0 ) ; c . getB ( ) [ 15 ] = ( byte ) i ; KuznechikMath . kuz_l ( c ) ; // z . q [ 0 ] = x . q [ 0 ] ^ c . q [ 0 ] ; // z . q [ 1 ] = x . q [ 1 ] ^ c . q [ 1 ] ; // for ( j = 0 ; j < 16 ; j + + ) // z . b [ j ] = kuz _ pi [ z . b [ j ] ] ; // kuz _ l ( & z ) ; z . setQ ( 0 , x . getQ ( 0 ) ^ c . getQ ( 0 ) ) ; z . setQ ( 1 , x . getQ ( 1 ) ^ c . getQ ( 1 ) ) ; for ( int j = 0 ; j < 16 ; j ++ ) { z . getB ( ) [ j ] = KuznechikTables . kuz_pi [ ( z . getB ( ) [ j ] & 0xFF ) ] ; } KuznechikMath . kuz_l ( z ) ; // z . q [ 0 ] ^ = y . q [ 0 ] ; // z . q [ 1 ] ^ = y . q [ 1 ] ; z . setQ ( 0 , z . getQ ( 0 ) ^ y . getQ ( 0 ) ) ; z . setQ ( 1 , z . getQ ( 1 ) ^ y . getQ ( 1 ) ) ; // y . q [ 0 ] = x . q [ 0 ] ; // y . q [ 1 ] = x . q [ 1 ] ; y . set ( x ) ; // x . q [ 0 ] = z . q [ 0 ] ; // x . q [ 1 ] = z . q [ 1 ] ; x . set ( z ) ; // if ( ( i & 7 ) = = 0 ) { // kuz - > k [ ( i > > 2 ) ] . q [ 0 ] = x . q [ 0 ] ; // kuz - > k [ ( i > > 2 ) ] . q [ 1 ] = x . q [ 1 ] ; // kuz - > k [ ( i > > 2 ) + 1 ] . q [ 0 ] = y . q [ 0 ] ; // kuz - > k [ ( i > > 2 ) + 1 ] . 
q [ 1 ] = y . q [ 1 ] ; if ( ( i & 7 ) == 0 ) { kuz . getK ( ) [ ( i >> 2 ) ] . set ( x ) ; kuz . getK ( ) [ ( i >> 2 ) + 1 ] . set ( y ) ; } } return kuz ;
public class KvResponseBase { /** * Get a single , resolved object from this response . * The values will be converted to the supplied class using the * { @ link com . basho . riak . client . api . convert . Converter } returned from the { @ link com . basho . riak . client . api . convert . ConverterFactory } . * By default this will be the { @ link com . basho . riak . client . api . convert . JSONConverter } , * or no conversion at all if you pass in { @ code RiakObject . class } . If there are multiple * values present ( siblings ) , they will then be resolved using the * { @ link com . basho . riak . client . api . cap . ConflictResolver } returned by the { @ link com . basho . riak . client . api . cap . ConflictResolverFactory } . * @ param clazz the class to be converted to . * @ return the single , resolved value converted to the supplied class . * @ throws UnresolvedConflictException * @ see ConverterFactory * @ see Converter * @ see ConflictResolverFactory * @ see ConflictResolver */ public < T > T getValue ( Class < T > clazz ) throws UnresolvedConflictException { } }
Converter < T > converter = ConverterFactory . getInstance ( ) . getConverter ( clazz ) ; List < T > convertedValues = convertValues ( converter ) ; ConflictResolver < T > resolver = ConflictResolverFactory . getInstance ( ) . getConflictResolver ( clazz ) ; T resolved = resolver . resolve ( convertedValues ) ; if ( hasValues ( ) && resolved != null ) { VClock vclock = values . get ( 0 ) . getVClock ( ) ; AnnotationUtil . setVClock ( resolved , vclock ) ; } return resolved ;
public class SnackBar { /** * Set the text that the ActionButton is to display . * @ param text If null , then the ActionButton will be hidden . * @ return This SnackBar for chaining methods . */ public SnackBar actionText ( CharSequence text ) { } }
if ( TextUtils . isEmpty ( text ) ) mAction . setVisibility ( View . INVISIBLE ) ; else { mAction . setVisibility ( View . VISIBLE ) ; mAction . setText ( text ) ; } return this ;
public class CollatorHelper { /** * Create a collator that is based on the standard collator but sorts spaces * before dots , because spaces are more important word separators than dots . * Another example is the correct sorting of things like " 1.1 a " vs . " 1.1.1 b " * . This is the default collator used for sorting by default ! * @ param aLocale * The locale for which the collator is to be retrieved . May be * < code > null < / code > to indicate the usage of the default locale . * @ return The created { @ link RuleBasedCollator } and never < code > null < / code > . */ @ Nonnull @ ReturnsMutableCopy public static Collator getCollatorSpaceBeforeDot ( @ Nullable final Locale aLocale ) { } }
// Ensure to not pass null locale in final Locale aRealLocale = aLocale == null ? SystemHelper . getSystemLocale ( ) : aLocale ; // Always create a clone ! return ( Collator ) s_aCache . getFromCache ( aRealLocale ) . clone ( ) ;
public class UnsignedNumeric { /** * Writes an int in a variable - length format . Writes between one and five bytes . Smaller values take fewer bytes . * Negative numbers are not supported . * @ param i int to write */ public static void writeUnsignedInt ( ObjectOutput out , int i ) throws IOException { } }
while ( ( i & ~ 0x7F ) != 0 ) { out . writeByte ( ( byte ) ( ( i & 0x7f ) | 0x80 ) ) ; i >>>= 7 ; } out . writeByte ( ( byte ) i ) ;
public class NvdCveUpdater { /** * Initialize the executor services for download and processing of the NVD * CVE XML data . */ protected void initializeExecutorServices ( ) { } }
final int downloadPoolSize ; final int max = settings . getInt ( Settings . KEYS . MAX_DOWNLOAD_THREAD_POOL_SIZE , 3 ) ; if ( DOWNLOAD_THREAD_POOL_SIZE > max ) { downloadPoolSize = max ; } else { downloadPoolSize = DOWNLOAD_THREAD_POOL_SIZE ; } downloadExecutorService = Executors . newFixedThreadPool ( downloadPoolSize ) ; processingExecutorService = Executors . newFixedThreadPool ( PROCESSING_THREAD_POOL_SIZE ) ; LOGGER . debug ( "#download threads: {}" , downloadPoolSize ) ; LOGGER . debug ( "#processing threads: {}" , PROCESSING_THREAD_POOL_SIZE ) ;
public class ImageMiscOps { /** * Sets each value in the image to a value drawn from an uniform distribution that has a range of min & le ; X & lt ; max . * @ param img Image which is to be filled . Modified , * @ param rand Random number generator * @ param min Minimum value of the distribution , inclusive * @ param max Maximum value of the distribution , inclusive */ public static void fillUniform ( GrayF32 img , Random rand , float min , float max ) { } }
float range = max - min ; float [ ] data = img . data ; for ( int y = 0 ; y < img . height ; y ++ ) { int index = img . getStartIndex ( ) + y * img . getStride ( ) ; for ( int x = 0 ; x < img . width ; x ++ ) { data [ index ++ ] = rand . nextFloat ( ) * range + min ; } }
public class TimerServiceRegistry { /** * Registers timerServie under given id . In case timer service is already registered * with this id it will be overridden . * @ param id key used to get hold of the timer service instance * @ param timerService fully initialized TimerService instance */ public void registerTimerService ( String id , TimerService timerService ) { } }
if ( timerService instanceof GlobalTimerService ) { ( ( GlobalTimerService ) timerService ) . setTimerServiceId ( id ) ; } this . registeredServices . put ( id , timerService ) ;
public class RequestFactory { /** * Create new Check policies request . * @ param orgToken WhiteSource organization token . * @ param userKey user key uniquely identifying the account at white source . * @ param projects Projects status statement to check . * @ param product Name or WhiteSource service token of the product whose policies to check . * @ param productVersion Version of the product whose policies to check . * @ param requesterEmail Email of the WhiteSource user that requests to update WhiteSource . * @ return Newly created request to check policies application . * @ deprecated Use { @ link RequestFactory # newCheckPolicyComplianceRequest ( String , String , String , Collection , boolean , String ) } */ @ Deprecated public CheckPoliciesRequest newCheckPoliciesRequest ( String orgToken , String product , String productVersion , Collection < AgentProjectInfo > projects , String userKey , String requesterEmail ) { } }
return ( CheckPoliciesRequest ) prepareRequest ( new CheckPoliciesRequest ( projects ) , orgToken , requesterEmail , product , productVersion , userKey , false , false , null , null , null , null , null , null ) ;
public class SimpleModule { /** * Add the given { @ link PagingBehavior } to the module * @ param pagingBehavior the paging behavior */ public void addPagingBehavior ( PagingBehavior pagingBehavior ) { } }
checkInitialized ( ) ; // avoid adding the same type ( ! ) of behavior twice , error out if that ' s the case boolean behaviorTypeAdded = pagingBehaviors . stream ( ) . anyMatch ( pbh -> pbh . getClass ( ) . equals ( pagingBehavior . getClass ( ) ) ) ; if ( ! behaviorTypeAdded ) { pagingBehaviors . add ( pagingBehavior ) ; } else { throw new IllegalArgumentException ( "PagingBehavior of same type already added. Type:" + pagingBehavior . getClass ( ) . getSimpleName ( ) ) ; }
public class Logger { /** * Logs the specified message at the WARN level . * @ param msg the specified message */ public void warn ( final String msg ) { } }
if ( proxy . isWarnEnabled ( ) ) { if ( proxy instanceof LocationAwareLogger ) { ( ( LocationAwareLogger ) proxy ) . log ( null , FQCN , LocationAwareLogger . WARN_INT , msg , null , null ) ; } else { proxy . warn ( msg ) ; } }
public class CacheMapUtil { /** * retrial all the keys of the cached map * @ param key cache key * @ param kClazz the key ' s class * @ return all the keys of the cached map , or null if the key does not exist */ public static < T > Maybe < Set < T > > keys ( String key , Class < T > kClazz ) { } }
return keys ( CacheService . CACHE_CONFIG_BEAN , key , kClazz ) ;
public class Closeables { /** * Creates a stream that when closed will also close the underlying spliterator * @ param spliterator spliterator to back the stream and subsequently close * @ param parallel whether or not the returned stream is parallel or not * @ param < E > the type of the stream * @ return the stream to use */ public static < E > Stream < E > stream ( CloseableSpliterator < E > spliterator , boolean parallel ) { } }
Stream < E > stream = StreamSupport . stream ( spliterator , parallel ) ; stream . onClose ( spliterator :: close ) ; return stream ;
public class PerfectHashDictionaryStateCard { /** * Give the Graphviz dot representation of this automaton . States will also list the * number of suffixes ' under ' that state . * @ return */ @ Override public String toDot ( ) { } }
StringBuilder dotBuilder = new StringBuilder ( ) ; dotBuilder . append ( "digraph G {\n" ) ; for ( int state = 0 ; state < d_stateOffsets . size ( ) ; ++ state ) { for ( int trans = d_stateOffsets . get ( state ) ; trans < transitionsUpperBound ( state ) ; ++ trans ) dotBuilder . append ( String . format ( "%d -> %d [label=\"%c\"]\n" , state , d_transitionTo . get ( trans ) , d_transitionChars [ trans ] ) ) ; if ( d_finalStates . get ( state ) ) dotBuilder . append ( String . format ( "%d [peripheries=2,label=\"%d (%d)\"];\n" , state , state , d_stateNSuffixes . get ( state ) ) ) ; else dotBuilder . append ( String . format ( "%d [label=\"%d (%d)\"];\n" , state , state , d_stateNSuffixes . get ( state ) ) ) ; } dotBuilder . append ( "}" ) ; return dotBuilder . toString ( ) ;
public class TimeUnit { /** * 该方法用于更新timeBase使之具有上下文关联性 */ public void modifyTimeBase ( ) { } }
String [ ] time_grid = new String [ 6 ] ; time_grid = timeBaseText . split ( "-" ) ; String s = "" ; if ( _tp . tunit [ 0 ] != - 1 ) { s += Integer . toString ( _tp . tunit [ 0 ] ) ; } else { s += time_grid [ 0 ] ; } for ( int i = 1 ; i < 6 ; i ++ ) { s += "-" ; if ( _tp . tunit [ i ] != - 1 ) { s += Integer . toString ( _tp . tunit [ i ] ) ; } else { s += time_grid [ i ] ; } } timeBaseText = s ;
public class AWSDirectoryServiceClient { /** * Stops the directory sharing between the directory owner and consumer accounts . * @ param unshareDirectoryRequest * @ return Result of the UnshareDirectory operation returned by the service . * @ throws EntityDoesNotExistException * The specified entity could not be found . * @ throws InvalidTargetException * The specified shared target is not valid . * @ throws DirectoryNotSharedException * The specified directory has not been shared with this AWS account . * @ throws ClientException * A client exception has occurred . * @ throws ServiceException * An exception has occurred in AWS Directory Service . * @ sample AWSDirectoryService . UnshareDirectory * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / ds - 2015-04-16 / UnshareDirectory " target = " _ top " > AWS API * Documentation < / a > */ @ Override public UnshareDirectoryResult unshareDirectory ( UnshareDirectoryRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeUnshareDirectory ( request ) ;
public class CmsJspDateSeriesBean { /** * Returns the last event of this series . < p > * In case this is just a single event and not a series , this is identical to the date of the event . < p > * @ return the last event of this series */ public CmsJspInstanceDateBean getLast ( ) { } }
if ( ( m_lastEvent == null ) && ( m_dates != null ) && ( ! m_dates . isEmpty ( ) ) ) { m_lastEvent = new CmsJspInstanceDateBean ( ( Date ) m_dates . last ( ) . clone ( ) , CmsJspDateSeriesBean . this ) ; } return m_lastEvent ;
public class JobsImpl { /** * Lists the jobs that have been created under the specified job schedule . * @ param jobScheduleId The ID of the job schedule from which you want to get a list of jobs . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the PagedList & lt ; CloudJob & gt ; object wrapped in { @ link ServiceResponseWithHeaders } if successful . */ public Observable < ServiceResponseWithHeaders < Page < CloudJob > , JobListFromJobScheduleHeaders > > listFromJobScheduleSinglePageAsync ( final String jobScheduleId ) { } }
if ( this . client . batchUrl ( ) == null ) { throw new IllegalArgumentException ( "Parameter this.client.batchUrl() is required and cannot be null." ) ; } if ( jobScheduleId == null ) { throw new IllegalArgumentException ( "Parameter jobScheduleId is required and cannot be null." ) ; } if ( this . client . apiVersion ( ) == null ) { throw new IllegalArgumentException ( "Parameter this.client.apiVersion() is required and cannot be null." ) ; } final JobListFromJobScheduleOptions jobListFromJobScheduleOptions = null ; String filter = null ; String select = null ; String expand = null ; Integer maxResults = null ; Integer timeout = null ; UUID clientRequestId = null ; Boolean returnClientRequestId = null ; DateTime ocpDate = null ; String parameterizedHost = Joiner . on ( ", " ) . join ( "{batchUrl}" , this . client . batchUrl ( ) ) ; DateTimeRfc1123 ocpDateConverted = null ; if ( ocpDate != null ) { ocpDateConverted = new DateTimeRfc1123 ( ocpDate ) ; } return service . listFromJobSchedule ( jobScheduleId , this . client . apiVersion ( ) , this . client . acceptLanguage ( ) , filter , select , expand , maxResults , timeout , clientRequestId , returnClientRequestId , ocpDateConverted , parameterizedHost , this . client . userAgent ( ) ) . flatMap ( new Func1 < Response < ResponseBody > , Observable < ServiceResponseWithHeaders < Page < CloudJob > , JobListFromJobScheduleHeaders > > > ( ) { @ Override public Observable < ServiceResponseWithHeaders < Page < CloudJob > , JobListFromJobScheduleHeaders > > call ( Response < ResponseBody > response ) { try { ServiceResponseWithHeaders < PageImpl < CloudJob > , JobListFromJobScheduleHeaders > result = listFromJobScheduleDelegate ( response ) ; return Observable . just ( new ServiceResponseWithHeaders < Page < CloudJob > , JobListFromJobScheduleHeaders > ( result . body ( ) , result . headers ( ) , result . response ( ) ) ) ; } catch ( Throwable t ) { return Observable . error ( t ) ; } } } ) ;
public class ContextUtils { /** * < p > Takes the generic { @ link Object } of a context and returns * the actual { @ link Context } instance as a { @ link Fragment } if * it conforms . * @ param context * the { @ link Object } whose { @ link Context } instance is * to be discovered * < br > < br > * @ return the { @ link Fragment } instance of the given { @ link Object } * < br > < br > * @ throws ContextNotFoundException * if the { @ link Context } does not conform to a { @ link Fragment } * < br > < br > * @ since 1.0.0 */ public static Fragment asFragment ( Object context ) { } }
if ( ContextUtils . isFragment ( context ) ) return Fragment . class . cast ( context ) ; throw new ContextNotFoundException ( context . getClass ( ) , Fragment . class ) ;
public class ShowUserListMembership { /** * Usage : java twitter4j . examples . list . ShowUserListMembership [ list id ] [ user id ] * @ param args message */ public static void main ( String [ ] args ) { } }
if ( args . length < 2 ) { System . out . println ( "Usage: java twitter4j.examples.list.ShowUserListMembership [list id] [user id]" ) ; System . exit ( - 1 ) ; } try { Twitter twitter = new TwitterFactory ( ) . getInstance ( ) ; long listId = Long . parseLong ( args [ 0 ] ) ; UserList list = twitter . showUserList ( listId ) ; long userId = Integer . parseInt ( args [ 1 ] ) ; User user = twitter . showUser ( userId ) ; try { twitter . showUserListMembership ( listId , userId ) ; System . out . println ( "@" + user . getScreenName ( ) + " is in the list:" + list . getName ( ) ) ; } catch ( TwitterException te ) { if ( te . getStatusCode ( ) == 404 ) { System . out . println ( "@" + user . getScreenName ( ) + " is not in the list:" + list . getName ( ) ) ; } } System . exit ( 0 ) ; } catch ( TwitterException te ) { te . printStackTrace ( ) ; System . out . println ( "Failed to check user membership: " + te . getMessage ( ) ) ; System . exit ( - 1 ) ; }
public class RawScale2x { /** * Get the scale image data . Note this is the method that does the work * so it might take some time to process . * @ return An array of pixels 4 times the size of the input array containing * the smoothly scaled image */ public int [ ] getScaledData ( ) { } }
for ( int x = 0 ; x < width ; x ++ ) { for ( int y = 0 ; y < height ; y ++ ) { process ( x , y ) ; } } return dstImage ;
public class Base64 { /** * Decodes a char [ ] containing characters in the Base - N alphabet . * @ param pArray A byte array containing Base - N character data * @ param offset offset in the input array where the data starts * @ param length length of the data , starting at offset in the input array * @ param ctx { @ link Base64Context } or { @ link Base64ContextPooled } , pooled version is preferred * @ return a byte array containing binary data */ public Base64Context decode ( final char [ ] pArray , final int offset , final int length , Base64Context ctx ) throws FileParsingException { } }
if ( pArray == null || pArray . length == 0 ) { return null ; } decodeImpl ( pArray , offset , length , ctx ) ; decodeImpl ( pArray , 0 , EOF , ctx ) ; // Notify decoder of EOF . return ctx ;
public class HiveUtils { /** * Selects an appropriate field from the given Hive table schema to insert JSON data into if the feature is enabled * @ param settings Settings to read schema information from * @ return A FieldAlias object that projects the json source field into the select destination field */ static String discoverJsonFieldName ( Settings settings , FieldAlias alias ) { } }
Set < String > virtualColumnsToBeRemoved = new HashSet < String > ( HiveConstants . VIRTUAL_COLUMNS . length ) ; Collections . addAll ( virtualColumnsToBeRemoved , HiveConstants . VIRTUAL_COLUMNS ) ; List < String > columnNames = StringUtils . tokenize ( settings . getProperty ( HiveConstants . COLUMNS ) , "," ) ; Iterator < String > nameIter = columnNames . iterator ( ) ; List < String > columnTypes = StringUtils . tokenize ( settings . getProperty ( HiveConstants . COLUMNS_TYPES ) , ":" ) ; Iterator < String > typeIter = columnTypes . iterator ( ) ; String candidateField = null ; while ( nameIter . hasNext ( ) && candidateField == null ) { String columnName = nameIter . next ( ) ; String type = typeIter . next ( ) ; if ( "string" . equalsIgnoreCase ( type ) && ! virtualColumnsToBeRemoved . contains ( columnName ) ) { candidateField = columnName ; } } Assert . hasText ( candidateField , "Could not identify a field to insert JSON data into " + "from the given fields : {" + columnNames + "} of types {" + columnTypes + "}" ) ; // If the candidate field is aliased to something else , find the alias name and use that for the field name : candidateField = alias . toES ( candidateField ) ; return candidateField ;
public class spilloverpolicy { /** * Use this API to fetch all the spilloverpolicy resources that are configured on netscaler . */ public static spilloverpolicy [ ] get ( nitro_service service , options option ) throws Exception { } }
spilloverpolicy obj = new spilloverpolicy ( ) ; spilloverpolicy [ ] response = ( spilloverpolicy [ ] ) obj . get_resources ( service , option ) ; return response ;
public class UtlInvBase { /** * < p > Update invoice totals after its line has been changed / deleted * and taxes lines has been made * or after tax line has been changed ( Invoice basis ) . < / p > * @ param < T > invoice type * @ param pReqVars additional param * @ param pInv Invoice * @ param pAs accounting settings * @ param pInvTxMeth tax method code / data for purchase / sales invoice * @ throws Exception - an exception */ public final < T extends IInvoice > void updInvTots ( final Map < String , Object > pReqVars , final T pInv , final AccSettings pAs , final IInvTxMeth < T , ? > pInvTxMeth ) throws Exception { } }
String query = pInvTxMeth . lazyGetQuTotals ( ) ; query = query . replace ( ":ITSOWNER" , pInv . getItsId ( ) . toString ( ) ) ; if ( pInvTxMeth . getTblNmsTot ( ) . length == 5 ) { // sales / purchase : query = query . replace ( ":TGOODLN" , pInvTxMeth . getTblNmsTot ( ) [ 0 ] ) ; query = query . replace ( ":TSERVICELN" , pInvTxMeth . getTblNmsTot ( ) [ 1 ] ) ; query = query . replace ( ":TTAXLN" , pInvTxMeth . getTblNmsTot ( ) [ 2 ] ) ; } else { // returns : query = query . replace ( ":TGOODLN" , pInvTxMeth . getTblNmsTot ( ) [ 0 ] ) ; query = query . replace ( ":TTAXLN" , pInvTxMeth . getTblNmsTot ( ) [ 1 ] ) ; } String [ ] columns = new String [ ] { "SUBTOTAL" , "ITSTOTAL" , "TOTALTAXES" , "FOREIGNSUBTOTAL" , "FOREIGNTOTAL" , "FOREIGNTOTALTAXES" } ; Double [ ] totals = getSrvDatabase ( ) . evalDoubleResults ( query , columns ) ; if ( totals [ 0 ] == null ) { totals [ 0 ] = 0d ; } if ( totals [ 1 ] == null ) { totals [ 1 ] = 0d ; } if ( totals [ 2 ] == null ) { totals [ 2 ] = 0d ; } if ( totals [ 3 ] == null ) { totals [ 3 ] = 0d ; } if ( totals [ 4 ] == null ) { totals [ 4 ] = 0d ; } if ( totals [ 5 ] == null ) { totals [ 5 ] = 0d ; } if ( pInv . getPriceIncTax ( ) ) { pInv . setItsTotal ( BigDecimal . valueOf ( totals [ 1 ] ) . setScale ( pAs . getPricePrecision ( ) , pAs . getRoundingMode ( ) ) ) ; pInv . setTotalTaxes ( BigDecimal . valueOf ( totals [ 2 ] ) . setScale ( pAs . getPricePrecision ( ) , pAs . getSalTaxRoundMode ( ) ) ) ; pInv . setSubtotal ( pInv . getItsTotal ( ) . subtract ( pInv . getTotalTaxes ( ) ) ) ; pInv . setForeignTotal ( BigDecimal . valueOf ( totals [ 4 ] ) . setScale ( pAs . getPricePrecision ( ) , pAs . getRoundingMode ( ) ) ) ; pInv . setForeignTotalTaxes ( BigDecimal . valueOf ( totals [ 5 ] ) . setScale ( pAs . getPricePrecision ( ) , pAs . getSalTaxRoundMode ( ) ) ) ; pInv . setForeignSubtotal ( pInv . getForeignTotal ( ) . subtract ( pInv . getForeignTotalTaxes ( ) ) ) ; } else { pInv . setSubtotal ( BigDecimal . 
valueOf ( totals [ 0 ] ) . setScale ( pAs . getPricePrecision ( ) , pAs . getRoundingMode ( ) ) ) ; pInv . setTotalTaxes ( BigDecimal . valueOf ( totals [ 2 ] ) . setScale ( pAs . getPricePrecision ( ) , pAs . getSalTaxRoundMode ( ) ) ) ; pInv . setItsTotal ( pInv . getSubtotal ( ) . add ( pInv . getTotalTaxes ( ) ) ) ; pInv . setForeignSubtotal ( BigDecimal . valueOf ( totals [ 3 ] ) . setScale ( pAs . getPricePrecision ( ) , pAs . getRoundingMode ( ) ) ) ; pInv . setForeignTotalTaxes ( BigDecimal . valueOf ( totals [ 5 ] ) . setScale ( pAs . getPricePrecision ( ) , pAs . getSalTaxRoundMode ( ) ) ) ; pInv . setForeignTotal ( pInv . getForeignSubtotal ( ) . add ( pInv . getForeignTotalTaxes ( ) ) ) ; } getSrvOrm ( ) . updateEntity ( pReqVars , pInv ) ;
public class QVarXQueryGenerator { /** * Uses the qvar map to generate a XQuery string containing qvar constraints , * and the qvar map variable which maps qvar names to their respective formula ID ' s in the result . */ private void generateQvarConstraints ( ) { } }
final StringBuilder qvarConstrBuilder = new StringBuilder ( ) ; final StringBuilder qvarMapStrBuilder = new StringBuilder ( ) ; final Iterator < Map . Entry < String , ArrayList < String > > > entryIterator = qvar . entrySet ( ) . iterator ( ) ; if ( entryIterator . hasNext ( ) ) { qvarMapStrBuilder . append ( "declare function local:qvarMap($x) {\n map {" ) ; while ( entryIterator . hasNext ( ) ) { final Map . Entry < String , ArrayList < String > > currentEntry = entryIterator . next ( ) ; final Iterator < String > valueIterator = currentEntry . getValue ( ) . iterator ( ) ; final String firstValue = valueIterator . next ( ) ; qvarMapStrBuilder . append ( '"' ) . append ( currentEntry . getKey ( ) ) . append ( '"' ) . append ( " : (data($x" ) . append ( firstValue ) . append ( "/@xml:id)" ) ; // check if there are additional values that we need to constrain if ( valueIterator . hasNext ( ) ) { if ( qvarConstrBuilder . length ( ) > 0 ) { // only add beginning and if it ' s an additional constraint in the aggregate qvar string qvarConstrBuilder . append ( "\n and " ) ; } while ( valueIterator . hasNext ( ) ) { // process second value onwards final String currentValue = valueIterator . next ( ) ; qvarMapStrBuilder . append ( ",data($x" ) . append ( currentValue ) . append ( "/@xml-id)" ) ; // These constraints specify that the same qvars must refer to the same nodes , // using the XQuery " = " equality // This is equality based on : same text , same node names , and same children nodes qvarConstrBuilder . append ( "$x" ) . append ( firstValue ) . append ( " = $x" ) . append ( currentValue ) ; if ( valueIterator . hasNext ( ) ) { qvarConstrBuilder . append ( " and " ) ; } } } qvarMapStrBuilder . append ( ')' ) ; if ( entryIterator . hasNext ( ) ) { qvarMapStrBuilder . append ( ',' ) ; } } qvarMapStrBuilder . append ( "}\n};" ) ; } qvarMapVariable = qvarMapStrBuilder . toString ( ) ; qvarConstraint = qvarConstrBuilder . toString ( ) ;
public class IotHubResourcesInner { /** * Create or update the metadata of an IoT hub . * Create or update the metadata of an Iot hub . The usual pattern to modify a property is to retrieve the IoT hub metadata and security metadata , and then combine them with the modified values in a new body to update the IoT hub . * @ param resourceGroupName The name of the resource group that contains the IoT hub . * @ param resourceName The name of the IoT hub . * @ param iotHubDescription The IoT hub metadata and security metadata . * @ param ifMatch ETag of the IoT Hub . Do not specify for creating a brand new IoT Hub . Required to update an existing IoT Hub . * @ param serviceCallback the async ServiceCallback to handle successful and failed responses . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the { @ link ServiceFuture } object */ public ServiceFuture < IotHubDescriptionInner > createOrUpdateAsync ( String resourceGroupName , String resourceName , IotHubDescriptionInner iotHubDescription , String ifMatch , final ServiceCallback < IotHubDescriptionInner > serviceCallback ) { } }
return ServiceFuture . fromResponse ( createOrUpdateWithServiceResponseAsync ( resourceGroupName , resourceName , iotHubDescription , ifMatch ) , serviceCallback ) ;
public class KnowledgeOperations { /** * Gets the input . * @ param message the message * @ param operation the operation * @ param runtime the runtime engine * @ return the input */ public static Object getInput ( Message message , KnowledgeOperation operation , KnowledgeRuntimeEngine runtime ) { } }
List < Object > list = getList ( message , operation . getInputExpressionMappings ( ) ) ; switch ( list . size ( ) ) { case 0 : return filterRemoteDefaultInputContent ( message . getContent ( ) , runtime ) ; case 1 : return list . get ( 0 ) ; default : return list ; }
public class RandomMultiDataSetIterator {
    /**
     * Generate a random array with the specified shape and order.
     *
     * @param shape  Shape of the array
     * @param order  Order of array ('c' or 'f')
     * @param values Kind of values to fill the array with (uniform, normal,
     *               one-hot, zeros, ones, binary, or floored integers in a range)
     * @return Random array of the specified shape and contents
     */
    public static INDArray generate(long[] shape, char order, Values values) {
        switch (values) {
            case RANDOM_UNIFORM:
                // Uniform random values filled into an array of the requested order.
                return Nd4j.rand(Nd4j.createUninitialized(shape, order));
            case RANDOM_NORMAL:
                // Gaussian random values filled into an array of the requested order.
                return Nd4j.randn(Nd4j.createUninitialized(shape, order));
            case ONE_HOT:
                // One-hot encoding: a single 1.0 is placed at a random position along
                // dimension 1 (dimension 0 for rank-1 arrays) for every other index
                // combination. Only ranks 1..5 are supported.
                Random r = new Random(Nd4j.getRandom().nextLong());
                INDArray out = Nd4j.create(shape, order);
                if (shape.length == 1) {
                    out.putScalar(r.nextInt((int) shape[0]), 1.0);
                } else if (shape.length == 2) {
                    for (int i = 0; i < shape[0]; i++) {
                        out.putScalar(i, r.nextInt((int) shape[1]), 1.0);
                    }
                } else if (shape.length == 3) {
                    for (int i = 0; i < shape[0]; i++) {
                        for (int j = 0; j < shape[2]; j++) {
                            out.putScalar(i, r.nextInt((int) shape[1]), j, 1.0);
                        }
                    }
                } else if (shape.length == 4) {
                    for (int i = 0; i < shape[0]; i++) {
                        for (int j = 0; j < shape[2]; j++) {
                            for (int k = 0; k < shape[3]; k++) {
                                out.putScalar(i, r.nextInt((int) shape[1]), j, k, 1.0);
                            }
                        }
                    }
                } else if (shape.length == 5) {
                    for (int i = 0; i < shape[0]; i++) {
                        for (int j = 0; j < shape[2]; j++) {
                            for (int k = 0; k < shape[3]; k++) {
                                for (int l = 0; l < shape[4]; l++) {
                                    out.putScalar(new int[]{i, r.nextInt((int) shape[1]), j, k, l}, 1.0);
                                }
                            }
                        }
                    }
                } else {
                    throw new RuntimeException("Not supported: rank 6+ arrays. Shape: " + Arrays.toString(shape));
                }
                return out;
            case ZEROS:
                return Nd4j.create(shape, order);
            case ONES:
                return Nd4j.createUninitialized(shape, order).assign(1.0);
            case BINARY:
                // Bernoulli(0.5): each element independently 0.0 or 1.0.
                return Nd4j.getExecutioner().exec(new BernoulliDistribution(Nd4j.createUninitialized(shape, order), 0.5));
            // Floored uniform values give integers in [0, upperBound).
            // NOTE(review): these branches use Nd4j.rand(shape) without the 'order'
            // argument, unlike the branches above — presumably intentional; confirm.
            case INTEGER_0_10:
                return Transforms.floor(Nd4j.rand(shape).muli(10), false);
            case INTEGER_0_100:
                return Transforms.floor(Nd4j.rand(shape).muli(100), false);
            case INTEGER_0_1000:
                return Transforms.floor(Nd4j.rand(shape).muli(1000), false);
            case INTEGER_0_10000:
                return Transforms.floor(Nd4j.rand(shape).muli(10000), false);
            case INTEGER_0_100000:
                return Transforms.floor(Nd4j.rand(shape).muli(100000), false);
            default:
                throw new RuntimeException("Unknown enum value: " + values);
        }
    }
}
public class StatementBuilder { /** * Returns the list of { @ link Snak } objects for a given qualifier property . * @ param propertyIdValue * @ return */ protected ArrayList < Snak > getQualifierList ( PropertyIdValue propertyIdValue ) { } }
ArrayList < Snak > result = this . qualifiers . get ( propertyIdValue ) ; if ( result == null ) { result = new ArrayList < Snak > ( ) ; this . qualifiers . put ( propertyIdValue , result ) ; } return result ;
public class FlatTreeNode {
    /**
     * Return a sequence of all <em>mapped</em> nodes of the whole underlying
     * tree. This is a convenient method for
     * <pre>{@code
     * final ISeq<B> seq = stream()
     *     .map(mapper)
     *     .collect(ISeq.toISeq())
     * }</pre>
     *
     * @param mapper the mapper function applied to every node
     * @param <B> the mapped type
     * @return a sequence of all <em>mapped</em> nodes
     */
    public <B> ISeq<B> map(final Function<FlatTreeNode<T>, ? extends B> mapper) {
        return stream().map(mapper).collect(ISeq.toISeq());
    }
}
public class CacheHandler { /** * 缓存字段查询到idkey * @ param fieldCacheKey * @ param idCacheKey * @ param expired */ private void cacheFieldRefKey ( String fieldCacheKey , String idCacheKey , long expired ) { } }
if ( nullValueCache ) { getCacheProvider ( ) . set ( fieldCacheKey , idCacheKey , expired ) ; } else { getCacheProvider ( ) . setStr ( fieldCacheKey , idCacheKey , expired ) ; }
public class StaticTypeCheckingSupport {
    /**
     * Checks that arguments and parameter types match.
     *
     * @param params method parameters
     * @param args type arguments
     * @return -1 if arguments do not match, 0 if arguments are of the exact type and > 0 when one or more argument is
     * not of the exact type but still match
     */
    public static int allParametersAndArgumentsMatch(Parameter[] params, ClassNode[] args) {
        if (params == null) {
            params = Parameter.EMPTY_ARRAY;
        }
        int dist = 0;
        if (args.length < params.length) return -1;
        // Fewer arguments than parameters can never match. Extra trailing arguments
        // are silently ignored by the loop below — presumably handled by vararg
        // matching elsewhere; TODO confirm.
        for (int i = 0; i < params.length; i++) {
            ClassNode paramType = params[i].getType();
            ClassNode argType = args[i];
            if (!isAssignableTo(argType, paramType)) return -1;
            else {
                // Accumulate a distance penalty for each non-exact (but assignable) match.
                if (!paramType.equals(argType)) dist += getDistance(argType, paramType);
            }
        }
        return dist;
    }
}
public class TcasesOpenApiIO {
    /**
     * Returns a {@link SystemInputDef system input definition} for the API requests defined by the given
     * OpenAPI specification. Returns null if the given spec defines no API requests to model.
     *
     * @param api stream containing the OpenAPI specification; closed by the reader
     * @param options model options controlling how the input model is built
     * @return the request input model, or null when the spec defines no requests
     */
    public static SystemInputDef getRequestInputModel(InputStream api, ModelOptions options) {
        // try-with-resources guarantees the reader (and underlying stream) is closed.
        try (OpenApiReader reader = new OpenApiReader(api)) {
            return TcasesOpenApi.getRequestInputModel(reader.read(), options);
        }
    }
}
public class EntryBuffer { /** * Returns the buffer index for the given offset . */ private int offset ( long index ) { } }
int offset = ( int ) ( index % buffer . length ) ; if ( offset < 0 ) { offset += buffer . length ; } return offset ;
public class AnnotationTargetsImpl_Targets {
    /**
     * Scans the direct (non-inherited) class sources exactly once.
     *
     * PostCondition: haveScannedDirectClasses == true. The flag is set before
     * scanning so a failed scan is not retried.
     *
     * @throws AnnotationTargets_Exception if scanner creation or the direct scan fails
     */
    protected void doScanDirectClasses() throws AnnotationTargets_Exception {
        // Idempotent: only the first call performs work.
        if (haveScannedDirectClasses) {
            return;
        }
        haveScannedDirectClasses = true;
        if (rootClassSource == null) {
            // Called before activation: nothing to scan; trace and fall through.
            if (tc.isDebugEnabled()) {
                Tr.debug(tc, "Call to scan direct classes before activation");
            }
        } else {
            if (directClassSourceCount == 0) {
                // Unusual but not fatal: the scan proceeds with no direct sources.
                if (tc.isDebugEnabled()) {
                    Tr.debug(tc, "Strange call to scan direct classes before adding direct class sources");
                }
            }
            createScanner(rootClassSource).scanDirect();
            // 'createScanner' and 'scanDirect' both throw AnnotationTargets_Exception
        }
    }
}
public class PdfChunk { /** * Gets the width of the < CODE > PdfChunk < / CODE > taking into account the * extra character and word spacing . * @ param charSpacing the extra character spacing * @ param wordSpacing the extra word spacing * @ return the calculated width */ public float getWidthCorrected ( float charSpacing , float wordSpacing ) { } }
if ( image != null ) { return image . getScaledWidth ( ) + charSpacing ; } int numberOfSpaces = 0 ; int idx = - 1 ; while ( ( idx = value . indexOf ( ' ' , idx + 1 ) ) >= 0 ) ++ numberOfSpaces ; return width ( ) + ( value . length ( ) * charSpacing + numberOfSpaces * wordSpacing ) ;
public class Graphics { /** * Draw a rounded rectangle * @ param x * The x coordinate of the top left corner of the rectangle * @ param y * The y coordinate of the top left corner of the rectangle * @ param width * The width of the rectangle * @ param height * The height of the rectangle * @ param cornerRadius * The radius of the rounded edges on the corners * @ param segs * The number of segments to make the corners out of */ public void drawRoundRect ( float x , float y , float width , float height , int cornerRadius , int segs ) { } }
if ( cornerRadius < 0 ) throw new IllegalArgumentException ( "corner radius must be > 0" ) ; if ( cornerRadius == 0 ) { drawRect ( x , y , width , height ) ; return ; } int mr = ( int ) Math . min ( width , height ) / 2 ; // make sure that w & h are larger than 2 * cornerRadius if ( cornerRadius > mr ) { cornerRadius = mr ; } drawLine ( x + cornerRadius , y , x + width - cornerRadius , y ) ; drawLine ( x , y + cornerRadius , x , y + height - cornerRadius ) ; drawLine ( x + width , y + cornerRadius , x + width , y + height - cornerRadius ) ; drawLine ( x + cornerRadius , y + height , x + width - cornerRadius , y + height ) ; float d = cornerRadius * 2 ; // bottom right - 0 , 90 drawArc ( x + width - d , y + height - d , d , d , segs , 0 , 90 ) ; // bottom left - 90 , 180 drawArc ( x , y + height - d , d , d , segs , 90 , 180 ) ; // top right - 270 , 360 drawArc ( x + width - d , y , d , d , segs , 270 , 360 ) ; // top left - 180 , 270 drawArc ( x , y , d , d , segs , 180 , 270 ) ;
public class CallStack { /** * See above for why we can ' t implement jsonToken */ public HashMap < String , Object > getJsonToken ( ) throws Exception { } }
HashMap < String , Object > jRTObject = new HashMap < String , Object > ( ) ; ArrayList < Object > jThreads = new ArrayList < Object > ( ) ; for ( CallStack . Thread thread : threads ) { jThreads . add ( thread . jsonToken ( ) ) ; } jRTObject . put ( "threads" , jThreads ) ; jRTObject . put ( "threadCounter" , threadCounter ) ; return jRTObject ;
public class ConcurrentHashMultiset {
    /**
     * Sets the number of occurrences of {@code element} to {@code newCount}, but only if
     * the count is currently {@code expectedOldCount}. If {@code element} does not appear
     * in the multiset exactly {@code expectedOldCount} times, no changes will be made.
     *
     * @return {@code true} if the change was successful. This usually indicates
     * that the multiset has been modified, but not always: in the case that
     * {@code expectedOldCount == newCount}, the method will return {@code true} if
     * the condition was met.
     * @throws IllegalArgumentException if {@code expectedOldCount} or {@code newCount} is negative
     */
    @Override
    public boolean setCount(E element, int expectedOldCount, int newCount) {
        checkNotNull(element);
        checkNonnegative(expectedOldCount, "oldCount");
        checkNonnegative(newCount, "newCount");
        AtomicInteger existingCounter = Maps.safeGet(countMap, element);
        if (existingCounter == null) {
            // No entry at all: the observed count is 0.
            if (expectedOldCount != 0) {
                return false;
            } else if (newCount == 0) {
                // 0 -> 0 is a no-op that trivially met the condition.
                return true;
            } else {
                // if our write lost the race, it must have lost to a nonzero value, so we can stop
                return countMap.putIfAbsent(element, new AtomicInteger(newCount)) == null;
            }
        }
        int oldValue = existingCounter.get();
        if (oldValue == expectedOldCount) {
            if (oldValue == 0) {
                // Counter exists but reads 0: it is logically absent.
                if (newCount == 0) {
                    // Just observed a 0; try to remove the entry to clean up the map
                    countMap.remove(element, existingCounter);
                    return true;
                } else {
                    // Replace the dead zero-counter with a fresh one, either by
                    // inserting (if it was removed concurrently) or by swapping it out.
                    AtomicInteger newCounter = new AtomicInteger(newCount);
                    return (countMap.putIfAbsent(element, newCounter) == null) || countMap.replace(element, existingCounter, newCounter);
                }
            } else {
                if (existingCounter.compareAndSet(oldValue, newCount)) {
                    if (newCount == 0) {
                        // Just CASed to 0; remove the entry to clean up the map. If the removal fails,
                        // another thread has already replaced it with a new counter, which is fine.
                        countMap.remove(element, existingCounter);
                    }
                    return true;
                }
            }
        }
        // Observed count differed from expectedOldCount (or the CAS lost a race).
        return false;
    }
}
public class CmsJspTagJQuery {
    /**
     * Writes the script/link tags that load the configured jQuery JS (and optional
     * CSS) resource, either statically or via a dynamic loader script.
     *
     * @return {@link #SKIP_BODY} in all cases
     * @throws JspException in case writing to the page output fails
     */
    @Override
    public int doStartTag() throws JspException {
        ServletRequest req = pageContext.getRequest();
        // This will always be true if the page is called through OpenCms
        if (!CmsFlexController.isCmsRequest(req)) {
            return SKIP_BODY;
        }
        if (getJs() == null) {
            if (isDynamic()) {
                // in case we want to include the needed js functions
                try {
                    pageContext.getOut().print(
                        "<script type='text/javascript' src='" + CmsWorkplace.getSkinUri() + VFS_PATH_LOAD_JS + "' ></script>");
                } catch (Exception ex) {
                    if (LOG.isErrorEnabled()) {
                        LOG.error(Messages.get().getBundle().key(Messages.ERR_PROCESS_TAG_1, "jquery"), ex);
                    }
                    throw new JspException(ex);
                }
            }
            return SKIP_BODY;
        }
        // get the server prefix
        CmsObject cms = CmsFlexController.getCmsObject(req);
        // first handle js file
        String path = VFS_PATH_JQUERY;
        if (cms.getRequestContext().getCurrentProject().isOnlineProject()) {
            // online: use the packed (minified) variant
            path += VFS_PATH_PACKED;
        } else {
            // offline: use the unpacked variant
            path += VFS_PATH_UNPACKED;
        }
        String file = path + getJs() + EXTENSION_JS;
        try {
            // Verify the resource exists before emitting a reference to it.
            cms.readResource(CmsWorkplace.VFS_PATH_RESOURCES + file);
            if (isDynamic()) {
                pageContext.getOut().print(
                    "<script type='text/javascript'>load_script('" + CmsWorkplace.getSkinUri() + file + "', 'js');</script>");
            } else {
                pageContext.getOut().print(
                    "<script type='text/javascript' src='" + CmsWorkplace.getSkinUri() + file + "' ></script>");
            }
        } catch (Exception ex) {
            if (LOG.isErrorEnabled()) {
                LOG.error(Messages.get().getBundle().key(Messages.ERR_PROCESS_TAG_1, "jquery"), ex);
            }
            throw new JspException(ex);
        }
        if (getCss() == null) {
            return SKIP_BODY;
        }
        // now handle css file
        path = VFS_PATH_CSS;
        file = path + getCss() + EXTENSION_CSS;
        try {
            cms.readResource(CmsWorkplace.VFS_PATH_RESOURCES + file);
            pageContext.getOut().println();
            if (isDynamic()) {
                pageContext.getOut().print(
                    "<script type='text/javascript'>load_script('" + CmsWorkplace.getSkinUri() + file + "', 'css');</script>");
            } else {
                pageContext.getOut().print(
                    "<link href='" + CmsWorkplace.getSkinUri() + file + "' rel='stylesheet' type='text/css' >");
            }
        } catch (Exception ex) {
            if (LOG.isErrorEnabled()) {
                LOG.error(Messages.get().getBundle().key(Messages.ERR_PROCESS_TAG_1, "jquery"), ex);
            }
            throw new JspException(ex);
        }
        return SKIP_BODY;
    }
}
public class MathUtil {
    /**
     * Replies if the given values are near.
     *
     * Delegates to the three-argument overload with {@code Double.NaN} as the
     * epsilon; the {@code @Inline} annotation mirrors exactly this delegation
     * for compile-time inlining.
     *
     * @param v1 first value.
     * @param v2 second value.
     * @return <code>true</code> if the given {@code v1} is near {@code v2},
     *         otherwise <code>false</code>.
     * @see Math#ulp(double)
     */
    @Pure
    @Inline(value = "MathUtil.isEpsilonEqual($1, $2, Double.NaN)", imported = { MathUtil.class })
    public static boolean isEpsilonEqual(double v1, double v2) {
        return isEpsilonEqual(v1, v2, Double.NaN);
    }
}
public class ClosedHashingUtil {
    /**
     * Gets the next twin prime that is near a power of 2 and greater than or
     * equal to the given value.
     *
     * NOTE(review): the search key is {@code m + 1}, so when {@code m} itself is an
     * entry of {@code twinPrimesP2} (and {@code m + 1} is not), the entry AFTER
     * {@code m} is returned — i.e. strictly greater, not "greater than or equal".
     * TODO confirm whether this is intended.
     *
     * @param m the integer to get a twin prime larger than
     * @return a twin prime from the table at or above the requested value
     */
    public static int getNextPow2TwinPrime(int m) {
        int pos = Arrays.binarySearch(twinPrimesP2, m + 1);
        if (pos >= 0)
            // Exact hit: m + 1 is itself in the table.
            return twinPrimesP2[pos];
        else
            // Miss: binarySearch returns -(insertionPoint) - 1, so -pos - 1 is the
            // index of the first table entry greater than m + 1.
            return twinPrimesP2[-pos - 1];
    }
}
public class MtasDataItemDoubleFull {
    /**
     * Compares this item with another by their comparable values, honoring the
     * configured sort direction (descending negates the comparison result).
     * Items of a different concrete type compare as equal (0).
     *
     * (non-Javadoc)
     *
     * @see java.lang.Comparable#compareTo(java.lang.Object)
     */
    @SuppressWarnings({ "rawtypes", "unchecked" })
    public int compareTo(MtasDataItem<Double, Double> o) {
        int compare = 0;
        if (o instanceof MtasDataItemDoubleFull) {
            MtasDataItemDoubleFull to = (MtasDataItemDoubleFull) o;
            MtasDataItemNumberComparator c1 = getComparableValue();
            MtasDataItemNumberComparator c2 = to.getComparableValue();
            // Null comparable values on either side fall back to "equal".
            compare = (c1 != null && c2 != null) ? c1.compareTo(c2.getValue()) : 0;
        }
        return sortDirection.equals(CodecUtil.SORT_DESC) ? -1 * compare : compare;
    }
}
public class PathBuilder { /** * Create a new DateTime path * @ param < A > * @ param path existing path * @ return property path */ @ SuppressWarnings ( "unchecked" ) public < A extends Comparable < ? > > DateTimePath < A > get ( DateTimePath < A > path ) { } }
DateTimePath < A > newPath = getDateTime ( toString ( path ) , ( Class < A > ) path . getType ( ) ) ; return addMetadataOf ( newPath , path ) ;
public class ExtensionLoader { /** * Tells whether or not an { @ code Extension } with the given * { @ code extensionName } is enabled . * @ param extensionName the name of the extension * @ return { @ code true } if the extension is enabled , { @ code false } * otherwise . * @ throws IllegalArgumentException if the { @ code extensionName } is * { @ code null } . * @ see # getExtension ( String ) * @ see Extension */ public boolean isExtensionEnabled ( String extensionName ) { } }
if ( extensionName == null ) { throw new IllegalArgumentException ( "Parameter extensionName must not be null." ) ; } Extension extension = getExtension ( extensionName ) ; if ( extension == null ) { return false ; } return extension . isEnabled ( ) ;
public class CooccurrenceKeywordExtractor {
    /**
     * Returns a given number of top keywords, extracted by co-occurrence
     * statistics: sentences are tokenized and stemmed, frequent phrases are
     * found Apriori-style, clustered by co-occurrence, and ranked by a
     * chi-square score against the expected co-occurrence distribution.
     *
     * @param text A single document.
     * @param maxNumKeywords maximum number of keywords to return
     * @return The top keywords.
     */
    public ArrayList<NGram> extract(String text, int maxNumKeywords) {
        ArrayList<String[]> sentences = new ArrayList<>();
        SimpleTokenizer tokenizer = new SimpleTokenizer();
        PorterStemmer stemmer = new PorterStemmer();
        // Split text into sentences. Stem words by Porter algorithm.
        int ntotal = 0;
        for (String paragraph : SimpleParagraphSplitter.getInstance().split(text)) {
            for (String s : SimpleSentenceSplitter.getInstance().split(paragraph)) {
                String[] sentence = tokenizer.split(s);
                for (int i = 0; i < sentence.length; i++) {
                    sentence[i] = stemmer.stripPluralParticiple(sentence[i]).toLowerCase();
                }
                sentences.add(sentence);
                ntotal += sentence.length;
            }
        }
        // Extract phrases by Apriori-like algorithm.
        int maxNGramSize = 4;
        ArrayList<NGram> terms = new ArrayList<>();
        AprioriPhraseExtractor phraseExtractor = new AprioriPhraseExtractor();
        for (ArrayList<NGram> ngrams : phraseExtractor.extract(sentences, maxNGramSize, 4)) {
            for (NGram ngram : ngrams) {
                terms.add(ngram);
            }
        }
        Collections.sort(terms);
        // Select upto 30% most frequent terms (the tail of the sorted list).
        int n = 3 * terms.size() / 10;
        NGram[] freqTerms = new NGram[n];
        for (int i = 0, start = terms.size() - n; i < n; i++) {
            freqTerms[i] = terms.get(start + i);
        }
        // Trie for phrase matching.
        Trie<String, Integer> trie = new Trie<>();
        for (int i = 0; i < n; i++) {
            trie.put(freqTerms[i].words, i);
        }
        // Build co-occurrence table: nw[i] counts weighted term occurrences,
        // table[i][j] counts sentence-level co-occurrences of terms i and j.
        int[] nw = new int[n];
        int[][] table = new int[n][n];
        for (String[] sentence : sentences) {
            Set<Integer> phrases = new HashSet<>();
            for (int j = 1; j <= maxNGramSize; j++) {
                for (int i = 0; i <= sentence.length - j; i++) {
                    String[] phrase = Arrays.copyOfRange(sentence, i, i + j);
                    Integer index = trie.get(phrase);
                    if (index != null) {
                        phrases.add(index);
                    }
                }
            }
            for (int i : phrases) {
                nw[i] += phrases.size();
                for (int j : phrases) {
                    if (i != j) {
                        table[i][j]++;
                    }
                }
            }
        }
        // Clustering frequent terms. Initially every term is its own cluster.
        int[] cluster = new int[n];
        for (int i = 0; i < cluster.length; i++) {
            cluster[i] = i;
        }
        // double log2 = Math.log(2.0);
        for (int i = 0; i < n; i++) {
            for (int j = i + 1; j < n; j++) {
                // Mutual information
                if (table[i][j] > 0) {
                    // This doesn't work as ntotal is usually large and thus the mutual information
                    // is way larger than the threshold log2 given in the paper.
                    // double mutual = Math.log((double) ntotal * table[i][j] / (freqTerms[i].freq * freqTerms[j].freq));
                    // Here we just use the (squared) geometric average of co-occurrence probability.
                    // It works well to clustering things like "digital computer" and "computer" in practice.
                    double mutual = (double) table[i][j] * table[i][j] / (freqTerms[i].freq * freqTerms[j].freq);
                    if (mutual >= 0.25) {
                        cluster[j] = cluster[i];
                    }
                    /* else {
                        double js = 0.0; // Jensen-Shannon divergence
                        for (int k = 0; k < n; k++) {
                            double p1 = (double) table[i][k] / freqTerms[i].freq;
                            double p2 = (double) table[j][k] / freqTerms[j].freq;
                            // The formula in the paper is not correct as p is not real probability.
                            if (p1 > 0 && p2 > 0) {
                                js += -(p1 + p2) * Math.log((p1 + p2) / 2.0) + p1 * Math.log(p1) + p2 * Math.log(p2);
                            }
                        }
                        js /= 2.0;
                        if (js > log2) {
                            cluster[j] = cluster[i];
                        }
                    } */
                }
            }
        }
        // Calculate expected probability of each cluster.
        double[] pc = new double[n];
        for (int i = 0; i < n; i++) {
            for (int j = 0; j < n; j++) {
                pc[cluster[j]] += table[i][j];
            }
        }
        for (int i = 0; i < n; i++) {
            pc[i] /= ntotal;
        }
        // Calculate chi-square scores.
        double[] score = new double[n];
        for (int i = 0; i < n; i++) {
            double max = Double.NEGATIVE_INFINITY;
            for (int j = 0; j < n; j++) {
                // Only cluster representatives (cluster[j] == j) contribute.
                if (cluster[j] != j) {
                    continue;
                }
                double fwc = 0.0;
                for (int k = 0; k < n; k++) {
                    if (cluster[k] == j) fwc += table[i][k];
                }
                double expected = nw[i] * pc[j];
                double d = (fwc - expected);
                double chisq = d * d / expected;
                score[i] += chisq;
                if (chisq > max) max = chisq;
            }
            // score[i] -= max;
        }
        // Rank terms by score (ascending), then walk from the highest score down.
        int[] index = QuickSort.sort(score);
        ArrayList<NGram> keywords = new ArrayList<>();
        for (int i = n; i-- > 0; ) {
            boolean add = true;
            // filter out components of phrases, e.g. "digital" in "digital computer".
            for (int j = i + 1; j < n; j++) {
                if (cluster[index[j]] == cluster[index[i]]) {
                    if (freqTerms[index[j]].words.length >= freqTerms[index[i]].words.length) {
                        add = false;
                        break;
                    } else {
                        keywords.remove(freqTerms[index[j]]);
                        add = true;
                    }
                }
            }
            if (add) {
                keywords.add(freqTerms[index[i]]);
                if (keywords.size() >= maxNumKeywords) break;
            }
        }
        return keywords;
    }
}
public class X509CRLImpl {
    /**
     * Parses an X.509 CRL, should be used only by constructors.
     *
     * Splits the outer SEQUENCE into tbsCertList, signature algorithm, and
     * signature, then parses the TBS fields (version, algorithm, issuer,
     * thisUpdate, optional nextUpdate, optional revoked certificates, optional
     * extensions). Marks the object read-only on success.
     *
     * @param val the DER value holding the complete CRL
     * @throws CRLException if the structure or any field is invalid
     * @throws IOException on DER decoding errors
     */
    private void parse(DerValue val) throws CRLException, IOException {
        // check if can over write the certificate
        if (readOnly)
            throw new CRLException("cannot over-write existing CRL");
        if (val.getData() == null || val.tag != DerValue.tag_Sequence)
            throw new CRLException("Invalid DER-encoded CRL data");
        signedCRL = val.toByteArray();
        // Outer structure: SEQUENCE { tbsCertList, signatureAlgorithm, signature }
        DerValue seq[] = new DerValue[3];
        seq[0] = val.data.getDerValue();
        seq[1] = val.data.getDerValue();
        seq[2] = val.data.getDerValue();
        if (val.data.available() != 0)
            throw new CRLException("signed overrun, bytes = " + val.data.available());
        if (seq[0].tag != DerValue.tag_Sequence)
            throw new CRLException("signed CRL fields invalid");
        sigAlgId = AlgorithmId.parse(seq[1]);
        signature = seq[2].getBitString();
        if (seq[1].data.available() != 0)
            throw new CRLException("AlgorithmId field overrun");
        if (seq[2].data.available() != 0)
            throw new CRLException("Signature field overrun");
        // the tbsCertsList
        tbsCertList = seq[0].toByteArray();
        // parse the information
        DerInputStream derStrm = seq[0].data;
        DerValue tmp;
        byte nextByte;
        // version (optional if v1)
        version = 0; // by default, version = v1 == 0
        nextByte = (byte) derStrm.peekByte();
        if (nextByte == DerValue.tag_Integer) {
            version = derStrm.getInteger();
            if (version != 1) // i.e. v2
                throw new CRLException("Invalid version");
        }
        tmp = derStrm.getDerValue();
        // signature
        AlgorithmId tmpId = AlgorithmId.parse(tmp);
        // the "inner" and "outer" signature algorithms must match
        if (!tmpId.equals(sigAlgId))
            throw new CRLException("Signature algorithm mismatch");
        infoSigAlgId = tmpId;
        // issuer
        issuer = new X500Name(derStrm);
        if (issuer.isEmpty()) {
            throw new CRLException("Empty issuer DN not allowed in X509CRLs");
        }
        // thisUpdate
        // check if UTCTime encoded or GeneralizedTime
        nextByte = (byte) derStrm.peekByte();
        if (nextByte == DerValue.tag_UtcTime) {
            thisUpdate = derStrm.getUTCTime();
        } else if (nextByte == DerValue.tag_GeneralizedTime) {
            thisUpdate = derStrm.getGeneralizedTime();
        } else {
            throw new CRLException("Invalid encoding for thisUpdate" + " (tag=" + nextByte + ")");
        }
        if (derStrm.available() == 0)
            return; // done parsing no more optional fields present
        // nextUpdate (optional)
        nextByte = (byte) derStrm.peekByte();
        if (nextByte == DerValue.tag_UtcTime) {
            nextUpdate = derStrm.getUTCTime();
        } else if (nextByte == DerValue.tag_GeneralizedTime) {
            nextUpdate = derStrm.getGeneralizedTime();
        } // else it is not present
        if (derStrm.available() == 0)
            return; // done parsing no more optional fields present
        // revokedCertificates (optional)
        nextByte = (byte) derStrm.peekByte();
        if ((nextByte == DerValue.tag_SequenceOf) && (!((nextByte & 0x0c0) == 0x080))) {
            DerValue[] badCerts = derStrm.getSequence(4);
            X500Principal crlIssuer = getIssuerX500Principal();
            // Certificate-issuer extensions carry forward: each entry inherits the
            // previous entry's issuer unless it overrides it.
            X500Principal badCertIssuer = crlIssuer;
            for (int i = 0; i < badCerts.length; i++) {
                X509CRLEntryImpl entry = new X509CRLEntryImpl(badCerts[i]);
                badCertIssuer = getCertIssuer(entry, badCertIssuer);
                entry.setCertificateIssuer(crlIssuer, badCertIssuer);
                X509IssuerSerial issuerSerial = new X509IssuerSerial(badCertIssuer, entry.getSerialNumber());
                revokedMap.put(issuerSerial, entry);
                revokedList.add(entry);
            }
        }
        if (derStrm.available() == 0)
            return; // done parsing no extensions
        // crlExtensions (optional)
        tmp = derStrm.getDerValue();
        if (tmp.isConstructed() && tmp.isContextSpecific((byte) 0)) {
            extensions = new CRLExtensions(tmp.data);
        }
        readOnly = true;
    }
}
public class Ifc2x3tc1PackageImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public EClass getIfcStairFlightType ( ) { } }
if ( ifcStairFlightTypeEClass == null ) { ifcStairFlightTypeEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( Ifc2x3tc1Package . eNS_URI ) . getEClassifiers ( ) . get ( 533 ) ; } return ifcStairFlightTypeEClass ;
public class AABBUtils { /** * Gets an identity { @ link AxisAlignedBB } ( size 1x1x1 ) at { @ link BlockPos } position ; * @ param pos the pos * @ return the axis aligned bb */ public static AxisAlignedBB identity ( BlockPos pos ) { } }
return new AxisAlignedBB ( pos . getX ( ) , pos . getY ( ) , pos . getZ ( ) , pos . getX ( ) + 1 , pos . getY ( ) + 1 , pos . getZ ( ) + 1 ) ;
public class Model {
    /**
     * Delete this model within the given transaction.
     *
     * The content-resolver notification is deferred until the transaction
     * actually commits; {@code afterDelete()} runs immediately after the
     * delete is queued.
     *
     * @param t The transaction to delete this model in
     */
    final public void delete(Transaction t) {
        t.delete(Utils.getTableName(getClass()), Utils.getWhereStatement(this));
        // Notify observers only once the enclosing transaction commits.
        t.addOnTransactionCommittedListener(new OnTransactionCommittedListener() {
            @Override
            public void onTransactionCommitted() {
                Sprinkles.sInstance.mContext.getContentResolver().notifyChange(Utils.getNotificationUri(Model.this.getClass()), null);
            }
        });
        afterDelete();
    }
}
public class PayloadElementParser {
    /**
     * Static parse method taking care of payload element: serializes the given
     * DOM element into an XML string, stripping the testcase default namespace.
     *
     * @param payloadElement the element to serialize; null yields an empty string
     * @return the trimmed XML serialization of the element
     * @throws CitrusRuntimeException if DOM construction or serialization fails
     */
    public static String parseMessagePayload(Element payloadElement) {
        if (payloadElement == null) {
            return "";
        }
        try {
            // Import the element into a fresh document so it can be serialized standalone.
            Document payload = DocumentBuilderFactory.newInstance().newDocumentBuilder().newDocument();
            payload.appendChild(payload.importNode(payloadElement, true));
            String payloadData = XMLUtils.serialize(payload);
            // temporary quickfix for unwanted testcase namespace in target payload
            payloadData = payloadData.replaceAll(" xmlns=\\\"http://www.citrusframework.org/schema/testcase\\\"", "");
            return payloadData.trim();
        } catch (DOMException e) {
            throw new CitrusRuntimeException("Error while constructing message payload", e);
        } catch (ParserConfigurationException e) {
            throw new CitrusRuntimeException("Error while constructing message payload", e);
        }
    }
}
public class TiffITProfile {
    /**
     * Check required tag is present, and its cardinality and value is correct.
     *
     * Note: the return value reflects presence only — cardinality and value
     * violations are reported to {@code validation} but still return true.
     *
     * @param metadata the metadata
     * @param tagName the name of the mandatory tag
     * @param cardinality the mandatory cardinality, or -1 to skip the cardinality check
     * @param possibleValues the possible tag values, or null to skip the value check
     * @return true, if tag is found
     */
    private boolean checkRequiredTag(IfdTags metadata, String tagName, int cardinality, long[] possibleValues) {
        boolean ok = true;
        int tagid = TiffTags.getTagId(tagName);
        if (!metadata.containsTagId(tagid)) {
            validation.addErrorLoc("Missing required tag for TiffIT" + profile + " " + tagName, "IFD" + currentIfd);
            ok = false;
        } else if (cardinality != -1 && metadata.get(tagid).getCardinality() != cardinality) {
            // Wrong cardinality: reported, but 'ok' stays true (tag is present).
            validation.addError("Invalid cardinality for TiffIT" + profile + " tag " + tagName, "IFD" + currentIfd, metadata.get(tagid).getCardinality());
        } else if (cardinality == 1 && possibleValues != null) {
            // Value check only applies to single-valued tags with a whitelist.
            long val = metadata.get(tagid).getFirstNumericValue();
            boolean contained = false;
            int i = 0;
            while (i < possibleValues.length && !contained) {
                contained = possibleValues[i] == val;
                i++;
            }
            if (!contained)
                validation.addError("Invalid value for TiffIT" + profile + " tag " + tagName, "IFD" + currentIfd, val);
        }
        return ok;
    }
}
public class RepositoryModule {
    /**
     * Configures repository annotations interceptor: binds a single interceptor
     * instance to every method carrying a recognized repository annotation.
     */
    protected void configureAop() {
        final RepositoryMethodInterceptor proxy = new RepositoryMethodInterceptor();
        // The interceptor is created manually, so request member injection explicitly.
        requestInjection(proxy);
        // repository specific method annotations (query, function, delegate, etc.)
        bindInterceptor(Matchers.any(), new AbstractMatcher<Method>() {
            @Override
            public boolean matches(final Method method) {
                // this will throw error if two or more annotations specified (fail fast)
                try {
                    return ExtUtils.findMethodAnnotation(method) != null;
                } catch (Exception ex) {
                    throw new MethodDefinitionException(String.format("Error declaration on method %s", RepositoryUtils.methodToString(method)), ex);
                }
            }
        }, proxy);
    }
}
public class NicInterfaceCriteria { /** * { @ inheritDoc } * @ return < code > address < / code > if the { @ link # getAcceptableName ( ) acceptable name } * equals < code > networkInterface < / code > ' s { @ link NetworkInterface # getName ( ) name } . */ @ Override protected InetAddress isAcceptable ( NetworkInterface networkInterface , InetAddress address ) throws SocketException { } }
if ( name . equals ( networkInterface . getName ( ) ) ) return address ; return null ;
public class UserFeedback { /** * Add a Info UserFeedbackEvent and log . */ public void info ( UserFeedbackEvent . Stage stage , String message ) { } }
Log . info ( stage + ": " + message ) ; addEvent ( new UserFeedbackEvent ( autoML , UserFeedbackEvent . Level . Info , stage , message ) ) ;
public class ApiOvhDomain { /** * Delete a SMD file * REST : DELETE / domain / data / smd / { smdId } * @ param smdId [ required ] SMD ID */ public void data_smd_smdId_DELETE ( Long smdId ) throws IOException { } }
String qPath = "/domain/data/smd/{smdId}" ; StringBuilder sb = path ( qPath , smdId ) ; exec ( qPath , "DELETE" , sb . toString ( ) , null ) ;
public class BaseApplet {
    /**
     * Display the status and update the cursor of the given component.
     *
     * @param iStatus the status code; also used to pick a predefined cursor
     *        when {@code cursor} is null
     * @param comp the component whose cursor is changed (ignored unless it is a
     *        {@link Component} and we are on the event-dispatch thread)
     * @param cursor the cursor to set, or null to derive one from {@code iStatus}
     * @return the component's previous cursor, or null if the cursor was not changed
     */
    public Object setStatus(int iStatus, Object comp, Object cursor) {
        Cursor oldCursor = null;
        if (comp instanceof Component)
            if (SwingUtilities.isEventDispatchThread()) // Just being careful
            {
                oldCursor = ((Component) comp).getCursor();
                // No explicit cursor supplied: map the status code to a predefined one.
                if (cursor == null)
                    cursor = (Cursor) Cursor.getPredefinedCursor(iStatus);
                ((Component) comp).setCursor((Cursor) cursor);
            }
        // Update the status bar regardless of whether the cursor changed.
        if (m_statusbar != null)
            m_statusbar.setStatus(iStatus);
        return oldCursor;
    }
}
public class JinxUtils { /** * Convert a { @ link net . jeremybrooks . jinx . JinxConstants . PrivacyFilter } enum into the corresponding Flickr * privacy filter id . * @ param privacyFilter privacy filter enum to convert . * @ return Flickr privacy filter id , or 0 if argument is null . */ public static int privacyFilterToFlickrPrivacyFilterId ( JinxConstants . PrivacyFilter privacyFilter ) { } }
if ( privacyFilter == null ) { return - 1 ; } int level ; switch ( privacyFilter ) { case privacyPublic : level = 1 ; break ; case privacyFriends : level = 2 ; break ; case privacyFamily : level = 3 ; break ; case privacyFriendsAndFamily : level = 4 ; break ; case privacyPrivate : level = 5 ; break ; default : level = - 1 ; break ; } return level ;
public class ClientSideHandlerGeneratorImpl {
    /**
     * Appends a javascript "r(...)" registration call that represents a bundle.
     * The emitted arguments are: bundle id, URL prefix, optionally the item path
     * list and IE conditional expression, and optionally the alternate production URL.
     *
     * @param bundle   the bundle
     * @param variants the variant map
     * @param buf      the buffer the javascript is appended to
     * @param useGzip  the flag indicating if we use gzip compression or not
     */
    private void appendBundle(JoinableResourceBundle bundle, Map<String, String> variants, StringBuffer buf, boolean useGzip) {
        buf.append("r(").append(JavascriptStringUtil.quote(bundle.getId())).append(",");
        String path = bundle.getURLPrefix(variants);
        if (useGzip) {
            if (path.charAt(0) == '/') {
                path = path.substring(1); // remove leading '/'; the gzip prefix supplies its own
            }
            buf.append(JavascriptStringUtil.quote(BundleRenderer.GZIP_PATH_PREFIX + path));
        } else {
            if (path.charAt(0) != '/') {
                path = "/" + path; // add leading '/'
            }
            buf.append(JavascriptStringUtil.quote(path));
        }
        // The item list can be skipped when the bundle's only item is the bundle itself
        // and no IE conditional expression applies.
        boolean skipItems = false;
        if (bundle.getItemPathList().size() == 1 && null == bundle.getExplorerConditionalExpression()) {
            skipItems = bundle.getItemPathList().get(0).getPath().equals(bundle.getId());
        }
        if (!skipItems) {
            buf.append(",[");
            for (Iterator<BundlePath> it = bundle.getItemPathList(variants).iterator(); it.hasNext();) {
                path = it.next().getPath();
                if (this.config.getGeneratorRegistry().isPathGenerated(path)) {
                    path = PathNormalizer.createGenerationPath(path, this.config.getGeneratorRegistry(), null);
                }
                if ("".equals(this.config.getContextPathOverride()) && path.startsWith("/"))
                    path = path.substring(1);
                buf.append(JavascriptStringUtil.quote(path));
                if (it.hasNext())
                    buf.append(",");
            }
            buf.append("]");
            if (null != bundle.getExplorerConditionalExpression()) {
                buf.append(",'").append(bundle.getExplorerConditionalExpression()).append("'");
            }
        }
        if (null != bundle.getAlternateProductionURL()) {
            // Complete the parameters if needed, since the alternate param goes afterwards.
            if (skipItems)
                buf.append(",null,null");
            else if (null == bundle.getExplorerConditionalExpression())
                buf.append(",null");
            buf.append(",").append(JavascriptStringUtil.quote(bundle.getAlternateProductionURL()));
        }
        buf.append(")");
    }
}
public class DigestCredentials { /** * generate digest token based on RFC 2069 and RFC 2617 guidelines * @ return digest token */ private String generateDigest ( boolean passwordAlreadyEncoded , String username , String realm , String password , String httpMethod , String uri , String qop , String nonce , String nc , String cnonce ) { } }
String ha1 ; String a2 = httpMethod + ":" + uri ; String ha2 = CredentialUtil . encryptMD5 ( a2 ) ; if ( passwordAlreadyEncoded ) { ha1 = password ; } else { ha1 = CredentialUtil . encryptMD5 ( username + ":" + realm + ":" + password ) ; } String digest ; if ( qop == null ) { digest = CredentialUtil . encryptMD5 ( ha1 , nonce + ":" + ha2 ) ; } else if ( "auth" . equals ( qop ) ) { digest = CredentialUtil . encryptMD5 ( ha1 , nonce + ":" + nc + ":" + cnonce + ":" + qop + ":" + ha2 ) ; } else { throw new TechnicalException ( "Invalid qop: '" + qop + "'" ) ; } return digest ;
public class VariableNumPattern {
    /**
     * Gets a {@code VariableNumPattern} which uses the names in {@code templateVariables}
     * as the name templates for matching variables. The names are specified in a
     * rudimentary pattern language: if a name contains a "?(x)", this portion is
     * allowed to match any integer value. x is a number which is an integer
     * offset for the match.
     * The returned pattern only works when applied to {@code variableSet}.
     *
     * @param templateVariables variables whose names encode the match templates
     * @param fixedVariables    variables that must be matched as-is
     * @param variableSet       the dynamic variable set the pattern will be applied to
     * @return the compiled pattern
     */
    public static VariableNumPattern fromTemplateVariables(VariableNumMap templateVariables,
            VariableNumMap fixedVariables, DynamicVariableSet variableSet) {
        int[] plateStarts = new int[templateVariables.size()];
        int[] plateEnds = new int[templateVariables.size()];
        int[] plateReplicationSizes = new int[templateVariables.size()];
        int[] plateVarOffsets = new int[templateVariables.size()];
        int[] plateMatchIndexOffsets = new int[templateVariables.size()];
        int[] templateVariableNums = templateVariables.getVariableNumsArray();
        String[] templateVariableNames = templateVariables.getVariableNamesArray();
        for (int i = 0; i < templateVariableNums.length; i++) {
            String variableName = templateVariableNames[i];
            // partitionVariableName must yield exactly three parts:
            // plate name, "?(offset)" template portion, and the within-plate variable name.
            String[] parts = variableSet.partitionVariableName(variableName);
            Preconditions.checkArgument(parts.length == 3);
            String plateName = parts[0];
            // Strip the "?(" and ")" decoration to recover the integer offset.
            int varOffset = Integer.parseInt(parts[1].replaceAll("[?()]", ""));
            String withinPlateVariableName = parts[2];
            plateStarts[i] = variableSet.getPlateStartIndex(plateName);
            plateEnds[i] = variableSet.getPlateEndIndex(plateName);
            plateReplicationSizes[i] = variableSet.getPlate(plateName).getMaximumPlateSize();
            plateVarOffsets[i] = variableSet.getPlate(plateName).getFixedVariables()
                    .getVariableByName(withinPlateVariableName);
            plateMatchIndexOffsets[i] = varOffset;
        }
        // Normalize the index offsets so the maximum offset is 0.
        if (plateMatchIndexOffsets.length > 0) {
            int maxOffset = Ints.max(plateMatchIndexOffsets);
            for (int i = 0; i < plateMatchIndexOffsets.length; i++) {
                plateMatchIndexOffsets[i] -= maxOffset;
            }
        }
        return new VariableNumPattern(plateStarts, plateEnds, plateReplicationSizes, plateVarOffsets,
                plateMatchIndexOffsets, templateVariables, fixedVariables);
    }
}
public class SkillDetails {
    /**
     * The list of reviews for the skill, including Key and Value pair.
     *
     * @param reviews the list of reviews for the skill, including Key and Value pair
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public SkillDetails withReviews(java.util.Map<String, String> reviews) {
        setReviews(reviews);
        return this;
    }
}
public class ToolsAwt { /** * Get the image transparency equivalence . * @ param transparency The transparency type ( must not be < code > null < / code > ) . * @ return The transparency value . * @ throws LionEngineException If invalid argument . */ public static int getTransparency ( Transparency transparency ) { } }
Check . notNull ( transparency ) ; final int value ; if ( Transparency . OPAQUE == transparency ) { value = java . awt . Transparency . OPAQUE ; } else if ( Transparency . BITMASK == transparency ) { value = java . awt . Transparency . BITMASK ; } else if ( Transparency . TRANSLUCENT == transparency ) { value = java . awt . Transparency . TRANSLUCENT ; } else { throw new LionEngineException ( transparency ) ; } return value ;
public class VirtualNetworkGatewaysInner {
    /**
     * Resets the VPN client shared key of the virtual network gateway in the specified resource group.
     * Blocks until the service call completes.
     *
     * @param resourceGroupName         the name of the resource group
     * @param virtualNetworkGatewayName the name of the virtual network gateway
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException           thrown if the request is rejected by server
     * @throws RuntimeException         all other wrapped checked exceptions if the request fails to be sent
     */
    public void beginResetVpnClientSharedKey(String resourceGroupName, String virtualNetworkGatewayName) {
        // Delegate to the async variant and block; the body is discarded (void operation).
        beginResetVpnClientSharedKeyWithServiceResponseAsync(resourceGroupName, virtualNetworkGatewayName).toBlocking().single().body();
    }
}
public class DDataSource {
    /**
     * Executes a SQL or native-JSON query against Druid, dispatching on the compiled
     * program type (delete, drop, insert, or select).
     *
     * Use {@code forceAsync} to force asynchronous mode for indexer tasks; status can
     * then be polled via {@link DDataSource#pollIndexerTaskStatus(java.lang.String)}.
     *
     * @param sqlOrJsonQuery the query text (SQL, or raw JSON when queryMode is "json")
     * @param namedParams    named parameters substituted into the SQL program
     * @param reqHeaders     HTTP headers forwarded to the Druid endpoints
     * @param printToConsole when true, results/programs are echoed to the console
     * @param queryMode      "json" bypasses SQL compilation and queries the broker directly
     * @param forceAsync     when true, insert tasks do not wait for completion
     * @return error message on the left, or the query result on the right
     */
    public Either<String, Either<Joiner4All, Mapper4All>> query(String sqlOrJsonQuery, NamedParameters namedParams,
            Map<String, String> reqHeaders, boolean printToConsole, String queryMode, boolean forceAsync) {
        if ("json".equals(queryMode)) {
            // TODO: #19 — raw JSON goes straight to the broker, no compilation step.
            Either<String, Either<Mapper4All, JSONArray>> result = broker.fireQuery(sqlOrJsonQuery, reqHeaders, true);
            if (result.isLeft())
                return new Left<>(result.left().get());
            if (printToConsole) {
                println(result.right().get().left().get().toString());
            }
            return new Right<String, Either<Joiner4All, Mapper4All>>(
                    new Right<Joiner4All, Mapper4All>(result.right().get().left().get()));
        }
        // Compile the SQL into an AST; compilation errors become a Left.
        Program pgm;
        try {
            pgm = getCompiledAST(sqlOrJsonQuery, namedParams, reqHeaders);
        } catch (Exception ex) {
            return new Left<>(ex.getMessage());
        }
        // Dispatch by program type; anything that is not delete/drop/insert is a select.
        if (pgm instanceof DeleteProgram) {
            return new Left<>(deleteRows((DeleteProgram) pgm, reqHeaders, printToConsole));
        } else if (pgm instanceof DropProgram) {
            return new Left<>(dropTable((DropProgram) pgm, reqHeaders, printToConsole));
        } else if (pgm instanceof InsertProgram) {
            InsertProgram iPgm = (InsertProgram) pgm;
            iPgm.print(printToConsole);
            // forceAsync overrides the program's own waitForCompletion preference.
            return new Left<>(overlord.fireTask(iPgm.nthStmnt(0), reqHeaders, !forceAsync && iPgm.waitForCompletion));
        } else {
            return selectRows((QueryProgram) pgm, reqHeaders, printToConsole);
        }
    }
}
public class StreamUtils {
    /**
     * Retry copy attempts from input stream to output stream. Does *not* check to make
     * sure data was intact during the transfer. Streams are re-opened (and closed) on
     * every attempt.
     *
     * @param byteSource  supplier for input streams to copy from; the stream is closed on every retry
     * @param byteSink    supplier for output streams; the stream is closed on every retry
     * @param shouldRetry predicate to determine if the throwable is recoverable for a retry
     * @param maxAttempts maximum number of retries before failing
     * @return the number of bytes copied on the successful attempt
     * @throws RuntimeException wrapping any exception that survives the retries
     */
    public static long retryCopy(final ByteSource byteSource, final ByteSink byteSink,
            final Predicate<Throwable> shouldRetry, final int maxAttempts) {
        try {
            return RetryUtils.retry(
                    () -> {
                        try (InputStream inputStream = byteSource.openStream()) {
                            try (OutputStream outputStream = byteSink.openStream()) {
                                final long retval = ByteStreams.copy(inputStream, outputStream);
                                // Workaround for http://hg.openjdk.java.net/jdk8/jdk8/jdk/rev/759aa847dcaf
                                outputStream.flush();
                                return retval;
                            }
                        }
                    },
                    shouldRetry,
                    maxAttempts);
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }
}
public class DirectLogFetcher {
    /**
     * Connect to the MySQL master to fetch the binlog, starting at the default
     * binlog header position.
     *
     * @param conn        the MySQL connection
     * @param fileName    binlog file name to fetch
     * @param serverId    slave server id to register with the master
     * @param nonBlocking whether to request a non-blocking dump
     * @throws IOException if the dump request fails
     */
    public void open(Connection conn, String fileName, final int serverId, boolean nonBlocking) throws IOException {
        // Delegate with the standard header size as the starting position.
        open(conn, fileName, BIN_LOG_HEADER_SIZE, serverId, nonBlocking);
    }
}
public class IndentingXMLStreamWriter {
    /**
     * Prepare to end an element, by writing a new line and indentation.
     * Only applies when the last thing written at this depth was markup, not
     * character data (indenting after data would alter the element's content).
     */
    protected void beforeEndElement() {
        if (depth > 0 && stack[depth] == WROTE_MARKUP) { // but not data
            try {
                writeNewLine(depth - 1);
            } catch (Exception ignored) {
                // NOTE(review): failures are only printed, never propagated, so an
                // indentation problem cannot break the actual XML output — confirm
                // this best-effort behavior is intentional.
                ignored.printStackTrace();
            }
        }
    }
}
public class PropertiesManager {
    /**
     * Notify all listeners that a property has changed.
     * The event object is created lazily so that no allocation happens when
     * there are no listeners; the same event instance is shared by all listeners.
     *
     * @param property the property whose value has changed
     */
    private void firePropertyChanged(T property) {
        PropertyEvent<T> event = null;
        for (PropertyListener<T> l : listeners) {
            if (event == null) {
                event = new PropertyEvent<T>(this, property);
            }
            l.changed(event);
        }
    }
}
public class RepresentationModelProcessorHandlerMethodReturnValueHandler { /** * Re - wraps the result of the post - processing work into an { @ link HttpEntity } or { @ link ResponseEntity } if the * original value was one of those two types . Copies headers and status code from the original value but uses the new * body . * @ param newBody the post - processed value . * @ param originalValue the original input value . * @ return */ Object rewrapResult ( RepresentationModel < ? > newBody , @ Nullable Object originalValue ) { } }
if ( ! ( originalValue instanceof HttpEntity ) ) { return rootLinksAsHeaders ? HeaderLinksResponseEntity . wrap ( newBody ) : newBody ; } HttpEntity < RepresentationModel < ? > > entity = null ; if ( originalValue instanceof ResponseEntity ) { ResponseEntity < ? > source = ( ResponseEntity < ? > ) originalValue ; entity = new ResponseEntity < > ( newBody , source . getHeaders ( ) , source . getStatusCode ( ) ) ; } else { HttpEntity < ? > source = ( HttpEntity < ? > ) originalValue ; entity = new HttpEntity < > ( newBody , source . getHeaders ( ) ) ; } return rootLinksAsHeaders ? HeaderLinksResponseEntity . wrap ( entity ) : entity ;
public class DOInstance {
    /**
     * Lists virtual machine products available from DigitalOcean, cached per
     * region for one day. TODO: remove this soon.
     *
     * @param options      product filter options — NOTE(review): not applied in this
     *                     body; confirm filtering happens in the caller
     * @param architecture optional architecture used only to key the cache
     * @return the cached or freshly fetched products
     * @throws CloudException if DigitalOcean returns no size data
     */
    public @Nonnull Iterable<VirtualMachineProduct> listProducts(@Nonnull VirtualMachineProductFilterOptions options,
            @Nullable Architecture architecture) throws InternalException, CloudException {
        // Cache key distinguishes per-architecture lookups from the "ALL" bucket.
        String cacheName = "ALL";
        if (architecture != null) {
            cacheName = architecture.name();
        }
        Cache<VirtualMachineProduct> cache = Cache.getInstance(getProvider(), "products" + cacheName,
                VirtualMachineProduct.class, CacheLevel.REGION, new TimePeriod<Day>(1, TimePeriod.DAY));
        Iterable<VirtualMachineProduct> products = cache.get(getContext());
        if (products != null && products.iterator().hasNext()) {
            return products;
        }
        List<VirtualMachineProduct> list = new ArrayList<VirtualMachineProduct>();
        // Perform DigitalOcean query
        Sizes availableSizes = (Sizes) DigitalOceanModelFactory.getModel(getProvider(),
                org.dasein.cloud.digitalocean.models.rest.DigitalOcean.SIZES);
        if (availableSizes != null) {
            for (Size s : availableSizes.getSizes()) {
                VirtualMachineProduct product = toProduct(s);
                if (product != null) {
                    list.add(product);
                }
            }
            cache.put(getContext(), list);
        } else {
            logger.error("No product could be found, " + getProvider().getCloudName()
                    + " provided no data for their sizes API.");
            throw new CloudException("No product could be found.");
        }
        return list;
    }
}
public class Filters {
    /**
     * Equivalent to {@link #replaceInString(java.util.regex.Pattern, String)} but takes
     * the regular expression as a string and uses the default overlap of 80 characters.
     *
     * @param regexp      the regular expression
     * @param replacement the string to be substituted for each match
     * @return the filter
     */
    public static Filter replaceInString(final String regexp, final String replacement) {
        return replaceInString(Pattern.compile(regexp), replacement, DEFAULT_FILTER_OVERLAP);
    }
}
public class DebMaker {
    /**
     * Validates the input parameters: the control directory, the changes-file
     * settings (changesIn/changesOut/changesSave), the compression method, the
     * output deb location, and the digest code.
     *
     * @throws PackagingException if any setting is missing, unreadable, unwritable
     *                            or inconsistent
     */
    public void validate() throws PackagingException {
        if (control == null || !control.isDirectory()) {
            throw new PackagingException("The 'control' attribute doesn't point to a directory. " + control);
        }
        if (changesIn != null) {
            // changesOut/changesSave only make sense when a changesIn template exists.
            if (changesIn.exists() && (!changesIn.isFile() || !changesIn.canRead())) {
                throw new PackagingException("The 'changesIn' setting needs to point to a readable file. "
                        + changesIn + " was not found/readable.");
            }
            if (changesOut != null && !isWritableFile(changesOut)) {
                throw new PackagingException("Cannot write the output for 'changesOut' to " + changesOut);
            }
            if (changesSave != null && !isWritableFile(changesSave)) {
                throw new PackagingException("Cannot write the output for 'changesSave' to " + changesSave);
            }
        } else {
            if (changesOut != null || changesSave != null) {
                throw new PackagingException(
                        "The 'changesOut' or 'changesSave' settings may only be used when there is a 'changesIn' specified.");
            }
        }
        if (Compression.toEnum(compression) == null) {
            throw new PackagingException("The compression method '" + compression
                    + "' is not supported (expected 'none', 'gzip', 'bzip2' or 'xz')");
        }
        if (deb == null) {
            throw new PackagingException("You need to specify where the deb file is supposed to be created.");
        }
        // Validates the digest name as a side effect (throws on unknown digest).
        getDigestCode(digest);
    }
}
public class DynamicServerListLoadBalancer {
    /**
     * Update the AllServer list in the LoadBalancer if necessary and enabled.
     * Guarded by a CAS flag so that concurrent updaters do not race: if another
     * thread holds the flag, this call is simply a no-op.
     *
     * @param ls the new full server list
     */
    protected void updateAllServerList(List<T> ls) {
        // other threads might be doing this - in which case, we pass
        if (serverListUpdateInProgress.compareAndSet(false, true)) {
            try {
                for (T s : ls) {
                    // set so that clients can start using these servers right away
                    // instead of having to wait out the ping cycle.
                    s.setAlive(true);
                }
                setServersList(ls);
                super.forceQuickPing();
            } finally {
                // Always release the flag, even if setServersList/forceQuickPing throws.
                serverListUpdateInProgress.set(false);
            }
        }
    }
}
public class StreamingKafkaSpecConsumer {
    /**
     * This method returns job specs received from Kafka. It will block until at
     * least one job spec is available, then drains any further queued specs
     * without blocking so they ride along in the same result.
     *
     * @return a completed future holding the list of (verb, jobspec) pairs;
     *         the list may be empty if the thread was interrupted while waiting
     */
    @Override
    public Future<? extends List<Pair<SpecExecutor.Verb, Spec>>> changedSpecs() {
        List<Pair<SpecExecutor.Verb, Spec>> changesSpecs = new ArrayList<>();
        try {
            // Blocking take for the first spec; the metric counts dequeues.
            Pair<SpecExecutor.Verb, Spec> specPair = _jobSpecQueue.take();
            _metrics.jobSpecDeqCount.incrementAndGet();
            do {
                changesSpecs.add(specPair);
                // if there are more elements then pass them along in this call
                specPair = _jobSpecQueue.poll();
            } while (specPair != null);
        } catch (InterruptedException e) {
            // Preserve the interrupt status for the caller; return what we have.
            Thread.currentThread().interrupt();
        }
        return new CompletedFuture(changesSpecs, null);
    }
}
public class ClassUtility {
    /**
     * Return the method that exactly matches the action name. The name is assumed
     * to be unique within the class; if several public methods share the name,
     * whichever {@link Class#getMethods()} lists first is returned.
     *
     * @param cls    the class which contains the searched method
     * @param action the name of the method to find
     * @return the matching method
     * @throws NoSuchMethodException if no public method has that name
     */
    public static Method getMethodByName(final Class<?> cls, final String action) throws NoSuchMethodException {
        for (final Method candidate : cls.getMethods()) {
            if (action.equals(candidate.getName())) {
                return candidate;
            }
        }
        throw new NoSuchMethodException(action);
    }
}
public class JvmShutdownSafeguard {
    /**
     * Installs the safeguard shutdown hook. The maximum time that the JVM is allowed
     * to spend on shutdown before being killed is the given number of milliseconds.
     *
     * @param logger      the logger to log errors to
     * @param delayMillis the delay (in milliseconds) to wait after clean shutdown was
     *                    started, before forcibly terminating the JVM; must be &gt;= 0
     */
    public static void installAsShutdownHook(Logger logger, long delayMillis) {
        checkArgument(delayMillis >= 0, "delay must be >= 0");
        // install the blocking shutdown hook
        Thread shutdownHook = new JvmShutdownSafeguard(delayMillis);
        ShutdownHookUtil.addShutdownHookThread(shutdownHook, JvmShutdownSafeguard.class.getSimpleName(), logger);
    }
}
public class AmazonAlexaForBusinessClient {
    /**
     * Lists conference providers under a specific AWS account.
     *
     * @param request the ListConferenceProviders request
     * @return Result of the ListConferenceProviders operation returned by the service.
     * @sample AmazonAlexaForBusiness.ListConferenceProviders
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/alexaforbusiness-2017-11-09/ListConferenceProviders"
     *      target="_top">AWS API Documentation</a>
     */
    @Override
    public ListConferenceProvidersResult listConferenceProviders(ListConferenceProvidersRequest request) {
        // Standard SDK pattern: run request handlers/mutations, then execute.
        request = beforeClientExecution(request);
        return executeListConferenceProviders(request);
    }
}
public class TypeConverter { /** * Convert the passed source value to long * @ param aSrcValue * The source value . May be < code > null < / code > . * @ param nDefault * The default value to be returned if an error occurs during type * conversion . * @ return The converted value . * @ throws RuntimeException * If the converter itself throws an exception * @ see TypeConverterProviderBestMatch */ public static long convertToLong ( @ Nullable final Object aSrcValue , final long nDefault ) { } }
final Long aValue = convert ( aSrcValue , Long . class , null ) ; return aValue == null ? nDefault : aValue . longValue ( ) ;
public class HexableEncryptor { /** * { @ inheritDoc } * @ throws InvalidKeyException * the invalid key exception is thrown if initialization of the cypher object fails . * @ throws UnsupportedEncodingException * is thrown by get the byte array of the private key String object fails or if the * named charset is not supported . * @ throws NoSuchAlgorithmException * is thrown if instantiation of the cypher object fails . * @ throws NoSuchPaddingException * is thrown if instantiation of the cypher object fails . * @ throws IllegalBlockSizeException * is thrown if { @ link Cipher # doFinal ( byte [ ] ) } fails . * @ throws BadPaddingException * is thrown if { @ link Cipher # doFinal ( byte [ ] ) } fails . */ @ Override public String encrypt ( final String string ) throws InvalidKeyException , UnsupportedEncodingException , NoSuchAlgorithmException , NoSuchPaddingException , IllegalBlockSizeException , BadPaddingException { } }
final byte [ ] utf8 = string . getBytes ( "UTF-8" ) ; final byte [ ] encrypt = getModel ( ) . getCipher ( ) . doFinal ( utf8 ) ; final char [ ] original = Hex . encodeHex ( encrypt , false ) ; return new String ( original ) ;
public class Node { /** * clear counts for all direct dependents of this node . * also clear the dependent write counter */ protected void clearDependentsWriteCount ( ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "clearDependentsWriteCount entry: for this node: " + this ) ; } dependentWriteCount = 0 ; if ( ( dependents == null ) || ( dependents . size ( ) == 0 ) ) { return ; } for ( int i = 0 ; i < dependents . size ( ) ; i ++ ) { dependents . get ( i ) . setWriteCount ( 0 ) ; }
public class ConstraintAdapter {
    /**
     * Gets the chain of Control, starting from the given Control, leading to the given
     * Interaction. Use this method only if you are sure that there is a link from the
     * control to the conversion; otherwise a RuntimeException is thrown. This assumes
     * that there is only one control chain towards the interaction; if not, then one
     * of the chains will be returned.
     *
     * @param control top level Control
     * @param inter   target Interaction
     * @return Control chain controlling the Interaction
     */
    protected List<Control> getControlChain(Control control, Interaction inter) {
        LinkedList<Control> list = new LinkedList<Control>();
        list.add(control);
        // search() extends the list in place while looking for a path to inter.
        boolean found = search(list, inter);
        if (!found) throw new RuntimeException("No link from Control to Conversion.");
        return list;
    }
}
public class SQLUtils {
    /**
     * Builds "SELECT count(*) FROM t_table" for the given mapped class, excluding the
     * WHERE clause and anything after it. Handles both plain tables and join-table VOs.
     *
     * @param clazz the mapped DO/VO class
     * @return the SELECT count(*) SQL prefix
     */
    public static String getSelectCountSQL(Class<?> clazz) {
        StringBuilder sql = new StringBuilder();
        sql.append("SELECT count(*)");
        // Handle join-style classes: build "FROM left alias JOIN right alias ON ...".
        JoinTable joinTable = DOInfoReader.getJoinTable(clazz);
        if (joinTable != null) {
            Field leftTableField = DOInfoReader.getJoinLeftTable(clazz);
            Field rightTableField = DOInfoReader.getJoinRightTable(clazz);
            JoinLeftTable joinLeftTable = leftTableField.getAnnotation(JoinLeftTable.class);
            JoinRightTable joinRightTable = rightTableField.getAnnotation(JoinRightTable.class);
            Table table1 = DOInfoReader.getTable(leftTableField.getType());
            Table table2 = DOInfoReader.getTable(rightTableField.getType());
            sql.append(" FROM ").append(getTableName(table1)).append(" ").append(joinLeftTable.alias()).append(" ");
            sql.append(joinTable.joinType().getCode()).append(" ");
            sql.append(getTableName(table2)).append(" ").append(joinRightTable.alias());
            // A join without an ON condition is invalid: fail fast.
            if (joinTable.on() == null || joinTable.on().trim().isEmpty()) {
                throw new OnConditionIsNeedException("join table VO:" + clazz.getName());
            }
            sql.append(" on ").append(joinTable.on().trim());
        } else {
            // Plain single-table class.
            Table table = DOInfoReader.getTable(clazz);
            sql.append(" FROM ").append(getTableName(table));
        }
        return sql.toString();
    }
}
public class LdapTemplate {
    /**
     * {@inheritDoc}
     *
     * Convenience overload: delegates to the full authenticate variant with
     * no-op entry-context and error callbacks.
     */
    @Override
    public boolean authenticate(String base, String filter, String password) {
        return authenticate(LdapUtils.newLdapName(base), filter, password,
                new NullAuthenticatedLdapEntryContextCallback(), new NullAuthenticationErrorCallback());
    }
}
public class DescribeTasksResult {
    /**
     * Any failures associated with the call.
     * Lazily initializes the backing list so the getter never returns null.
     *
     * @return Any failures associated with the call.
     */
    public java.util.List<Failure> getFailures() {
        if (failures == null) {
            failures = new com.amazonaws.internal.SdkInternalList<Failure>();
        }
        return failures;
    }
}
public class MSSQLConnectionFactory {
    /**
     * {@inheritDoc}
     *
     * Opens an MSSQL-specific storage connection, choosing the multi-database or
     * single-database variant based on the container configuration.
     *
     * @param readOnly whether the underlying JDBC connection should be read-only
     * @throws RepositoryException wrapping any {@link SQLException}
     */
    @Override
    public WorkspaceStorageConnection openConnection(boolean readOnly) throws RepositoryException {
        try {
            if (this.containerConfig.dbStructureType.isMultiDatabase()) {
                return new MSSQLMultiDbJDBCConnection(getJdbcConnection(readOnly), readOnly, containerConfig);
            }
            return new MSSQLSingleDbJDBCConnection(getJdbcConnection(readOnly), readOnly, containerConfig);
        } catch (SQLException e) {
            throw new RepositoryException(e);
        }
    }
}
public class ScreenshotState {
    /**
     * Verifies whether the state of the screenshot provided by the stateProvider
     * lambda function is not changed within the given timeout.
     *
     * @param timeout  timeout value
     * @param minScore the value in range (0.0, 1.0); the similarity score must stay
     *                 at or above this value
     * @return self instance for chaining
     * @throws ScreenshotComparisonTimeout if the calculated score is still less than
     *                                     the given score after the timeout happens
     * @throws ScreenshotComparisonError   if {@link #remember()} has not been invoked yet
     */
    public ScreenshotState verifyNotChanged(Duration timeout, double minScore) {
        // "Not changed" means similarity score stays >= minScore.
        return checkState((x) -> x >= minScore, timeout);
    }
}
public class JoinPoint { /** * Shortcut method to create a JoinPoint waiting for the given synchronization points , < b > the JoinPoint is started by this method . < / b > * If some given synchronization points are null , they are just skipped . */ @ SafeVarargs public static < T extends Exception > JoinPoint < T > fromSynchronizationPointsSimilarError ( ISynchronizationPoint < T > ... synchPoints ) { } }
JoinPoint < T > jp = new JoinPoint < > ( ) ; for ( int i = 0 ; i < synchPoints . length ; ++ i ) if ( synchPoints [ i ] != null ) jp . addToJoin ( synchPoints [ i ] ) ; jp . start ( ) ; return jp ;
public class Admin {
    /**
     * Builds a query of all available time zone ids and their display names
     * (localized via the "locale" argument, defaulting to English/UK) and stores
     * it in the variable named by the "returnVariable" argument.
     *
     * @throws PageException if argument lookup or query construction fails
     */
    private void doGetTimeZones() throws PageException {
        String strLocale = getString("locale", "english (united kingdom)");
        Locale locale = LocaleFactory.getLocale(strLocale);
        String[] timeZones = TimeZone.getAvailableIDs();
        lucee.runtime.type.Query qry = new QueryImpl(new String[] { "id", "display" },
                new String[] { "varchar", "varchar" }, timeZones.length, "timezones");
        // Sort ids alphabetically before populating the rows.
        Arrays.sort(timeZones);
        TimeZone timeZone;
        for (int i = 0; i < timeZones.length; i++) {
            timeZone = TimeZone.getTimeZone(timeZones[i]);
            // Query rows are 1-based.
            qry.setAt("id", i + 1, timeZones[i]);
            qry.setAt("display", i + 1, timeZone.getDisplayName(locale));
        }
        pageContext.setVariable(getString("admin", action, "returnVariable"), qry);
    }
}
public class InternalServiceProviders {
    /**
     * Test-visible accessor delegating to
     * {@code ServiceProviders.getCandidatesViaHardCoded}.
     *
     * @param klass     the service type to look up candidates for
     * @param hardcoded the hard-coded candidate classes
     * @return the candidate instances
     */
    @VisibleForTesting
    public static <T> Iterable<T> getCandidatesViaHardCoded(Class<T> klass, Iterable<Class<?>> hardcoded) {
        return ServiceProviders.getCandidatesViaHardCoded(klass, hardcoded);
    }
}
public class ImmutableSubstitutionTools {
    /**
     * Returns a substitution theta (if it exists) such that:
     *   theta(s) = t
     * with
     *   s: source term
     *   t: target term
     *
     * @param sourceTerm the term to be mapped
     * @param targetTerm the term to map to
     * @return the substitution, or empty when no unidirectional substitution exists
     */
    public Optional<ImmutableSubstitution<ImmutableTerm>> computeUnidirectionalSubstitution(ImmutableTerm sourceTerm,
            ImmutableTerm targetTerm) {
        /*
         * Variable: map it directly to the target term.
         */
        if (sourceTerm instanceof Variable) {
            Variable sourceVariable = (Variable) sourceTerm;
            // Occurs-check: the variable must not appear inside the functional target term.
            if ((!sourceVariable.equals(targetTerm)) && (targetTerm instanceof ImmutableFunctionalTerm)
                    && ((ImmutableFunctionalTerm) targetTerm).getVariables().contains(sourceVariable)) {
                return Optional.empty();
            }
            ImmutableSubstitution<ImmutableTerm> substitution = substitutionFactory
                    .getSubstitution(ImmutableMap.of(sourceVariable, targetTerm));
            return Optional.of(substitution);
        }
        /*
         * Functional term: recurse into the arguments (both sides must be functional).
         */
        else if (sourceTerm instanceof ImmutableFunctionalTerm) {
            if (targetTerm instanceof ImmutableFunctionalTerm) {
                return computeUnidirectionalSubstitutionOfFunctionalTerms((ImmutableFunctionalTerm) sourceTerm,
                        (ImmutableFunctionalTerm) targetTerm);
            } else {
                return Optional.empty();
            }
        }
        /*
         * Constant: only the identity (empty) substitution works, and only on equality.
         */
        else if (sourceTerm.equals(targetTerm)) {
            return Optional.of(substitutionFactory.getSubstitution());
        } else {
            return Optional.empty();
        }
    }
}
public class Client {
    /**
     * Retrieves state and metrics information for an individual node.
     *
     * @param name node name (URL-encoded into the request path)
     * @return node information
     */
    public NodeInfo getNode(String name) {
        final URI uri = uriWithPath("./nodes/" + encodePathSegment(name));
        return this.rt.getForObject(uri, NodeInfo.class);
    }
}
public class ConstantValuePropertyAccessor {
    /**
     * Build a {@link ConstantValuePropertyAccessor}, given a raw Object to cast and return.
     * The string form of the raw value is captured alongside the value itself.
     *
     * @param rawValue object value to use directly as the constant/default value; must not be null
     * @return a new instance of {@link ConstantValuePropertyAccessor}
     */
    public static ConstantValuePropertyAccessor fromRawValue(Object rawValue) {
        checkNotNull(rawValue);
        return new ConstantValuePropertyAccessor(Optional.of(rawValue.toString()), Optional.of(rawValue));
    }
}
public class InsertBench {
    /**
     * Benchmark: insert 262144 nodes in blocked mode, then close the transaction.
     */
    @Bench
    public void blocked262144() throws TTException {
        insert(262144, true);
        mTrx.close();
        // Marker printed so benchmark runs can be identified in the output.
        System.out.println("262144");
    }
}
public class JDBCOutputFormat {
    /**
     * Adds a record to the prepared statement, executing the accumulated batch
     * whenever the batch interval is reached.
     * When this method is called, the output format is guaranteed to be opened.
     *
     * NOTE(review): SQL failures are rethrown as IllegalArgumentException although
     * the signature declares IOException — confirm callers expect this.
     *
     * @param record The records to add to the output.
     * @throws IOException Thrown, if the records could not be added due to an I/O problem.
     */
    @Override
    public void writeRecord(Record record) throws IOException {
        try {
            for (int x = 0; x < record.getNumFields(); x++) {
                Value temp = record.getField(x, fieldClasses[x]);
                // JDBC parameter indices are 1-based.
                addValue(x + 1, temp);
            }
            upload.addBatch();
            batchCount++;
            if (batchCount >= batchInterval) {
                upload.executeBatch();
                batchCount = 0;
            }
        } catch (SQLException sqe) {
            throw new IllegalArgumentException("writeRecord() failed:\t", sqe);
        } catch (IllegalArgumentException iae) {
            throw new IllegalArgumentException("writeRecord() failed:\t", iae);
        }
    }
}