signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class WhitelistWarningsGuard { /** * Loads legacy warnings list from the file . * @ return The lines of the file . */ protected static Set < String > loadWhitelistedJsWarnings ( CharSource supplier ) { } }
try { return loadWhitelistedJsWarnings ( supplier . openStream ( ) ) ; } catch ( IOException e ) { throw new RuntimeException ( e ) ; }
public class KTypeStack { /** * / * # if ( $ TemplateOptions . KTypeGeneric ) */ @ SafeVarargs /* # end */ public static < KType > KTypeStack < KType > from ( KType ... elements ) { } }
final KTypeStack < KType > stack = new KTypeStack < KType > ( elements . length ) ; stack . push ( elements ) ; return stack ;
public class ColumnarArray { /** * TODO : this is extremely expensive . */ @ Override public Object [ ] array ( ) { } }
DataType dt = data . dataType ( ) ; Object [ ] list = new Object [ length ] ; try { for ( int i = 0 ; i < length ; i ++ ) { if ( ! data . isNullAt ( offset + i ) ) { list [ i ] = get ( i , dt ) ; } } return list ; } catch ( Exception e ) { throw new RuntimeException ( "Could not get the array" , e ) ; }
public class FileUtils { /** * Determines whether the parent directory contains the child element ( a file or directory ) * @ param directory the file to consider as the parent * @ param child the file to consider as the child * @ return true is the candidate leaf is under by the specified composite , otherwise false */ public static boolean directoryContains ( final File directory , final File child ) { } }
final File d = new File ( normalize ( directory . getAbsolutePath ( ) ) ) ; final File c = new File ( normalize ( child . getAbsolutePath ( ) ) ) ; if ( d . equals ( c ) ) { return false ; } else { return c . getPath ( ) . startsWith ( d . getPath ( ) ) ; }
public class ValidationWarning { /** * A description of the validation warning . * @ param warnings * A description of the validation warning . */ public void setWarnings ( java . util . Collection < String > warnings ) { } }
if ( warnings == null ) { this . warnings = null ; return ; } this . warnings = new com . amazonaws . internal . SdkInternalList < String > ( warnings ) ;
public class AsyncListEngine { /** * Main List Engine */ @ Override public void addOrUpdateItem ( T item ) { } }
synchronized ( LOCK ) { // Update memory cache cache . onObjectUpdated ( item . getEngineId ( ) , item ) ; List < T > items = new ArrayList < T > ( ) ; items . add ( item ) ; asyncStorageInt . addOrUpdateItems ( items ) ; for ( ListEngineDisplayListener < T > l : listeners ) { l . addOrUpdate ( item ) ; } }
public class ICalComponent { /** * Gets all experimental sub - component with a given name . * @ param name the component name ( case insensitive , e . g . " X - PARTY " ) * @ return the experimental components ( this list is immutable ) */ public List < RawComponent > getExperimentalComponents ( String name ) { } }
/* * Note : The returned list is not backed by the parent component because * this would allow RawComponent objects without the specified name to * be added to the list . */ List < RawComponent > toReturn = new ArrayList < RawComponent > ( ) ; for ( RawComponent component : getExperimentalComponents ( ) ) { if ( component . getName ( ) . equalsIgnoreCase ( name ) ) { toReturn . add ( component ) ; } } return Collections . unmodifiableList ( toReturn ) ;
public class Validator { /** * 验证是否为邮政编码 ( 中国 ) * @ param < T > 字符串类型 * @ param value 表单值 * @ param errorMsg 验证错误的信息 * @ return 验证后的值 * @ throws ValidateException 验证异常 */ public static < T extends CharSequence > T validateZipCode ( T value , String errorMsg ) throws ValidateException { } }
if ( false == isZipCode ( value ) ) { throw new ValidateException ( errorMsg ) ; } return value ;
public class IOUtils { /** * Writes a string to a temporary file , squashing exceptions * @ param contents The string to write * @ param path The file path * @ param encoding The encoding to encode in */ public static File writeStringToTempFileNoExceptions ( String contents , String path , String encoding ) { } }
OutputStream writer = null ; File tmp = null ; try { tmp = File . createTempFile ( path , ".tmp" ) ; if ( path . endsWith ( ".gz" ) ) { writer = new GZIPOutputStream ( new FileOutputStream ( tmp ) ) ; } else { writer = new BufferedOutputStream ( new FileOutputStream ( tmp ) ) ; } writer . write ( contents . getBytes ( encoding ) ) ; } catch ( Exception e ) { e . printStackTrace ( ) ; } finally { if ( writer != null ) { closeIgnoringExceptions ( writer ) ; } } return tmp ;
public class EventServiceImpl { /** * { @ inheritDoc } * The returned collection is unmodifiable and the method always returns a non - null collection . * @ param serviceName service name * @ param topic topic name * @ return a non - null immutable collection of listener registrations */ @ Override public Collection < EventRegistration > getRegistrations ( String serviceName , String topic ) { } }
EventServiceSegment segment = getSegment ( serviceName , false ) ; if ( segment == null ) { return Collections . emptySet ( ) ; } Collection < Registration > registrations = segment . getRegistrations ( topic , false ) ; if ( registrations == null || registrations . isEmpty ( ) ) { return Collections . < EventRegistration > emptySet ( ) ; } else { return Collections . < EventRegistration > unmodifiableCollection ( registrations ) ; }
public class XMLFactory {
    /**
     * Creates a {@link XMLWriter} to write into the XML resource located by the specified
     * identifier.
     *
     * @param systemID location identifier of the XML resource
     * @return XML writer, already bound to a UTF-8 stream over the resolved output
     * @throws KNXMLException if creation of the writer failed or XML resource can't be
     *         resolved
     */
    public XMLWriter createXMLWriter(String systemID) throws KNXMLException {
        final XMLWriter w = (XMLWriter) create(DEFAULT_WRITER);
        final EntityResolver res = (EntityResolver) create(DEFAULT_RESOLVER);
        final OutputStream os = res.resolveOutput(systemID);
        try {
            // "true" presumably asks the writer to take ownership/close the stream —
            // TODO confirm against XMLWriter.setOutput's contract.
            w.setOutput(new OutputStreamWriter(os, "UTF-8"), true);
            return w;
        } catch (final UnsupportedEncodingException e) {
            // UTF-8 should always exist; if not, close the stream we opened before failing.
            try {
                os.close();
            } catch (final IOException ignore) {
            }
            throw new KNXMLException("encoding UTF-8 unknown");
        }
    }
}
public class Cache { /** * Insert the value into the Cache using the specified key . */ public synchronized void insert ( Object key , Object value ) { } }
// evict until size < maxSize while ( isEvictionRequired ( ) && entryLimit > 0 && entryLimit < Integer . MAX_VALUE ) { evictStaleEntries ( ) ; } Entry curEntry = new Entry ( value ) ; Entry oldEntry = ( Entry ) primaryTable . put ( key , curEntry ) ; if ( oldEntry != null && oldEntry . value != null ) { ArrayList < Object > evictedValues = new ArrayList < Object > ( ) ; evictedValues . add ( oldEntry . value ) ; for ( CacheEvictionListener evictionCallback : cacheEvictionListenerSet ) { evictionCallback . evicted ( evictedValues ) ; } }
public class Similarity {
    /**
     * Computes <a href="http://en.wikipedia.org/wiki/Kendall%27s_tau">Kendall's
     * tau</a> of the values in the two vectors. This method uses tau-b, which
     * is suitable for vectors with duplicate values.
     *
     * @param a the first vector
     * @param b the second vector
     * @throws IllegalArgumentException when the length of the two vectors are
     *         not the same
     */
    public static double kendallsTau(Vector a, Vector b) {
        // Convert both vectors to their double views and delegate to the
        // double-based overload, which does the actual tau-b computation.
        return kendallsTau(Vectors.asDouble(a), Vectors.asDouble(b));
    }
}
public class Instruction { /** * Get the Opcode for this instruction , or null if the instruction is * unrecognized . */ public Opcode getOpcode ( ) { } }
int b = getUnsignedByte ( 0 ) ; switch ( b ) { case Opcode . NONPRIV : case Opcode . PRIV : case Opcode . WIDE : return Opcode . get ( b , getUnsignedByte ( 1 ) ) ; } return Opcode . get ( b ) ;
public class TimelineModel {
    /**
     * Gets all overlapped events to the given one. The given and overlapped events belong to the same group. Events are ordered
     * by their start dates - first events with more recent start dates and then events with older start dates. If start dates are
     * equal, events will be ordered by their end dates. In this case, if an event has a null end date, it is ordered before the
     * event with a not null end date.
     *
     * @param event given event
     * @return TreeSet&lt;TimelineEvent&gt; ordered overlapped events or null if no overlapping exist
     */
    public TreeSet<TimelineEvent> getOverlappedEvents(TimelineEvent event) {
        if (event == null) {
            return null;
        }
        // Lazily allocated: stays null when no overlap is found, which doubles
        // as the "no overlapping" signal for the early return below.
        List<TimelineEvent> overlappedEvents = null;
        for (TimelineEvent e : events) {
            if (e.equals(event)) {
                // given event should not be included
                continue;
            }
            // Only events in the same group can overlap; a null group only
            // matches other null groups.
            if (event.getGroup() == null && e.getGroup() != null || (event.getGroup() != null && !event.getGroup().equals(e.getGroup()))) {
                // ignore different groups
                continue;
            }
            if (isOverlapping(event, e)) {
                if (overlappedEvents == null) {
                    overlappedEvents = new ArrayList<>();
                }
                overlappedEvents.add(e);
            }
        }
        if (overlappedEvents == null) {
            return null;
        }
        // order overlapped events according to their start / end dates
        TreeSet<TimelineEvent> orderedOverlappedEvents = new TreeSet<>(new TimelineEventComparator());
        orderedOverlappedEvents.addAll(overlappedEvents);
        return orderedOverlappedEvents;
    }
}
public class FramesHandler { /** * Return all the frames . The Frames list will be instances of FrameSynopsisV3, * which only contains a few fields , for performance reasons . * @ see FrameSynopsisV3 */ @ SuppressWarnings ( "unused" ) // called through reflection by RequestServer public FramesListV3 list ( int version , FramesListV3 s ) { } }
Frames f = s . createAndFillImpl ( ) ; f . frames = Frame . fetchAll ( ) ; s . fillFromImplWithSynopsis ( f ) ; return s ;
public class LTPACrypto {
    /**
     * Sets the key for RSA algorithms, filling in any missing CRT components.
     * The eight slots appear to be RSA key parts in CRT form — presumably
     * k[0]=n, k[1]=d, k[2]=e, k[3]/k[4]=p/q, k[5]/k[6]=dP/dQ, k[7]=qInv —
     * TODO confirm against the callers' key layout. Any component passed as
     * null is derived from the others, and the results are written back into
     * {@code key} as big-endian byte arrays (the parameter is mutated in place).
     *
     * @param key the key components; modified in place
     */
    protected static final void setRSAKey(byte[][] key) {
        BigInteger[] k = new BigInteger[8];
        for (int i = 0; i < 8; i++) {
            if (key[i] != null) {
                // signum=1: interpret the bytes as an unsigned magnitude.
                k[i] = new BigInteger(1, key[i]);
            }
        }
        // CRT math below requires k[3] >= k[4]; if not, swap the prime pair
        // and their exponents, and invalidate the now-wrong inverse k[7].
        if (k[3].compareTo(k[4]) < 0) {
            BigInteger tmp;
            tmp = k[3];
            k[3] = k[4];
            k[4] = tmp;
            tmp = k[5];
            k[5] = k[6];
            k[6] = tmp;
            k[7] = null;
        }
        // Recompute any missing derived components.
        if (k[7] == null) {
            k[7] = k[4].modInverse(k[3]);
        }
        if (k[0] == null) {
            k[0] = k[3].multiply(k[4]);
        }
        if (k[1] == null) {
            // NOTE(review): k[2] must be non-null here or this throws NPE —
            // presumably callers always supply the public exponent; verify.
            k[1] = k[2].modInverse(k[3].subtract(BigInteger.valueOf(1)).multiply(k[4].subtract(BigInteger.valueOf(1))));
        }
        if (k[5] == null) {
            k[5] = k[1].remainder(k[3].subtract(BigInteger.valueOf(1)));
        }
        if (k[6] == null) {
            k[6] = k[1].remainder(k[4].subtract(BigInteger.valueOf(1)));
        }
        // Write every (now fully populated) component back into the caller's array.
        for (int i = 0; i < 8; i++) {
            key[i] = k[i].toByteArray();
        }
    }
}
public class TypedProperties { /** * Equivalent to { @ link # setString ( String , String ) * setString } { @ code ( key . name ( ) , value ) } . * If { @ code key } is null , nothing is done . */ public void setString ( Enum < ? > key , String value ) { } }
if ( key == null ) { return ; } setString ( key . name ( ) , value ) ;
public class DoubleRange { /** * Check if the specified range overlaps with the range . * @ param range DoubleRange . * @ return True if the range overlaps with the range , otherwise returns false . */ public boolean IsOverlapping ( DoubleRange range ) { } }
return ( ( isInside ( range . min ) ) || ( isInside ( range . max ) ) || ( range . isInside ( min ) ) || ( range . isInside ( max ) ) ) ;
public class Choice5 {
    /**
     * {@inheritDoc}
     * Maps the two rightmost alternatives (D via {@code lFn}, E via {@code rFn})
     * while passing the A, B, and C alternatives through unchanged.
     */
    @Override
    public <F, G> Choice5<A, B, C, F, G> biMap(Function<? super D, ? extends F> lFn, Function<? super E, ? extends G> rFn) {
        // match() dispatches on which alternative this value holds; the first
        // three arms re-wrap unchanged, the last two apply the mapping functions.
        return match(Choice5::a, Choice5::b, Choice5::c, d -> d(lFn.apply(d)), e -> e(rFn.apply(e)));
    }
}
public class UpdateOptions { /** * Create a copy of the options instance . * @ return the copy */ public UpdateOptions copy ( ) { } }
return new UpdateOptions ( ) . bypassDocumentValidation ( getBypassDocumentValidation ( ) ) . collation ( getCollation ( ) ) . multi ( isMulti ( ) ) . upsert ( isUpsert ( ) ) . writeConcern ( getWriteConcern ( ) ) ;
public class GosuStringUtil { /** * Finds the index for all disjoint ( non - overlapping ) occurances of the substringToLookFor in the string . */ public static ArrayList < Integer > findDistinctIndexesOf ( String string , String substringToLookFor ) { } }
ArrayList < Integer > positions = new ArrayList < Integer > ( ) ; if ( GosuStringUtil . isEmpty ( substringToLookFor ) ) { return positions ; } int i = 0 ; i = GosuStringUtil . indexOf ( string , substringToLookFor , i ) ; while ( i != - 1 ) { positions . add ( i ) ; i += substringToLookFor . length ( ) ; i = GosuStringUtil . indexOf ( string , substringToLookFor , i ) ; } return positions ;
public class AsynchronousRequest {
    /**
     * For more info on Account API go <a href="https://wiki.guildwars2.com/wiki/API:2/account">here</a><br/>
     * Give user the access to {@link Callback#onResponse(Call, Response)} and {@link Callback#onFailure(Call, Throwable)} methods for custom interactions
     *
     * @param API API key
     * @param callback callback that is going to be used for {@link Call#enqueue(Callback)}
     * @throws GuildWars2Exception invalid API key
     * @throws NullPointerException if given {@link Callback} is empty
     * @see AchievementProgression Account achievement info
     */
    public void getAchievementProgression(String API, Callback<List<AchievementProgression>> callback) throws GuildWars2Exception, NullPointerException {
        // Validate the key up front (throws GuildWars2Exception on a bad key),
        // then hand the HTTP call off asynchronously to the caller's callback.
        isParamValid(new ParamChecker(ParamType.API, API));
        gw2API.getAchievementProgression(API).enqueue(callback);
    }
}
public class Job {
    /**
     * Checks whether all of the job's runtime requirements (charging, idle,
     * network type, battery, storage) are currently satisfied. Each failed
     * check logs a warning and short-circuits with {@code false}.
     *
     * @param checkRequirementsEnforced when true, requirements are only checked
     *        if the request actually enforces them; otherwise they always pass
     * @return true when the job may run now, false when it should be rescheduled
     */
    /*package*/ boolean meetsRequirements(boolean checkRequirementsEnforced) {
        // If enforcement is optional and the request doesn't enforce
        // requirements, treat everything as met.
        if (checkRequirementsEnforced && !getParams().getRequest().requirementsEnforced()) {
            return true;
        }
        if (!isRequirementChargingMet()) {
            CAT.w("Job requires charging, reschedule");
            return false;
        }
        if (!isRequirementDeviceIdleMet()) {
            CAT.w("Job requires device to be idle, reschedule");
            return false;
        }
        if (!isRequirementNetworkTypeMet()) {
            CAT.w("Job requires network to be %s, but was %s", getParams().getRequest().requiredNetworkType(), Device.getNetworkType(getContext()));
            return false;
        }
        if (!isRequirementBatteryNotLowMet()) {
            CAT.w("Job requires battery not be low, reschedule");
            return false;
        }
        if (!isRequirementStorageNotLowMet()) {
            CAT.w("Job requires storage not be low, reschedule");
            return false;
        }
        return true;
    }
}
public class ListUtil {
    /**
     * Add the given element in the main list using a dichotomic algorithm if the element is not already present.
     * <p>This function ensure that the comparator is invoked as: <code>comparator(data, dataAlreadyInList)</code>.
     *
     * @param <E> is the type of the elements in the list.
     * @param list is the list to change.
     * @param comparator is the comparator of elements.
     * @param data is the data to insert.
     * @return the index where the element was inserted, or <code>-1</code>
     *         if the element was not inserted.
     * @since 14.0
     */
    // Body must stay textually in sync with the @Inline value below —
    // the annotation lets the compiler substitute the call site directly.
    @Inline(value = "add($1, $2, $3, false, false)")
    public static <E> int addIfAbsent(List<E> list, Comparator<? super E> comparator, E data) {
        // false, false: do not allow duplicates, do not force insertion.
        return add(list, comparator, data, false, false);
    }
}
public class FaceletViewDeclarationLanguage {
    /**
     * {@inheritDoc}
     * Returns the partial-state-saving strategy for views that use it, or null
     * for views that must use full state saving.
     */
    @Override
    public StateManagementStrategy getStateManagementStrategy(FacesContext context, String viewId) {
        // Use partial state saving strategy only if javax.faces.PARTIAL_STATE_SAVING is "true" and
        // the current view is not on javax.faces.FULL_STATE_SAVING_VIEW_IDS.
        // NOTE(review): lazy init without synchronization — presumably safe
        // because the strategy is stateless/idempotent to construct; verify.
        if (_partialStateSaving && _stateMgmtStrategy == null) {
            _stateMgmtStrategy = new DefaultFaceletsStateManagementStrategy(context);
        }
        return _usePartialStateSavingOnThisView(viewId) ? _stateMgmtStrategy : null;
    }
}
public class LanguageUtils {
    /**
     * Approves a translation for a given language: persists the key/value pair
     * on the per-language Sysprop (creating it on first use), refreshes the
     * in-memory language cache when present, and bumps the translation
     * progress counter.
     *
     * @param appid appid name of the {@link com.erudika.para.core.App}
     * @param langCode the 2-letter language code
     * @param key the translation key
     * @param value the translated string
     * @return true if the operation was successful
     */
    public boolean approveTranslation(String appid, String langCode, String key, String value) {
        // Reject blank/missing input; the default language needs no translation.
        if (StringUtils.isBlank(langCode) || key == null || value == null || getDefaultLanguageCode().equals(langCode)) {
            return false;
        }
        Sysprop s = dao.read(appid, keyPrefix.concat(langCode));
        boolean create = false;
        if (s == null) {
            // First translation for this language: create the container object.
            create = true;
            s = new Sysprop(keyPrefix.concat(langCode));
            s.setAppid(appid);
        }
        s.addProperty(key, value);
        if (create) {
            dao.create(appid, s);
        } else {
            dao.update(appid, s);
        }
        // Keep the in-memory cache coherent if this language is already loaded.
        if (LANG_CACHE.containsKey(langCode)) {
            LANG_CACHE.get(langCode).put(key, value);
        }
        updateTranslationProgressMap(appid, langCode, PLUS);
        return true;
    }
}
public class ConvexPolygon {
    /**
     * Returns the minimum distance from an inside point (x0, y0) to any edge
     * of the polygon.
     *
     * @param x0 x coordinate of the query point
     * @param y0 y coordinate of the query point
     * @return the smallest distance from the point to an edge
     * @throws IllegalArgumentException if the point lies strictly outside the polygon
     */
    public double getMinimumDistance(double x0, double y0) {
        double min = Double.MAX_VALUE;
        int rows = points.numRows;
        // points.data lays vertices out as interleaved (x, y) pairs.
        double[] d = points.data;
        // Start the edge walk from the segment closing the polygon
        // (last vertex -> first vertex).
        double x1 = d[2 * (rows - 1)];
        double y1 = d[2 * (rows - 1) + 1];
        for (int r = 0; r < rows; r++) {
            double x2 = d[2 * r];
            double y2 = d[2 * r + 1];
            min = Math.min(min, distanceFromLine(x0, y0, x1, y1, x2, y2));
            x1 = x2;
            y1 = y2;
        }
        // A point exactly on an edge (min == 0) is accepted; otherwise the
        // point must be inside the polygon for the distance to be meaningful.
        if (min > 0 && !isInside(x0, y0)) {
            throw new IllegalArgumentException("point not inside convex polygon");
        }
        return min;
    }
}
public class ProcessorNamespaceAlias {
    /**
     * Receive notification of the start of an xsl:namespace-alias element.
     * Resolves both the stylesheet prefix and the result prefix (treating
     * "#default" as the empty prefix), records the resolved namespaces on a
     * new NamespaceAlias, and registers it with the stylesheet.
     *
     * @param handler The calling StylesheetHandler/TemplatesBuilder.
     * @param uri The Namespace URI, or the empty string if the
     *        element has no Namespace URI or if Namespace
     *        processing is not being performed.
     * @param localName The local name (without prefix), or the
     *        empty string if Namespace processing is not being
     *        performed.
     * @param rawName The raw XML 1.0 name (with prefix), or the
     *        empty string if raw names are not available.
     * @param attributes The attributes attached to the element. If
     *        there are no attributes, it shall be an empty
     *        Attributes object.
     */
    public void startElement(StylesheetHandler handler, String uri, String localName, String rawName, Attributes attributes) throws org.xml.sax.SAXException {
        final String resultNS;
        NamespaceAlias na = new NamespaceAlias(handler.nextUid());
        setPropertiesFromAttributes(handler, rawName, attributes, na);
        // "#default" designates the default (empty) prefix on both sides.
        String prefix = na.getStylesheetPrefix();
        if (prefix.equals("#default")) {
            prefix = "";
            na.setStylesheetPrefix(prefix);
        }
        String stylesheetNS = handler.getNamespaceForPrefix(prefix);
        na.setStylesheetNamespace(stylesheetNS);
        prefix = na.getResultPrefix();
        if (prefix.equals("#default")) {
            prefix = "";
            na.setResultPrefix(prefix);
            resultNS = handler.getNamespaceForPrefix(prefix);
            // Distinct error codes: one for the default prefix, one for a named prefix.
            if (null == resultNS)
                handler.error(XSLTErrorResources.ER_INVALID_NAMESPACE_URI_VALUE_FOR_RESULT_PREFIX_FOR_DEFAULT, null, null);
        } else {
            resultNS = handler.getNamespaceForPrefix(prefix);
            if (null == resultNS)
                handler.error(XSLTErrorResources.ER_INVALID_NAMESPACE_URI_VALUE_FOR_RESULT_PREFIX, new Object[] { prefix }, null);
        }
        na.setResultNamespace(resultNS);
        // Register the alias both as metadata and as a child node of the stylesheet.
        handler.getStylesheet().setNamespaceAlias(na);
        handler.getStylesheet().appendChild(na);
    }
}
public class NativeCodeLoader { /** * Loads a native library from a file . * @ param directory * the directory in which the library is supposed to be located * @ param filename * filename of the library to be loaded * @ throws IOException * thrown if an internal native library cannot be extracted */ public static void loadLibraryFromFile ( final String directory , final String filename ) throws IOException { } }
final String libraryPath = directory + File . separator + filename ; synchronized ( loadedLibrarySet ) { final File outputFile = new File ( directory , filename ) ; if ( ! outputFile . exists ( ) ) { // Try to extract the library from the system resources final ClassLoader cl = ClassLoader . getSystemClassLoader ( ) ; final InputStream in = cl . getResourceAsStream ( JAR_PREFIX + filename ) ; if ( in == null ) { throw new IOException ( "Unable to extract native library " + filename + " to " + directory ) ; } final OutputStream out = new FileOutputStream ( outputFile ) ; copy ( in , out ) ; } System . load ( libraryPath ) ; loadedLibrarySet . add ( filename ) ; }
public class CmsVfsDiskCache { /** * Saves the given file content to a RFS file of the given name ( full path ) . < p > * If the required parent folders do not exists , they are also created . < p > * @ param rfsName the RFS name of the file to save the content in * @ param content the content of the file to save * @ return a reference to the File that was saved * @ throws IOException in case of disk access errors */ public static File saveFile ( String rfsName , byte [ ] content ) throws IOException { } }
File f = new File ( rfsName ) ; File p = f . getParentFile ( ) ; if ( ! p . exists ( ) ) { // create parent folders p . mkdirs ( ) ; } // write file contents FileOutputStream fs = new FileOutputStream ( f ) ; fs . write ( content ) ; fs . close ( ) ; return f ;
public class MediaManagementApi { /** * Get the content of snapshot specified by startIndex and size . * Get the content of snapshot specified by startIndex and size . * @ param snapshotId Id of the snapshot ( required ) * @ param getSnapshotContentData ( required ) * @ return ApiResponse & lt ; ApiSuccessResponse & gt ; * @ throws ApiException If fail to call the API , e . g . server error or cannot deserialize the response body */ public ApiResponse < ApiSuccessResponse > getSnapshotContentWithHttpInfo ( String snapshotId , GetSnapshotContentData getSnapshotContentData ) throws ApiException { } }
com . squareup . okhttp . Call call = getSnapshotContentValidateBeforeCall ( snapshotId , getSnapshotContentData , null , null ) ; Type localVarReturnType = new TypeToken < ApiSuccessResponse > ( ) { } . getType ( ) ; return apiClient . execute ( call , localVarReturnType ) ;
public class SerializerH3Enum {
    /**
     * Introspects the enum class, building its serialization metadata once:
     * the constructor handle and a ClassInfo describing the single "name"
     * field that identifies an enum constant.
     *
     * @param context the introspection context
     */
    @Override
    public void introspect(ContextH3 context) {
        // Already introspected — metadata is immutable once published.
        if (_infoRef.get() != null) {
            return;
        }
        _ctor = introspectConstructor();
        // Enums serialize by their constant name only.
        FieldInfoH3[] fieldInfo = new FieldInfoH3[1];
        fieldInfo[0] = new FieldInfoH3("name");
        ClassInfoH3 classInfo = new ClassInfoH3(_type.getName(), ClassTypeH3.CLASS, fieldInfo);
        // compareAndSet: a concurrent introspection may have won the race;
        // in that case our classInfo is simply discarded.
        _infoRef.compareAndSet(null, classInfo);
    }
}
public class AnalysisRecord {
    /**
     * Adds this key area description to the Record. Builds the primary-key
     * index for {@code kIDKey}, otherwise defers to the superclass; unknown
     * indexes below the last-key sentinel get an EmptyKey placeholder.
     *
     * @param iKeyArea the key area identifier to set up
     * @return the key area, or null if the identifier is out of range
     */
    public KeyArea setupKey(int iKeyArea) {
        KeyArea keyArea = null;
        if (iKeyArea == kIDKey) {
            // Primary key: unique index on the ID field, ascending.
            keyArea = this.makeIndex(DBConstants.UNIQUE, "PrimaryKey");
            keyArea.addKeyField(kID, DBConstants.ASCENDING);
        }
        if (keyArea == null) {
            // Not one of ours — let the superclass try first.
            keyArea = super.setupKey(iKeyArea);
            if (keyArea == null)
                // Still unresolved: valid indexes get an empty placeholder;
                // anything past the sentinel returns null.
                if (iKeyArea < kAnalysisRecordLastKey)
                    keyArea = new EmptyKey(this);
        }
        return keyArea;
    }
}
public class HMACConfidentialKey {
    /**
     * Computes the message authentication code for the specified byte sequence.
     * The Mac instance is cached and rebuilt whenever the backing
     * ConfidentialStore changes; the method is synchronized because Mac
     * objects are not safe for concurrent use.
     *
     * @param message the bytes to authenticate
     * @return the (chopped/truncated) MAC of the message
     */
    public synchronized byte[] mac(byte[] message) {
        ConfidentialStore cs = ConfidentialStore.get();
        // Rebuild the Mac on first use or when the store instance changed
        // (a new store implies new key material).
        if (mac == null || cs != lastCS) {
            lastCS = cs;
            mac = createMac();
        }
        return chop(mac.doFinal(message));
    }
}
public class IsProbablyBinary { /** * This method implements a simple heuristic for guessing whether a page is binary . * < P > The first { @ link # BINARY _ CHECK _ SCAN _ LENGTH } bytes are scanned : if we find more than * { @ link # THRESHOLD } zeroes , we deduce that this page is binary . Note that this works * also with UTF - 8 , as no UTF - 8 legal character encoding contains these characters ( unless * you ' re encoding 0 , but this is not our case ) . * @ return < code > true < / code > if this page has most probably a binary content . * @ throws NullPointerException if the page has no byte content . */ @ Override public boolean apply ( final HttpResponse httpResponse ) { } }
try { final InputStream content = httpResponse . getEntity ( ) . getContent ( ) ; int count = 0 ; for ( int i = BINARY_CHECK_SCAN_LENGTH ; i -- != 0 ; ) { final int b = content . read ( ) ; if ( b == - 1 ) return false ; if ( b == 0 && ++ count == THRESHOLD ) return true ; } } catch ( final IOException shouldntReallyHappen ) { throw new RuntimeException ( shouldntReallyHappen ) ; } return false ;
public class FessMessages { /** * Add the created action message for the key ' errors . property _ type _ integer ' with parameters . * < pre > * message : { 0 } should be numeric . * < / pre > * @ param property The property name for the message . ( NotNull ) * @ param arg0 The parameter arg0 for message . ( NotNull ) * @ return this . ( NotNull ) */ public FessMessages addErrorsPropertyTypeInteger ( String property , String arg0 ) { } }
assertPropertyNotNull ( property ) ; add ( property , new UserMessage ( ERRORS_property_type_integer , arg0 ) ) ; return this ;
public class BeanUtil {
    /**
     * 获取{@link BeanDesc} Bean描述信息 — gets the cached {@link BeanDesc} for the
     * class, building and caching it on first request.
     *
     * @param clazz the Bean class
     * @return the {@link BeanDesc} for the class
     * @since 3.1.2
     */
    public static BeanDesc getBeanDesc(Class<?> clazz) {
        BeanDesc beanDesc = BeanDescCache.INSTANCE.getBeanDesc(clazz);
        if (null == beanDesc) {
            // NOTE(review): check-then-put is not atomic — two threads may
            // both build a BeanDesc here. Presumably harmless because BeanDesc
            // construction is idempotent; confirm against BeanDescCache.
            beanDesc = new BeanDesc(clazz);
            BeanDescCache.INSTANCE.putBeanDesc(clazz, beanDesc);
        }
        return beanDesc;
    }
}
public class FontUtils {
    /**
     * Gets the specified font with the specified style and size, correctly scaled.
     *
     * @param font the base font to derive from
     * @param style the {@link Font} style constant (e.g. Font.BOLD)
     * @param size the desired (scaled) size
     * @return the derived font
     * @since 2.7.0
     */
    public static Font getFont(Font font, int style, Size size) {
        // Resolve the size-scaled font first, then apply the style on top.
        return getFont(font, size).deriveFont(style);
    }
}
public class OrderItemUrl { /** * Get Resource Url for DeleteQuoteItem * @ param quoteId * @ param quoteItemId * @ return String Resource Url */ public static MozuUrl deleteQuoteItemUrl ( String quoteId , String quoteItemId ) { } }
UrlFormatter formatter = new UrlFormatter ( "/api/commerce/quotes/{quoteId}/items/{quoteItemId}" ) ; formatter . formatUrl ( "quoteId" , quoteId ) ; formatter . formatUrl ( "quoteItemId" , quoteItemId ) ; return new MozuUrl ( formatter . getResourceUrl ( ) , MozuUrl . UrlLocation . TENANT_POD ) ;
public class ProxyConnectionPool {
    /**
     * Called when the IO thread has completed a successful request.
     * Accumulates I/O statistics, then either: drains everything if the host
     * is closed; hands the connection straight to a waiting caller; or parks
     * it in the available pool (evicting the longest-idle connection if the
     * cache is full) and arms the idle-timeout machinery. Upgraded
     * connections are treated as closed and never pooled.
     *
     * @param connectionHolder The client connection holder
     */
    private void returnConnection(final ConnectionHolder connectionHolder) {
        ClientStatistics stats = connectionHolder.clientConnection.getStatistics();
        this.requestCount.incrementAndGet();
        if (stats != null) {
            // we update the stats when the connection is closed
            this.read.addAndGet(stats.getRead());
            this.written.addAndGet(stats.getWritten());
            stats.reset();
        }
        HostThreadData hostData = getData();
        if (closed) {
            // the host has been closed: close this connection, drain and close
            // every pooled connection, then push queued work elsewhere.
            IoUtils.safeClose(connectionHolder.clientConnection);
            ConnectionHolder con = hostData.availableConnections.poll();
            while (con != null) {
                IoUtils.safeClose(con.clientConnection);
                con = hostData.availableConnections.poll();
            }
            redistributeQueued(hostData);
            return;
        }
        // only do something if the connection is open. If it is closed then
        // the close setter will handle creating a new connection and decrementing
        // the connection count
        final ClientConnection connection = connectionHolder.clientConnection;
        if (connection.isOpen() && !connection.isUpgraded()) {
            // Skip over callbacks whose requests were cancelled while waiting.
            CallbackHolder callback = hostData.awaitingConnections.poll();
            while (callback != null && callback.isCancelled()) {
                callback = hostData.awaitingConnections.poll();
            }
            if (callback != null) {
                if (callback.getTimeoutKey() != null) {
                    callback.getTimeoutKey().remove();
                }
                // Anything waiting for a connection is not expecting exclusivity.
                connectionReady(connectionHolder, callback.getCallback(), callback.getExchange(), false);
            } else {
                final int cachedConnectionCount = hostData.availableConnections.size();
                if (cachedConnectionCount >= maxCachedConnections) {
                    // Close the longest idle connection instead of the current one
                    final ConnectionHolder holder = hostData.availableConnections.poll();
                    if (holder != null) {
                        IoUtils.safeClose(holder.clientConnection);
                    }
                }
                hostData.availableConnections.add(connectionHolder);
                // If the soft max and ttl are configured
                if (timeToLive > 0) {
                    // we only start the timeout process once we have hit the core pool size
                    // otherwise connections could start timing out immediately once the core pool size is hit
                    // and if we never hit the core pool size then it does not make sense to start timers which are never
                    // used (as timers are expensive)
                    final long currentTime = System.currentTimeMillis();
                    connectionHolder.timeout = currentTime + timeToLive;
                    if (hostData.availableConnections.size() > coreCachedConnections) {
                        // Only one timer is armed at a time (nextTimeout <= 0 means none).
                        if (hostData.nextTimeout <= 0) {
                            hostData.timeoutKey = WorkerUtils.executeAfter(connection.getIoThread(), hostData.timeoutTask, timeToLive, TimeUnit.MILLISECONDS);
                            hostData.nextTimeout = connectionHolder.timeout;
                        }
                    }
                }
            }
        } else if (connection.isOpen() && connection.isUpgraded()) {
            // we treat upgraded connections as closed
            // as we do not want the connection pool filled with upgraded connections
            // if the connection is actually closed the close setter will handle it
            connection.getCloseSetter().set(null);
            handleClosedConnection(hostData, connectionHolder);
        }
    }
}
public class APIKeysInner {
    /**
     * Delete an API Key of an Application Insights component.
     *
     * @param resourceGroupName The name of the resource group.
     * @param resourceName The name of the Application Insights component resource.
     * @param keyId The API Key ID. This is unique within a Application Insights component.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the ApplicationInsightsComponentAPIKeyInner object
     */
    public Observable<ApplicationInsightsComponentAPIKeyInner> deleteAsync(String resourceGroupName, String resourceName, String keyId) {
        // Delegate to the ServiceResponse variant and unwrap just the body.
        return deleteWithServiceResponseAsync(resourceGroupName, resourceName, keyId).map(new Func1<ServiceResponse<ApplicationInsightsComponentAPIKeyInner>, ApplicationInsightsComponentAPIKeyInner>() {
            @Override
            public ApplicationInsightsComponentAPIKeyInner call(ServiceResponse<ApplicationInsightsComponentAPIKeyInner> response) {
                return response.body();
            }
        });
    }
}
public class ErrorRootCauseEntityMarshaller {
    /**
     * Marshalls the given parameter object, writing each bound field
     * (name, exceptions, remote) through the protocol marshaller.
     *
     * @param errorRootCauseEntity the entity to marshall; must not be null
     * @param protocolMarshaller the marshaller to write into
     * @throws SdkClientException if the entity is null or any field fails to marshall
     */
    public void marshall(ErrorRootCauseEntity errorRootCauseEntity, ProtocolMarshaller protocolMarshaller) {
        if (errorRootCauseEntity == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(errorRootCauseEntity.getName(), NAME_BINDING);
            protocolMarshaller.marshall(errorRootCauseEntity.getExceptions(), EXCEPTIONS_BINDING);
            protocolMarshaller.marshall(errorRootCauseEntity.getRemote(), REMOTE_BINDING);
        } catch (Exception e) {
            // Wrap every marshalling failure uniformly, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class PropertyNotUniqueException { /** * Thrown when trying to create a { @ link SchemaConcept } using a unique property which is already taken . * For example this happens when using an already taken { @ link Label } */ public static PropertyNotUniqueException cannotCreateProperty ( Concept concept , Schema . VertexProperty property , Object value ) { } }
return create ( UNIQUE_PROPERTY_TAKEN . getMessage ( property . name ( ) , value , concept ) ) ;
public class CacheManager { /** * { @ inheritDoc } */ @ Override public void removeAllDataFromCache ( ) { } }
for ( Persister persister : this . listPersister ) { if ( persister instanceof CacheCleaner ) { ( ( CacheCleaner ) persister ) . removeAllDataFromCache ( ) ; } if ( persister instanceof ObjectPersisterFactory ) { ObjectPersisterFactory factory = ( ObjectPersisterFactory ) persister ; List < ObjectPersister < ? > > listPersisterForFactory = mapFactoryToPersister . get ( factory ) ; for ( ObjectPersister < ? > objectPersister : listPersisterForFactory ) { objectPersister . removeAllDataFromCache ( ) ; } } }
public class DefaultInterval { /** * ( non - Javadoc ) * @ see org . virginia . pbhs . parameters . Interval # getMaximumStart ( ) */ @ Override public Long getMaximumStart ( ) { } }
calculator ( ) ; if ( simple ) { return v [ 1 ] ; } else { Weight maxStart = cn . getMaximumStart ( ) ; return maxStart . isInfinity ( ) ? null : maxStart . value ( ) ; }
public class LatLongUtils {
    /**
     * Calculates geodetic distance between two LatLongs using Vincenty inverse formula
     * for ellipsoids. This is very accurate but consumes more resources and time than the
     * sphericalDistance method.
     * Adaptation of Chriss Veness' JavaScript Code on
     * http://www.movable-type.co.uk/scripts/latlong-vincenty.html
     * Paper: Vincenty inverse formula - T Vincenty, "Direct and Inverse Solutions of Geodesics
     * on the Ellipsoid with application of nested equations", Survey Review, vol XXII no 176,
     * 1975 (http://www.ngs.noaa.gov/PUBS_LIB/inverse.pdf)
     *
     * @param latLong1 first LatLong
     * @param latLong2 second LatLong
     * @return distance in meters between points as a double; returns 0 for coincident
     *         points and also 0 if the iteration fails to converge
     */
    public static double vincentyDistance ( LatLong latLong1 , LatLong latLong2 ) {
        // Ellipsoid flattening and the difference in longitude (radians).
        double f = 1 / LatLongUtils . INVERSE_FLATTENING ;
        double L = Math . toRadians ( latLong2 . getLongitude ( ) - latLong1 . getLongitude ( ) ) ;
        // Reduced latitudes on the auxiliary sphere.
        double U1 = Math . atan ( ( 1 - f ) * Math . tan ( Math . toRadians ( latLong1 . getLatitude ( ) ) ) ) ;
        double U2 = Math . atan ( ( 1 - f ) * Math . tan ( Math . toRadians ( latLong2 . getLatitude ( ) ) ) ) ;
        double sinU1 = Math . sin ( U1 ) , cosU1 = Math . cos ( U1 ) ;
        double sinU2 = Math . sin ( U2 ) , cosU2 = Math . cos ( U2 ) ;
        // lambda: difference in longitude on the auxiliary sphere; iterated to convergence.
        double lambda = L , lambdaP , iterLimit = 100 ;
        double cosSqAlpha = 0 , sinSigma = 0 , cosSigma = 0 , cos2SigmaM = 0 , sigma = 0 , sinLambda = 0 , sinAlpha = 0 , cosLambda = 0 ;
        do {
            sinLambda = Math . sin ( lambda ) ; cosLambda = Math . cos ( lambda ) ;
            sinSigma = Math . sqrt ( ( cosU2 * sinLambda ) * ( cosU2 * sinLambda ) + ( cosU1 * sinU2 - sinU1 * cosU2 * cosLambda ) * ( cosU1 * sinU2 - sinU1 * cosU2 * cosLambda ) ) ;
            if ( sinSigma == 0 ) return 0 ; // co-incident points
            cosSigma = sinU1 * sinU2 + cosU1 * cosU2 * cosLambda ;
            sigma = Math . atan2 ( sinSigma , cosSigma ) ;
            sinAlpha = cosU1 * cosU2 * sinLambda / sinSigma ;
            cosSqAlpha = 1 - sinAlpha * sinAlpha ;
            // cos2SigmaM is undefined on the equatorial line (cosSqAlpha == 0); use 0.
            if ( cosSqAlpha != 0 ) { cos2SigmaM = cosSigma - 2 * sinU1 * sinU2 / cosSqAlpha ; } else { cos2SigmaM = 0 ; }
            double C = f / 16 * cosSqAlpha * ( 4 + f * ( 4 - 3 * cosSqAlpha ) ) ;
            lambdaP = lambda ;
            lambda = L + ( 1 - C ) * f * sinAlpha * ( sigma + C * sinSigma * ( cos2SigmaM + C * cosSigma * ( - 1 + 2 * cos2SigmaM * cos2SigmaM ) ) ) ;
        } while ( Math . abs ( lambda - lambdaP ) > 1e-12 && -- iterLimit > 0 ) ;
        if ( iterLimit == 0 ) return 0 ; // formula failed to converge
        // Closed-form correction terms using the ellipsoid's semi-axes.
        double uSq = cosSqAlpha * ( Math . pow ( LatLongUtils . EQUATORIAL_RADIUS , 2 ) - Math . pow ( LatLongUtils . POLAR_RADIUS , 2 ) ) / Math . pow ( LatLongUtils . POLAR_RADIUS , 2 ) ;
        double A = 1 + uSq / 16384 * ( 4096 + uSq * ( - 768 + uSq * ( 320 - 175 * uSq ) ) ) ;
        double B = uSq / 1024 * ( 256 + uSq * ( - 128 + uSq * ( 74 - 47 * uSq ) ) ) ;
        double deltaSigma = B * sinSigma * ( cos2SigmaM + B / 4 * ( cosSigma * ( - 1 + 2 * cos2SigmaM * cos2SigmaM ) - B / 6 * cos2SigmaM * ( - 3 + 4 * sinSigma * sinSigma ) * ( - 3 + 4 * cos2SigmaM * cos2SigmaM ) ) ) ;
        // Final geodesic distance in meters.
        double s = LatLongUtils . POLAR_RADIUS * A * ( sigma - deltaSigma ) ;
        return s ;
    }
}
public class LocalDateTime { /** * Returns a copy of this datetime plus the specified number of minutes . * This LocalDateTime instance is immutable and unaffected by this method call . * The following three lines are identical in effect : * < pre > * LocalDateTime added = dt . plusMinutes ( 6 ) ; * LocalDateTime added = dt . plus ( Period . minutes ( 6 ) ) ; * LocalDateTime added = dt . withFieldAdded ( DurationFieldType . minutes ( ) , 6 ) ; * < / pre > * @ param minutes the amount of minutes to add , may be negative * @ return the new LocalDateTime plus the increased minutes */ public LocalDateTime plusMinutes ( int minutes ) { } }
if ( minutes == 0 ) { return this ; } long instant = getChronology ( ) . minutes ( ) . add ( getLocalMillis ( ) , minutes ) ; return withLocalMillis ( instant ) ;
public class QuoteMetaCharacters {
    /**
     * Quote metacharacters in the text: scans {@code text} left to right, emitting
     * plain runs verbatim and each metacharacter's replacement from {@code map}.
     *
     * @throws IOException if emitting literals fails
     */
    public void process ( ) throws IOException {
        int pos = 0 ;
        // NOTE: do-while means an empty text still triggers one emitLiteral("") call.
        do {
            int meta = findNextMeta ( text , pos ) ;
            if ( meta >= 0 ) {
                // Emit the plain run before the metacharacter, then its replacement.
                emitLiteral ( text . substring ( pos , meta ) ) ;
                emitLiteral ( map . getReplacement ( text . substring ( meta , meta + 1 ) ) ) ;
                pos = meta + 1 ;
            } else {
                // No more metacharacters: emit the remainder verbatim and stop.
                emitLiteral ( text . substring ( pos , text . length ( ) ) ) ;
                pos = text . length ( ) ;
            }
        } while ( pos < text . length ( ) ) ;
    }
}
public class CalendarAccessor { /** * ( non - Javadoc ) * @ see com . impetus . kundera . property . PropertyAccessor # fromBytes ( byte [ ] ) */ @ Override public Calendar fromBytes ( Class targetClass , byte [ ] b ) { } }
Calendar cal = Calendar . getInstance ( ) ; Date d = new Date ( ) ; if ( b == null ) { return null ; } LongAccessor longAccessor = new LongAccessor ( ) ; d . setTime ( longAccessor . fromBytes ( targetClass , b ) ) ; cal . setTime ( d ) ; return cal ;
public class PepXmlParser { /** * This entry point may be used to test if JAXB is functioning properly after all * the java packaging and minification . < br / > * To run use ` java - cp path - to . jar umich . ms . fileio . filetypes . pepxml . PepXmlParser ` . < br / > * @ param args List of existing filesystem paths . */ public static void main ( String [ ] args ) throws FileParsingException { } }
if ( args . length == 0 ) System . err . println ( "Supply arguments that are paths to pepxml files" ) ; List < Path > paths = Arrays . stream ( args ) . map ( s -> Paths . get ( s ) ) . collect ( Collectors . toList ( ) ) ; if ( paths . stream ( ) . anyMatch ( p -> ! Files . exists ( p ) ) ) System . err . println ( "Not all input files exist" ) ; for ( Path p : paths ) { System . out . println ( "\nParsing: " + p . toString ( ) ) ; MsmsPipelineAnalysis pepxml = PepXmlParser . parse ( p ) ; int sum = pepxml . getMsmsRunSummary ( ) . stream ( ) . flatMapToInt ( msmsRunSummary -> msmsRunSummary . getSpectrumQuery ( ) . stream ( ) . flatMapToInt ( spectrumQuery -> spectrumQuery . getSearchResult ( ) . stream ( ) . mapToInt ( value -> value . getSearchHit ( ) . size ( ) ) ) ) . sum ( ) ; System . out . printf ( "Done, found %d PSMs\n" , sum ) ; }
public class GenericRevisionInfo { /** * The deployment groups for which this is the current target revision . * @ param deploymentGroups * The deployment groups for which this is the current target revision . */ public void setDeploymentGroups ( java . util . Collection < String > deploymentGroups ) { } }
if ( deploymentGroups == null ) { this . deploymentGroups = null ; return ; } this . deploymentGroups = new com . amazonaws . internal . SdkInternalList < String > ( deploymentGroups ) ;
public class AngularMomentum { /** * Calculates the Iy operator */ public IMatrix getIy ( ) { } }
return ( new IMatrix ( getIplus ( ) . sub ( getIminus ( ) ) ) ) . mul ( new Complex ( 0d , 1d ) ) . mul ( new Complex ( 0.5 , 0d ) ) ;
public class NumericalValueRange { /** * The build method for creating { @ link NumericalValueRange } objects . The specified < i > min < / i > and < i > max < / i > * parameters are going to be used for initializing the { @ link # min } and { @ link # max } variables of the new * { @ link NumericalValueRange } . Since this only makes sense if < i > min < / i > & le ; < i > max < / i > , < code > null < / code > will be * returned if this requirement is violated . * @ param min the lower bound * @ param max the upper bound * @ return a { @ link NumericalValueRange } representing the specified interval or < code > null < / code > */ public static final NumericalValueRange create ( final int min , final int max ) { } }
if ( min > max ) return null ; return new NumericalValueRange ( min , max ) ;
public class ConnMetaCodeGen { /** * Output Metadata class * @ param def definition * @ param out Writer * @ throws IOException ioException */ @ Override public void writeClassBody ( Definition def , Writer out ) throws IOException { } }
out . write ( "public class " + getClassName ( def ) + " implements ConnectionMetaData" ) ; writeLeftCurlyBracket ( out , 0 ) ; int indent = 1 ; writeDefaultConstructor ( def , out , indent ) ; writeEIS ( def , out , indent ) ; writeUsername ( def , out , indent ) ; writeRightCurlyBracket ( out , 0 ) ;
public class TemporalProperty { /** * Removes values no longer in effect at the specified point in time . * @ param limit */ public void purge ( Instant limit ) { } }
Instant firstKeeper = values . floorKey ( limit ) ; if ( firstKeeper == null ) return ; Set < Instant > purgables = values . headMap ( firstKeeper ) . keySet ( ) ; if ( purgables == null ) return ; for ( Instant purgable : purgables ) { values . remove ( purgable ) ; }
public class ClassFile {
    /**
     * Writes the ClassFile to the given DataOutput in the JVM class-file layout:
     * magic, version, constant pool, access flags, this/super class, interfaces,
     * fields, methods, then attributes.
     *
     * @param dout destination stream
     * @throws IOException if writing to the stream fails
     * @throws IllegalStateException if the interface, field, method or attribute
     *         count exceeds the class-file u2 limit of 65535
     */
    public void writeTo ( DataOutput dout ) throws IOException {
        dout . writeInt ( MAGIC ) ;
        dout . writeInt ( mVersion ) ;
        mCp . writeTo ( dout ) ;
        {
            int flags = mModifiers . getBitmask ( ) ;
            if ( ! mModifiers . isInterface ( ) ) {
                // Set the ACC_SUPER flag for classes only.
                // (ACC_SUPER shares the 0x0020 bit value with Modifier.SYNCHRONIZED.)
                flags |= Modifier . SYNCHRONIZED ;
            }
            dout . writeShort ( flags ) ;
        }
        dout . writeShort ( mThisClass . getIndex ( ) ) ;
        if ( mSuperClass != null ) {
            dout . writeShort ( mSuperClass . getIndex ( ) ) ;
        } else {
            // Constant-pool index 0 indicates "no superclass".
            dout . writeShort ( 0 ) ;
        }
        // Interfaces: u2 count followed by one u2 constant-pool index each.
        int size = mInterfaces . size ( ) ;
        if ( size > 65535 ) { throw new IllegalStateException ( "Interfaces count cannot exceed 65535: " + size ) ; }
        dout . writeShort ( size ) ;
        for ( int i = 0 ; i < size ; i ++ ) {
            int index = mInterfaces . get ( i ) . getIndex ( ) ;
            dout . writeShort ( index ) ;
        }
        // Fields: u2 count followed by each field's own serialized form.
        size = mFields . size ( ) ;
        if ( size > 65535 ) { throw new IllegalStateException ( "Field count cannot exceed 65535: " + size ) ; }
        dout . writeShort ( size ) ;
        for ( int i = 0 ; i < size ; i ++ ) {
            FieldInfo field = mFields . get ( i ) ;
            field . writeTo ( dout ) ;
        }
        // Methods: same pattern as fields.
        size = mMethods . size ( ) ;
        if ( size > 65535 ) { throw new IllegalStateException ( "Method count cannot exceed 65535: " + size ) ; }
        dout . writeShort ( size ) ;
        for ( int i = 0 ; i < size ; i ++ ) {
            MethodInfo method = mMethods . get ( i ) ;
            method . writeTo ( dout ) ;
        }
        // Class-level attributes, last per the class-file format.
        size = mAttributes . size ( ) ;
        if ( size > 65535 ) { throw new IllegalStateException ( "Attribute count cannot exceed 65535: " + size ) ; }
        dout . writeShort ( size ) ;
        for ( int i = 0 ; i < size ; i ++ ) {
            Attribute attr = mAttributes . get ( i ) ;
            attr . writeTo ( dout ) ;
        }
    }
}
public class ConstraintFactory { /** * Generates a new constraint instance using the given constructor , the element and the * generateor callback as parameters . */ private Constraint getObject ( final MathRandom random , final Element element , final Constructor < ? extends Constraint > constructor ) throws IllegalArgumentException , InstantiationException , IllegalAccessException , InvocationTargetException { } }
LOG . debug ( "Creating constraint: " + element . getAttributes ( ) ) ; Constraint instance = constructor . newInstance ( new Object [ ] { random , element , generator } ) ; injector . injectMembers ( instance ) ; return instance ;
public class SarlBehaviorBuilderImpl { /** * Add a modifier . * @ param modifier the modifier to add . */ public void addModifier ( String modifier ) { } }
if ( ! Strings . isEmpty ( modifier ) ) { this . sarlBehavior . getModifiers ( ) . add ( modifier ) ; }
public class ManagedClustersInner { /** * Gets cluster user credential of a managed cluster . * Gets cluster user credential of the managed cluster with a specified resource group and name . * @ param resourceGroupName The name of the resource group . * @ param resourceName The name of the managed cluster resource . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ throws CloudException thrown if the request is rejected by server * @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @ return the CredentialResultsInner object if successful . */ public CredentialResultsInner listClusterUserCredentials ( String resourceGroupName , String resourceName ) { } }
return listClusterUserCredentialsWithServiceResponseAsync ( resourceGroupName , resourceName ) . toBlocking ( ) . single ( ) . body ( ) ;
public class ImplEnhanceHistogram { /** * Local equalization along a column . Image must be at least the histogram ' s width ( 2 * r + 1 ) in width and height . */ public static void equalizeLocalCol ( GrayU16 input , int radius , int startX , GrayU16 output , IWorkArrays workArrays ) { } }
int width = 2 * radius + 1 ; int area = width * width ; int maxValue = workArrays . length ( ) - 1 ; int [ ] histogram = workArrays . pop ( ) ; int [ ] transform = workArrays . pop ( ) ; // specify the top and bottom of the histogram window and make sure it is inside bounds int hist0 = startX ; int hist1 = startX + width ; if ( hist1 > input . width ) { hist1 = input . width ; hist0 = hist1 - width ; } // initialize the histogram . ignore top border localHistogram ( input , hist0 , 0 , hist1 , width , histogram ) ; // compute transformation table int sum = 0 ; for ( int i = 0 ; i < histogram . length ; i ++ ) { transform [ i ] = sum += histogram [ i ] ; } // compute the output across the row int indexIn = input . startIndex + radius * input . stride + startX ; int indexOut = output . startIndex + radius * output . stride + startX ; for ( int x = 0 ; x < radius ; x ++ ) { int inputValue = input . data [ indexIn ++ ] & 0xff ; output . data [ indexOut ++ ] = ( short ) ( ( transform [ inputValue ] * maxValue ) / area ) ; } // move down while equalizing the rows one at a time for ( int y = radius + 1 ; y < input . height - radius ; y ++ ) { // remove the top most row indexIn = input . startIndex + ( y - radius - 1 ) * input . stride ; for ( int x = hist0 ; x < hist1 ; x ++ ) { histogram [ input . data [ indexIn + x ] & 0xFFFF ] -- ; } // add the bottom most row indexIn += width * input . stride ; for ( int x = hist0 ; x < hist1 ; x ++ ) { histogram [ input . data [ indexIn + x ] & 0xFFFF ] ++ ; } // compute transformation table sum = 0 ; for ( int i = 0 ; i < histogram . length ; i ++ ) { transform [ i ] = sum += histogram [ i ] ; } // compute the output across the row indexIn = input . startIndex + y * input . stride + startX ; indexOut = output . startIndex + y * output . stride + startX ; for ( int x = 0 ; x < radius ; x ++ ) { int inputValue = input . data [ indexIn ++ ] & 0xff ; output . 
data [ indexOut ++ ] = ( short ) ( ( transform [ inputValue ] * maxValue ) / area ) ; } } workArrays . recycle ( histogram ) ; workArrays . recycle ( transform ) ;
public class ListResolverEndpointsResult { /** * The resolver endpoints that were created by using the current AWS account , and that match the specified filters , * if any . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setResolverEndpoints ( java . util . Collection ) } or { @ link # withResolverEndpoints ( java . util . Collection ) } if * you want to override the existing values . * @ param resolverEndpoints * The resolver endpoints that were created by using the current AWS account , and that match the specified * filters , if any . * @ return Returns a reference to this object so that method calls can be chained together . */ public ListResolverEndpointsResult withResolverEndpoints ( ResolverEndpoint ... resolverEndpoints ) { } }
if ( this . resolverEndpoints == null ) { setResolverEndpoints ( new java . util . ArrayList < ResolverEndpoint > ( resolverEndpoints . length ) ) ; } for ( ResolverEndpoint ele : resolverEndpoints ) { this . resolverEndpoints . add ( ele ) ; } return this ;
public class ListTagsForProjectResult { /** * The tags for the project . * @ param tags * The tags for the project . * @ return Returns a reference to this object so that method calls can be chained together . */ public ListTagsForProjectResult withTags ( java . util . Map < String , String > tags ) { } }
setTags ( tags ) ; return this ;
public class StorIOContentResolver { /** * Allows observe changes of required { @ link Uri } . * Notice , that returned { @ link Flowable } is " Hot Flowable " , it never ends , which means , * that you should manually dispose from it to prevent memory leak . * Also , it can cause BackPressure problems . * @ param uri { @ link Uri } that should be monitored . * @ return { @ link Flowable } of { @ link Changes } subscribed to changes of required Uri . */ @ NonNull public Flowable < Changes > observeChangesOfUri ( @ NonNull Uri uri , @ NonNull BackpressureStrategy backpressureStrategy ) { } }
return observeChangesOfUris ( Collections . singleton ( uri ) , backpressureStrategy ) ;
public class Validator { /** * Load a process definition from file . * @ param def * The { @ link File } which contains a definition . * @ return The definition in { @ link Document } format , or < code > null < / code > if the file could not be found or didn ' t contain parseable XML . */ static Document loadDefinition ( final File def ) { } }
// Parse the jDPL definition into a DOM tree . final Document document = XmlUtils . parseFile ( def ) ; if ( document == null ) { return null ; } // Log the jPDL version from the process definition ( if applicable and available ) . final Node xmlnsNode = document . getFirstChild ( ) . getAttributes ( ) . getNamedItem ( "xmlns" ) ; if ( xmlnsNode != null && StringUtils . isNotBlank ( xmlnsNode . getNodeValue ( ) ) && xmlnsNode . getNodeValue ( ) . contains ( "jpdl" ) ) { final String version = xmlnsNode . getNodeValue ( ) . substring ( xmlnsNode . getNodeValue ( ) . length ( ) - 3 ) ; LOGGER . info ( "jPDL version == " + version ) ; } return document ;
public class StreamingManager { /** * * 从直播流数据中录制点播 , 该方法可以指定录制的时间段 * @ param streamKey 流名称 * @ param fileName 录制后保存的文件名 * @ param start 录制开始的时间戳 , 单位秒 * @ param end 录制结束的时间戳 , 单位秒 * @ param other 文档中指定的其它参数 */ public String saveAs ( String streamKey , String fileName , long start , long end , StringMap other ) throws QiniuException { } }
String path = encodeKey ( streamKey ) + "/saveas" ; StringMap param = other != null ? other : new StringMap ( ) ; param . putNotEmpty ( "fname" , fileName ) . put ( "start" , start ) . put ( "end" , end ) ; String body = param . jsonString ( ) ; SaveRet r = post ( path , body , SaveRet . class ) ; return r . fname ;
public class DnsEndpointGroupBuilder {
    /**
     * Sets the {@link EventLoop} to use for sending DNS queries.
     *
     * @throws NullPointerException if {@code eventLoop} is null
     * @throws IllegalArgumentException if the event loop's transport type is unsupported
     */
    public final B eventLoop ( EventLoop eventLoop ) {
        requireNonNull ( eventLoop , "eventLoop" ) ;
        // Validate before mutating state so a failed call leaves the builder unchanged.
        checkArgument ( TransportType . isSupported ( eventLoop ) , "unsupported event loop type: %s" , eventLoop ) ;
        this . eventLoop = eventLoop ;
        return self ( ) ;
    }
}
public class GVRAvatar { /** * Stops the currently running animation , if any . * @ see GVRAvatar # start ( String ) * @ see GVRAnimationEngine # stop ( GVRAnimation ) */ public void stop ( ) { } }
synchronized ( mAnimQueue ) { if ( mIsRunning && ( mAnimQueue . size ( ) > 0 ) ) { mIsRunning = false ; GVRAnimator animator = mAnimQueue . get ( 0 ) ; mAnimQueue . clear ( ) ; animator . stop ( ) ; } }
public class ObjectFactory {
    /**
     * Create an instance of {@link JAXBElement}{@code <}{@link PurchaseOrder}{@code >}.
     * JAXB-generated factory method; the element carries no declared scope (null).
     *
     * @param value the payload to wrap
     * @return the wrapped element bound to the PurchaseOrder QName
     */
    @ XmlElementDecl ( namespace = "http://schema.intuit.com/finance/v3" , name = "PurchaseOrder" , substitutionHeadNamespace = "http://schema.intuit.com/finance/v3" , substitutionHeadName = "IntuitObject" )
    public JAXBElement < PurchaseOrder > createPurchaseOrder ( PurchaseOrder value ) {
        return new JAXBElement < PurchaseOrder > ( _PurchaseOrder_QNAME , PurchaseOrder . class , null , value ) ;
    }
}
public class ConcurrentBoundedWorkUnitList { /** * Get the { @ link List } of { @ link WorkUnit } s in this container . */ public List < WorkUnit > getWorkUnits ( ) { } }
ImmutableList . Builder < WorkUnit > allWorkUnits = ImmutableList . builder ( ) ; for ( List < WorkUnit > workUnits : this . workUnitsMap . values ( ) ) { allWorkUnits . addAll ( workUnits ) ; } return allWorkUnits . build ( ) ;
public class LogTransformer {
    /**
     * doClass() processes a single class by first creating a class description from
     * the byte codes. If it is a class (i.e. not an interface) the methods defined
     * have bodies, and a static final logger object is added with the name of this
     * class as an argument, and each method then gets processed with doMethod(...)
     * to have logger calls added.
     *
     * @param name class name (slashes separate, not dots)
     * @param clazz the class being transformed (unused here beyond identification)
     * @param b original bytecode
     * @return instrumented bytecode, or the original bytes if instrumentation failed
     */
    private byte [ ] doClass ( String name , Class < ? > clazz , byte [ ] b ) {
        ClassPool pool = ClassPool . getDefault ( ) ;
        CtClass cl = null ;
        try {
            cl = pool . makeClass ( new ByteArrayInputStream ( b ) ) ;
            if ( cl . isInterface ( ) == false ) {
                loggerName = "_____log" ;
                // We have to declare the log variable.
                String pattern1 = "private static org.slf4j.Logger {};" ;
                String loggerDefinition = format ( pattern1 , loggerName ) . getMessage ( ) ;
                CtField field = CtField . make ( loggerDefinition , cl ) ;
                // and assign it the appropriate value.
                String pattern2 = "org.slf4j.LoggerFactory.getLogger({}.class);" ;
                String replace = name . replace ( '/' , '.' ) ;
                String getLogger = format ( pattern2 , replace ) . getMessage ( ) ;
                cl . addField ( field , getLogger ) ;
                // then check every behaviour (which includes methods). We are only
                // interested in non-empty ones, as they have code.
                // NOTE: This will be changed, as empty methods should be instrumented too.
                CtBehavior [ ] methods = cl . getDeclaredBehaviors ( ) ;
                for ( int i = 0 ; i < methods . length ; i ++ ) {
                    if ( methods [ i ] . isEmpty ( ) == false ) {
                        doMethod ( methods [ i ] ) ;
                    }
                }
                b = cl . toBytecode ( ) ;
            }
        } catch ( Exception e ) {
            // Instrumentation is best-effort: on failure, report and fall through to
            // return the original (unmodified) bytecode.
            System . err . println ( "Could not instrument " + name + ", " + e ) ;
            e . printStackTrace ( System . err ) ;
        } finally {
            // Detach from the ClassPool to avoid leaking CtClass instances.
            if ( cl != null ) {
                cl . detach ( ) ;
            }
        }
        return b ;
    }
}
public class EndiannessUtil { public static < R > float readFloat ( ByteAccessStrategy < R > strategy , R resource , long offset , boolean useBigEndian ) { } }
return useBigEndian ? readFloatB ( strategy , resource , offset ) : readFloatL ( strategy , resource , offset ) ;
public class Sets { /** * Create a unmodifiable set with contents . * @ param contents The contents of the set . * @ return A unmodifiable set containing contents . */ public static < T > Set < T > newUnmodifiableSet ( T ... contents ) { } }
Set < T > set ; if ( contents == null || contents . length == 0 ) return Collections . emptySet ( ) ; if ( contents . length == 1 ) return Collections . singleton ( contents [ 0 ] ) ; set = newSet ( contents . length ) ; Collections . addAll ( set , contents ) ; return Collections . unmodifiableSet ( set ) ;
public class GLContext {
    /**
     * Returns a (created on demand, then cached) image used when filling solid color
     * quads or triangles.
     */
    Image fillImage ( ) {
        // Lazily create and cache a 1x1 solid-white image on first use.
        if ( fillImage == null ) {
            CanvasImage image = platform . graphics ( ) . createImage ( 1 , 1 ) ;
            // 0xFFFFFFFF = opaque white; fill the single pixel.
            image . canvas ( ) . setFillColor ( 0xFFFFFFFF ) . fillRect ( 0 , 0 , image . width ( ) , image . height ( ) ) ;
            fillImage = image ;
        }
        return fillImage ;
    }
}
public class DemoActivity {
    /**
     * Applying pager image animation state: fading out toolbar, title and background.
     *
     * @param position animation progress; 0f = fully collapsed, 1f = fully expanded
     *                 (boundary checks below rely on these exact values)
     * @param isLeaving true when the pager is being dismissed
     */
    private void applyFullPagerState ( float position , boolean isLeaving ) {
        // Background and toolbar fade in with position; hidden entirely at 0.
        views . fullBackground . setVisibility ( position == 0f ? View . INVISIBLE : View . VISIBLE ) ;
        views . fullBackground . setAlpha ( position ) ;
        views . pagerToolbar . setVisibility ( position == 0f ? View . INVISIBLE : View . VISIBLE ) ;
        // Toolbar stays transparent while the system UI is hidden, regardless of position.
        views . pagerToolbar . setAlpha ( isSystemUiShown ( ) ? position : 0f ) ;
        // Title is only shown when fully expanded.
        views . pagerTitle . setVisibility ( position == 1f ? View . VISIBLE : View . INVISIBLE ) ;
        if ( isLeaving && position == 0f ) {
            // Fully collapsed while leaving: deactivate the pager and restore system UI.
            pagerAdapter . setActivated ( false ) ;
            showSystemUi ( true ) ;
        }
    }
}
public class HighLevelAbstractionDefinition { /** * Gets the relation between two temporal extended proposition definitions . * @ param lhsDef * a { @ link TemporalExtendedPropositionDefinition } . * @ param rhsDef * a { @ link TemporalExtendedPropositionDefinition } . * @ return a < code > Relation < / code > , or < code > null < / code > if none was * found . */ public Relation getRelation ( TemporalExtendedPropositionDefinition lhsDef , TemporalExtendedPropositionDefinition rhsDef ) { } }
return this . defPairsMap . get ( Arrays . asList ( lhsDef , rhsDef ) ) ;
public class CmsDependenciesEdit {
    /**
     * Creates the list of widgets for this dialog.<p>
     * Registers a select widget for the dependency's module name and an input
     * widget for its version, both on the first dialog page.
     */
    @Override
    protected void defineWidgets ( ) {
        initModule ( ) ;
        setKeyPrefix ( KEY_PREFIX ) ;
        // Module name is picked from the list of available modules.
        addWidget ( new CmsWidgetDialogParameter ( m_dependency , "name" , PAGES [ 0 ] , new CmsSelectWidget ( getModules ( ) ) ) ) ;
        // Version is free-form text input.
        addWidget ( new CmsWidgetDialogParameter ( m_dependency , "version.version" , PAGES [ 0 ] , new CmsInputWidget ( ) ) ) ;
    }
}
public class Conditions {
    /**
     * Returns a {@link ICondition condition} that is satisfied by a
     * {@link org.cornutum.tcases.PropertySet} that contains between a specified minimum
     * (inclusive) and maximum (inclusive) number of instances of a property.
     *
     * @param property the property whose instance count is constrained
     * @param minimum inclusive lower bound on the instance count
     * @param maximum inclusive upper bound on the instance count
     */
    public static Between between ( String property , int minimum , int maximum ) {
        // Conjunction of an inclusive lower bound and an inclusive upper bound.
        return new Between ( notLessThan ( property , minimum ) , notMoreThan ( property , maximum ) ) ;
    }
}
public class Resolve {
    /**
     * Find a global type in given scope and load corresponding class.
     *
     * @param env The current environment.
     * @param scope The scope in which to look for the type.
     * @param name The type's name.
     * @param recoveryLoadClass strategy used when class loading needs recovery
     * @return the resolved type symbol, an AmbiguityError if two distinct types
     *         match, or typeNotFound if nothing matched
     */
    Symbol findGlobalType ( Env < AttrContext > env , Scope scope , Name name , RecoveryLoadClass recoveryLoadClass ) {
        Symbol bestSoFar = typeNotFound ;
        for ( Symbol s : scope . getSymbolsByName ( name ) ) {
            Symbol sym = loadClass ( env , s . flatName ( ) , recoveryLoadClass ) ;
            // Two distinct resolved types for the same name is an ambiguity.
            if ( bestSoFar . kind == TYP && sym . kind == TYP && bestSoFar != sym )
                return new AmbiguityError ( bestSoFar , sym ) ;
            else
                // Otherwise keep whichever candidate ranks better.
                bestSoFar = bestOf ( bestSoFar , sym ) ;
        }
        return bestSoFar ;
    }
}
public class Writer {
    /**
     * Perform a For Comprehension over a Writer, accepting 3 generating functions.
     * This results in a four level nested internal iteration over the provided Writers.
     * <pre>
     * {@code
     * import static com.oath.cyclops.reactor.Writers.forEach4;
     * forEach4(Writer.just(1),
     *          a -> Writer.just(a + 1),
     *          (a, b) -> Writer.<Integer>just(a + b),
     *          (a, b, c) -> Writer.<Integer>just(a + b + c),
     *          Tuple::tuple)
     * }
     * </pre>
     *
     * @param value2 Nested Writer generated from this Writer's value
     * @param value3 Nested Writer generated from the first two values
     * @param value4 Nested Writer generated from the first three values
     * @param yieldingFunction Generates a result per combination
     * @return Writer with a combined value generated by the yielding function
     */
    public < R1 , R2 , R3 , R4 > Writer < W , R4 > forEach4 ( Function < ? super T , ? extends Writer < W , R1 > > value2 , BiFunction < ? super T , ? super R1 , ? extends Writer < W , R2 > > value3 , Function3 < ? super T , ? super R1 , ? super R2 , ? extends Writer < W , R3 > > value4 , Function4 < ? super T , ? super R1 , ? super R2 , ? super R3 , ? extends R4 > yieldingFunction ) {
        // Each flatMap level binds one more generated value; the innermost map
        // combines all four values through the yielding function.
        return this . flatMap ( in -> {
            Writer < W , R1 > a = value2 . apply ( in ) ;
            return a . flatMap ( ina -> {
                Writer < W , R2 > b = value3 . apply ( in , ina ) ;
                return b . flatMap ( inb -> {
                    Writer < W , R3 > c = value4 . apply ( in , ina , inb ) ;
                    return c . map ( in2 -> {
                        return yieldingFunction . apply ( in , ina , inb , in2 ) ;
                    } ) ;
                } ) ;
            } ) ;
        } ) ;
    }
}
public class TTFSubSetFile { /** * Returns a List containing the glyph itself plus all glyphs * that this composite glyph uses * @ param in The input from which to determine the included glyphs * @ param glyphOffset The offset the glyph * @ param glyphIdx The index of the base glyph * @ return The list of glyphs building the composite * @ throws IOException Indicates a failure to read from the font file */ private List getIncludedGlyphs ( FontFileReader in , int glyphOffset , Integer glyphIdx ) throws IOException { } }
List ret = new ArrayList ( ) ; ret . add ( glyphIdx ) ; int offset = glyphOffset + ( int ) mtxTab [ glyphIdx . intValue ( ) ] . getOffset ( ) + 10 ; Integer compositeIdx = null ; int flags = 0 ; boolean moreComposites = true ; while ( moreComposites ) { flags = in . readTTFUShort ( offset ) ; compositeIdx = new Integer ( in . readTTFUShort ( offset + 2 ) ) ; ret . add ( compositeIdx ) ; offset += 4 ; if ( ( flags & 1 ) > 0 ) { // ARG _ 1 _ AND _ ARG _ 2 _ ARE _ WORDS offset += 4 ; } else { offset += 2 ; } if ( ( flags & 8 ) > 0 ) { offset += 2 ; // WE _ HAVE _ A _ SCALE } else if ( ( flags & 64 ) > 0 ) { offset += 4 ; // WE _ HAVE _ AN _ X _ AND _ Y _ SCALE } else if ( ( flags & 128 ) > 0 ) { offset += 8 ; // WE _ HAVE _ A _ TWO _ BY _ TWO } if ( ( flags & 32 ) > 0 ) { moreComposites = true ; } else { moreComposites = false ; } } return ret ;
public class CacheSpec {
    /**
     * Sets the concurrency level for the cache being specified.
     *
     * @param <T> the concrete spec subtype returned for fluent chaining
     * @param concurrencyLevel the concurrency level; must be positive
     * @return this spec, cast to the caller's expected subtype
     * @throws IllegalArgumentException if {@code concurrencyLevel <= 0}
     */
    public < T extends CacheSpec < K , V > > T concurrencyLevel ( int concurrencyLevel ) {
        checkArgument ( concurrencyLevel > 0 , "Concurrency Level must be > 0" ) ;
        this . concurrencyLevel = concurrencyLevel ;
        // Self-cast so subclasses keep their own type in fluent call chains.
        return Cast . as ( this ) ;
    }
}
public class AWSDatabaseMigrationServiceClient { /** * Returns information about the type of endpoints available . * @ param describeEndpointTypesRequest * @ return Result of the DescribeEndpointTypes operation returned by the service . * @ sample AWSDatabaseMigrationService . DescribeEndpointTypes * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / dms - 2016-01-01 / DescribeEndpointTypes " target = " _ top " > AWS API * Documentation < / a > */ @ Override public DescribeEndpointTypesResult describeEndpointTypes ( DescribeEndpointTypesRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeDescribeEndpointTypes ( request ) ;
public class ConverterTagHandlerDelegate {
    /**
     * Set Converter instance on parent ValueHolder if it's not being restored.
     * <ol>
     * <li>Cast to ValueHolder</li>
     * <li>If "binding" attribute was specified, fetch/create and re-bind to expression.</li>
     * <li>Otherwise, call {@link #createConverter(FaceletContext) createConverter}.</li>
     * <li>Call {@link ObjectHandler#setAttributes(FaceletContext, Object) setAttributes} on Converter instance.</li>
     * <li>Set the Converter on the ValueHolder</li>
     * <li>If the ValueHolder has a localValue, convert it and set the value</li>
     * </ol>
     *
     * @see ValueHolder
     * @see Converter
     * @see #createConverter(FaceletContext)
     * @see javax.faces.view.facelets.FaceletHandler#apply(javax.faces.view.facelets.FaceletContext, javax.faces.component.UIComponent)
     */
    @Override
    public void apply ( FaceletContext ctx , UIComponent parent ) throws IOException {
        // only process if the component has just been created (not on restore)
        if ( ! ComponentHandler . isNew ( parent ) ) { return ; }
        if ( parent instanceof ValueHolder ) {
            // Direct attachment: the parent itself holds the value.
            applyAttachedObject ( ctx . getFacesContext ( ) , parent ) ;
        } else if ( UIComponent . isCompositeComponent ( parent ) ) {
            // Composite components require a "for" attribute naming the target.
            if ( getFor ( ) == null ) {
                throw new TagException ( _delegate . getTag ( ) , "is nested inside a composite component" + " but does not have a for attribute." ) ;
            }
            // Defer attachment: register the handler for later retargeting.
            FaceletCompositionContext mctx = FaceletCompositionContext . getCurrentInstance ( ctx ) ;
            mctx . addAttachedObjectHandler ( parent , _delegate ) ;
        } else {
            throw new TagException ( _delegate . getTag ( ) , "Parent not composite component or an instance of ValueHolder: " + parent ) ;
        }
    }
}
public class Matrix3x2f { /** * Set the values of this matrix by reading 6 float values from the given { @ link FloatBuffer } in column - major order , * starting at its current position . * The FloatBuffer is expected to contain the values in column - major order . * The position of the FloatBuffer will not be changed by this method . * @ param buffer * the FloatBuffer to read the matrix values from in column - major order * @ return this */ public Matrix3x2f set ( FloatBuffer buffer ) { } }
int pos = buffer . position ( ) ; MemUtil . INSTANCE . get ( this , pos , buffer ) ; return this ;
public class AIProtocolItemStream { /** * / * ( non - Javadoc ) * @ see com . ibm . ws . sib . processor . impl . store . itemstreams . SIMPItemStream # getPersistentData ( java . io . ObjectOutputStream ) */ public void getPersistentData ( ObjectOutputStream oos ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "getPersistentData" , oos ) ; try { HashMap < String , Object > hm = new HashMap < String , Object > ( ) ; hm . put ( "streamId" , streamId . toByteArray ( ) ) ; hm . put ( "flushStarted" , new Boolean ( flushStarted ) ) ; oos . writeObject ( hm ) ; } catch ( IOException e ) { // FFDC FFDCFilter . processException ( e , "com.ibm.ws.sib.processor.impl.store.itemstreams.AIProtocolItemStream.getPersistentData" , "1:146:1.26" , this ) ; SIErrorException e2 = new SIErrorException ( e ) ; SibTr . exception ( tc , e2 ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "getPersistentData" , e2 ) ; throw e2 ; } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "getPersistentData" ) ;
public class Percentile { /** * Creates a Percentile instance with a Double value if in the valid range as described in the * < a href = " https : / / keen . io / docs / api / # percentile " > Keen APIs < / a > . This factory will clamp * values into the appropriate range and round to two decimal places . * @ param percentile The percentile to calculate , coerced to ( 0 , 100.00 ] with two decimal places * of precision . * @ return A Percentile instance representing the percentile to calculate . */ public static Percentile createCoerced ( double percentile ) { } }
// Percentile range is entirely positive , so no need for abs ( ) . double clamped = Math . max ( MIN_PERCENTILE , Math . min ( MAX_PERCENTILE , percentile ) ) ; // Since the range is 0-100 , and only 2 decimal places this technique is fine . double rounded = ( ( double ) Math . round ( clamped * 100 ) ) / 100 ; return new Percentile ( rounded ) ;
public class JsonInputValidatorFactory { /** * Instantiates a validator depending on the class passed as a parameter . * @ param clazz the class the input will be validated to * @ return JsonInputValidator */ public static JsonInputValidator getValidator ( Class < ? > clazz ) { } }
if ( clazz . equals ( Scope . class ) ) { return ScopeValidator . getInstance ( ) ; } if ( clazz . equals ( ApplicationInfo . class ) ) { return ApplicationInfoValidator . getInstance ( ) ; } return null ;
public class AffectedChecker { /** * The user has to specify directory that keep coverage and optionally * mode that should be used to print non affected classes . */ public static void main ( String [ ] args ) { } }
// Parse arguments . String coverageDirName = null ; if ( args . length == 0 ) { System . out . println ( "Incorrect arguments. Directory with coverage has to be specified." ) ; System . exit ( 1 ) ; } coverageDirName = args [ 0 ] ; String mode = null ; if ( args . length > 1 ) { mode = args [ 1 ] ; } boolean forceCacheUse = false ; if ( args . length > 2 ) { forceCacheUse = args [ 2 ] . equals ( FORCE_CACHE_USE ) ; } Set < String > allClasses = new HashSet < String > ( ) ; Set < String > affectedClasses = new HashSet < String > ( ) ; if ( args . length > 3 ) { String options = args [ 3 ] ; Config . loadConfig ( options , true ) ; } else { Config . loadConfig ( ) ; } List < String > nonAffectedClasses = findNonAffectedClasses ( coverageDirName , forceCacheUse , allClasses , affectedClasses ) ; // Print non affected classes . printNonAffectedClasses ( allClasses , affectedClasses , nonAffectedClasses , mode ) ;
public class SSLEngineFactory { /** * Returns a new { @ link SSLEngine } constructed from the config settings . * @ return a { @ link SSLEngine } ready to be used . */ public SSLEngine get ( ) { } }
try { String pass = env . sslKeystorePassword ( ) ; char [ ] password = pass == null || pass . isEmpty ( ) ? null : pass . toCharArray ( ) ; KeyStore ks = env . sslKeystore ( ) ; if ( ks == null ) { ks = KeyStore . getInstance ( KeyStore . getDefaultType ( ) ) ; String ksFile = env . sslKeystoreFile ( ) ; if ( ksFile == null || ksFile . isEmpty ( ) ) { throw new IllegalArgumentException ( "Path to Keystore File must not be null or empty." ) ; } ks . load ( new FileInputStream ( ksFile ) , password ) ; } String defaultAlgorithm = KeyManagerFactory . getDefaultAlgorithm ( ) ; KeyManagerFactory kmf = KeyManagerFactory . getInstance ( defaultAlgorithm ) ; TrustManagerFactory tmf = TrustManagerFactory . getInstance ( defaultAlgorithm ) ; kmf . init ( ks , password ) ; tmf . init ( ks ) ; SSLContext ctx = SSLContext . getInstance ( "TLS" ) ; ctx . init ( kmf . getKeyManagers ( ) , tmf . getTrustManagers ( ) , null ) ; SSLEngine engine = ctx . createSSLEngine ( ) ; engine . setUseClientMode ( true ) ; return engine ; } catch ( Exception ex ) { throw new SSLException ( "Could not create SSLEngine." , ex ) ; }
public class MapConverter { /** * get . * @ param data a { @ link java . util . Map } object . * @ param name a { @ link java . lang . String } object . * @ param clazz a { @ link java . lang . Class } object . * @ param < T > a T object . * @ return a T object . */ public < T > T get ( Map < String , Object > data , String name , Class < T > clazz ) { } }
return convert ( get ( data , name ) , clazz ) ;
public class Kryo {
    /**
     * Resets object graph state: unregistered class names, references to previously
     * serialized or deserialized objects, the {@link #getOriginalToCopyMap() original to
     * copy map}, and the {@link #getGraphContext() graph context}. If
     * {@link #setAutoReset(boolean) auto reset} is true, this method is called
     * automatically when an object graph has been completely serialized or deserialized.
     * If overridden, the super method must be called.
     */
    public void reset() {
        // Serialization/deserialization side of the state.
        depth = 0;
        // NOTE(review): clear(2048) presumably clears while bounding retained
        // capacity at 2048 entries — confirm against the map implementation used.
        if (graphContext != null) graphContext.clear(2048);
        classResolver.reset();
        if (references) {
            // Reference tracking only exists when references are enabled.
            referenceResolver.reset();
            readObject = null;
        }
        // Copy side of the state.
        copyDepth = 0;
        if (originalToCopy != null) originalToCopy.clear(2048);
        if (TRACE) trace("kryo", "Object graph complete.");
    }
}
public class QuickDrawContext {
    /**
     * Converts a plain rectangle to a {@link RoundRectangle2D.Double} with the
     * given corner-arc width and height (used when drawing rounded rectangles).
     *
     * @param pRectangle the source rectangle providing position and size
     * @param pArcW the arc width for the rounded corners
     * @param pArcH the arc height for the rounded corners
     * @return a rounded rectangle covering the same bounds
     */
    private static RoundRectangle2D.Double toRoundRect(final Rectangle2D pRectangle,
                                                       final int pArcW, final int pArcH) {
        final double x = pRectangle.getX();
        final double y = pRectangle.getY();
        final double w = pRectangle.getWidth();
        final double h = pRectangle.getHeight();
        return new RoundRectangle2D.Double(x, y, w, h, pArcW, pArcH);
    }
}
public class LssClient {
    /**
     * Lists a domain's statistics in the live stream service.
     *
     * @param request the request object containing all options for listing the
     *        domain's traffic statistics; {@code startTime} is required, the
     *        remaining filters are optional
     * @return the response from the statistics endpoint
     */
    public ListDomainStatisticsResponse listDomainStatistics(ListDomainStatisticsRequest request) {
        // Required inputs: the request itself and a non-empty start time.
        checkNotNull(request, "The parameter request should NOT be null.");
        checkStringNotEmpty(request.getStartTime(), "startTime should NOT be empty");
        InternalRequest internalRequest =
                createRequest(HttpMethodName.GET, request, STATISTICS, LIVE_DOMAIN, "list");
        internalRequest.addParameter("startTime", request.getStartTime());
        // Optional query parameters are only sent when present.
        if (request.getEndTime() != null) {
            internalRequest.addParameter("endTime", request.getEndTime());
        }
        if (request.getKeyword() != null) {
            internalRequest.addParameter("keyword", request.getKeyword());
        }
        if (request.getKeywordType() != null) {
            internalRequest.addParameter("keywordType", request.getKeywordType());
        }
        if (request.getOrderBy() != null) {
            internalRequest.addParameter("orderBy", request.getOrderBy());
        }
        return invokeHttpClient(internalRequest, ListDomainStatisticsResponse.class);
    }
}
public class JsonParser {
    /**
     * {@link Beta} <br>
     * Parse a JSON object, array, or value into a new instance of the given destination
     * class using {@link JsonParser#parse(Class, CustomizeJsonParser)}, and then closes
     * the parser.
     *
     * @param <T> destination class
     * @param destinationClass destination class that has a public default constructor to
     *        use to create a new instance
     * @param customizeParser optional parser customizer or {@code null} for none
     * @return new instance of the parsed destination class
     * @throws IOException if parsing or closing the underlying parser fails
     */
    @Beta
    public final <T> T parseAndClose(Class<T> destinationClass, CustomizeJsonParser customizeParser)
            throws IOException {
        try {
            return parse(destinationClass, customizeParser);
        } finally {
            // Always release the parser, even if parsing throws.
            close();
        }
    }
}
public class JobInProgress {
    /**
     * Returns the total job counters, by adding together the job,
     * the map and the reduce counters.
     *
     * @return a fresh {@link Counters} aggregate; never shared internal state
     */
    public Counters getCounters() {
        Counters result = new Counters();
        // Snapshot the job-level counters under the object lock, since they are
        // updated concurrently as tasks report progress.
        synchronized (this) {
            result.incrAllCounters(getJobCounters());
        }
        // NOTE(review): task counters are folded in outside the lock —
        // presumably incrementTaskCounters tolerates concurrent task updates;
        // confirm before tightening or relaxing the locking here.
        incrementTaskCounters(result, maps);
        return incrementTaskCounters(result, reduces);
    }
}
public class OWLAPIPreconditions { /** * check for absent and throw IllegalArgumentException if null or absent * @ param object * reference to check * @ param message * message for the illegal argument exception * @ param < T > * reference type * @ return the input reference if not null * @ throws IllegalArgumentException * if object is null */ @ Nonnull public static < T > T checkNotNull ( Optional < T > object , @ Nonnull String message ) { } }
if ( object == null || ! object . isPresent ( ) ) { throw new IllegalArgumentException ( message ) ; } return verifyNotNull ( object . get ( ) ) ;
public class Streams {
    /**
     * Reads all input into memory, closes the stream, and returns the contents as a
     * String decoded with the given charset.
     *
     * @param inputStream InputStream to read from; always closed before this method
     *        returns, even on failure.
     * @param charset the charset to interpret the input as.
     * @return String contents of the stream.
     * @throws IOException if there is a problem reading from (or closing) the stream.
     */
    public static String readAll(final InputStream inputStream, Charset charset) throws IOException {
        // Plain stdlib drain replaces the Guava ByteSource/CharSource detour;
        // try-with-resources preserves the "always close" contract.
        try (Reader reader = new InputStreamReader(inputStream, charset)) {
            final StringBuilder contents = new StringBuilder();
            final char[] buffer = new char[8192];
            int read;
            while ((read = reader.read(buffer)) != -1) {
                contents.append(buffer, 0, read);
            }
            return contents.toString();
        }
    }
}
public class MessageArgsUnitParser {
    /**
     * Based on the unit we're converting to, guess the input unit. For example, if we're
     * converting to MEGABIT and no input unit was specified, assume BIT.
     *
     * @param exact the target unit being converted to
     * @param converter supplies user/locale-specific default units for some categories
     * @return the assumed input unit, or {@code null} if no sensible default exists
     */
    protected static Unit inputFromExactUnit(Unit exact, UnitConverter converter) {
        // Data-size units collapse to their base unit regardless of magnitude.
        switch (exact) {
            case TERABIT:
            case GIGABIT:
            case MEGABIT:
            case KILOBIT:
            case BIT:
                return Unit.BIT;
            case TERABYTE:
            case GIGABYTE:
            case MEGABYTE:
            case KILOBYTE:
            case BYTE:
                return Unit.BYTE;
            default:
                break;
        }
        // Otherwise pick a base unit per category: fixed for physical categories,
        // converter-provided where the default depends on user/locale settings.
        UnitCategory category = exact.category();
        switch (category) {
            case CONSUMPTION:
                return converter.consumptionUnit();
            case ELECTRIC:
                return Unit.AMPERE;
            case FREQUENCY:
                return Unit.HERTZ;
            case LIGHT:
                return Unit.LUX;
            case PRESSURE:
                return Unit.MILLIBAR;
            case SPEED:
                return converter.speedUnit();
            case TEMPERATURE:
                return converter.temperatureUnit();
            default:
                // Fall back to the category's default factor set, if one exists.
                UnitFactorSet factorSet = getDefaultFactorSet(category, converter);
                if (factorSet != null) {
                    return factorSet.base();
                }
                break;
        }
        // No reasonable guess for this unit/category.
        return null;
    }
}
public class ResponseMessage {
    /**
     * Sets the response character encoding and, if a content type has already been set,
     * rewrites its {@code charset=} parameter accordingly.
     *
     * @param charset the encoding name, possibly quoted; quotes are stripped
     * @see javax.servlet.ServletResponse#setCharacterEncoding(java.lang.String)
     */
    @Override
    public void setCharacterEncoding(String charset) {
        // Per the servlet contract, the encoding cannot change once a writer has
        // been obtained or the response has been committed.
        if (null != this.outWriter || isCommitted()) {
            return;
        }
        if (null != charset) {
            this.encoding = connection.getEncodingUtils().stripQuotes(charset);
        }
        if (null != this.contentType) {
            int index = this.contentType.indexOf("charset=");
            StringBuilder sb = new StringBuilder();
            if (-1 != index) {
                // Keep everything before the old charset parameter.
                // NOTE(review): `index - 1` assumes the character immediately before
                // "charset=" is the ';' separator; if "charset=" ever starts at
                // position 0 this throws StringIndexOutOfBoundsException — confirm
                // callers can never set such a content type.
                sb.append(this.contentType.substring(0, index - 1).trim());
            } else {
                sb.append(this.contentType);
            }
            if (this.encoding != null) {
                sb.append(";charset=").append(this.encoding);
            }
            this.contentType = sb.toString();
            // Propagate the rebuilt header to the underlying response.
            this.response.setHeader("Content-Type", this.contentType);
        }
    }
}