signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class FieldUtils { /** * Writes a named { @ code static } { @ link Field } . Superclasses will be considered . * @ param cls * { @ link Class } on which the field is to be found * @ param fieldName * to write * @ param value * to set * @ param forceAccess * whether to break scope restrictions using the * { @ link java . lang . reflect . AccessibleObject # setAccessible ( boolean ) } method . { @ code false } will only * match { @ code public } fields . * @ throws IllegalArgumentException * if { @ code cls } is { @ code null } , the field name is blank or empty , the field cannot be located or is * not { @ code static } , or { @ code value } is not assignable * @ throws IllegalAccessException * if the field is not made accessible or is { @ code final } */ public static void writeStaticField ( final Class < ? > cls , final String fieldName , final Object value , final boolean forceAccess ) throws IllegalAccessException { } }
final Field field = getField ( cls , fieldName , forceAccess ) ; Validate . isTrue ( field != null , "Cannot locate field %s on %s" , fieldName , cls ) ; // already forced access above , don ' t repeat it here : writeStaticField ( field , value , false ) ;
public class QuickStartSecurity { /** * Unregister the quick start security security UserRegistryConfiguration . */ private void unregisterQuickStartSecurityRegistryConfiguration ( ) { } }
if ( urConfigReg != null ) { urConfigReg . unregister ( ) ; urConfigReg = null ; quickStartRegistry = null ; } else { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "QuickStartSecurityRegistry configuration is not registered." ) ; } }
public class ConsumerSessionImpl {
    /**
     * Retrieve the MPSubscription object that represents the subscription (durable or
     * non-durable) that this ConsumerSession is feeding from.
     * This function is only available on locally homed subscriptions.
     * Performing this against a queue consumer results in a
     * SIDurableSubscriptionNotFoundException.
     */
    @Override
    public MPSubscription getSubscription() throws SIDurableSubscriptionNotFoundException {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(tc, "getSubscription");
        // The consumer manager is the dispatcher that owns the subscription state.
        ConsumerDispatcher cd = (ConsumerDispatcher) _localConsumerPoint.getConsumerManager();
        MPSubscription mpSubscription = cd.getMPSubscription();
        if (mpSubscription == null) {
            // No subscription backing this session (e.g. a queue consumer).
            // NOTE(review): the exit trace uses CoreSPIConsumerSession.tc while the entry
            // trace used this class's tc — confirm the asymmetry is intentional.
            if (TraceComponent.isAnyTracingEnabled() && CoreSPIConsumerSession.tc.isEntryEnabled())
                SibTr.exit(CoreSPIConsumerSession.tc, "getSubscription", "SIDurableSubscriptionNotFoundException");
            throw new SIDurableSubscriptionNotFoundException(
                nls.getFormattedMessage("SUBSCRIPTION_DOESNT_EXIST_ERROR_CWSIP0146",
                    new Object[] { null, _messageProcessor.getMessagingEngineName() }, null));
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "getSubscription", mpSubscription);
        return mpSubscription;
    }
}
public class InternalXbaseParser {
    /**
     * $ANTLR start synpred66_InternalXbase
     *
     * Generated ANTLR syntactic-predicate fragment; do not edit by hand.
     * Matches rule__XFeatureCall__FeatureCallArgumentsAssignment_3_1_0 during backtracking.
     */
    public final void synpred66_InternalXbase_fragment() throws RecognitionException {
        // InternalXbase.g:2711:2: ( ( ( rule__XFeatureCall__FeatureCallArgumentsAssignment_3_1_0 ) ) )
        // InternalXbase.g:2711:2: ( ( rule__XFeatureCall__FeatureCallArgumentsAssignment_3_1_0 ) )
        {
            // InternalXbase.g:2711:2: ( ( rule__XFeatureCall__FeatureCallArgumentsAssignment_3_1_0 ) )
            // InternalXbase.g:2712:3: ( rule__XFeatureCall__FeatureCallArgumentsAssignment_3_1_0 )
            {
                // Fire the "before" event only when not backtracking (i.e. an actual parse).
                if (state.backtracking == 0) {
                    before(grammarAccess.getXFeatureCallAccess().getFeatureCallArgumentsAssignment_3_1_0());
                }
                // InternalXbase.g:2713:3: ( rule__XFeatureCall__FeatureCallArgumentsAssignment_3_1_0 )
                // InternalXbase.g:2713:4: rule__XFeatureCall__FeatureCallArgumentsAssignment_3_1_0
                {
                    pushFollow(FOLLOW_2);
                    rule__XFeatureCall__FeatureCallArgumentsAssignment_3_1_0();
                    state._fsp--;
                    // Bail out of the predicate as soon as the sub-rule fails.
                    if (state.failed) return;
                }
            }
        }
    }
}
public class Consumers { /** * Yields all elements of the iterator ( in the provided map ) . * @ param < M > the returned map type * @ param < K > the map key type * @ param < V > the map value type * @ param iterator the iterator that will be consumed * @ param map the map where the iterator is consumed * @ return the map filled with iterator values */ public static < M extends Map < K , V > , K , V > M dict ( Iterator < Pair < K , V > > iterator , M map ) { } }
dbc . precondition ( map != null , "cannot call dict with a null map" ) ; final Function < Iterator < Pair < K , V > > , M > consumer = new ConsumeIntoMap < > ( new ConstantSupplier < M > ( map ) ) ; return consumer . apply ( iterator ) ;
public class QueryExecution {
    /**
     * Unsafely coerce a {@code Map<String, String>} to {@code Map<CharSequence, CharSequence>}.
     * Safe at runtime only because type erasure removes the generic parameters and
     * {@code String} implements {@code CharSequence}.
     */
    @SuppressWarnings("unchecked")
    private Map<CharSequence, CharSequence> sneakyCast(Object m) {
        final Map<CharSequence, CharSequence> coerced = (Map<CharSequence, CharSequence>) m;
        return coerced;
    }
}
public class AnimatedDialog { /** * < / p > Closes the dialog with a translation animation to the content view < / p > */ private void slideClose ( ) { } }
if ( ! isClosing_ ) { isClosing_ = true ; TranslateAnimation slideDown = new TranslateAnimation ( Animation . RELATIVE_TO_SELF , 0 , Animation . RELATIVE_TO_SELF , 0 , Animation . RELATIVE_TO_SELF , 0.0f , Animation . RELATIVE_TO_SELF , 1f ) ; slideDown . setDuration ( 500 ) ; slideDown . setInterpolator ( new DecelerateInterpolator ( ) ) ; ( ( ViewGroup ) getWindow ( ) . getDecorView ( ) ) . getChildAt ( 0 ) . startAnimation ( slideDown ) ; slideDown . setAnimationListener ( new Animation . AnimationListener ( ) { @ Override public void onAnimationStart ( Animation animation ) { } @ Override public void onAnimationEnd ( Animation animation ) { dismiss ( ) ; } @ Override public void onAnimationRepeat ( Animation animation ) { } } ) ; }
public class DateUtils { /** * Attempt to extract a date or date range in standard format from a provided verbatim * date string . * @ param verbatimEventDate a string containing a verbatim event date . * @ return a map with result and resultState as keys * @ deprecated * @ see # extractDateFromVerbatimER ( String ) replacement method . */ public static Map < String , String > extractDateFromVerbatim ( String verbatimEventDate ) { } }
return extractDateFromVerbatim ( verbatimEventDate , DateUtils . YEAR_BEFORE_SUSPECT ) ;
public class BeanAnalyzer { /** * A function that returns a Map of all the available properties on * a given class including write - only properties . The properties returned * is mostly a superset of those returned from the standard JavaBeans * Introspector except pure indexed properties are discarded . * < p > Interfaces receive all the properties available in Object . Arrays , * Strings and Collections all receive a " length " property . An array ' s * " length " PropertyDescriptor has no read or write methods . * < p > Instead of indexed properties , there may be keyed properties in the * map , represented by a { @ link KeyedPropertyDescriptor } . Arrays , Strings * and Lists always have keyed properties with a key type of int . * < p > Because the value returned from a keyed property method may be more * specific than the method signature describes ( such is often the case * with collections ) , a bean class can contain a special field that * indicates what that specific type should be . The signature of this field * is as follows : * < tt > public & nbsp ; static & nbsp ; final & nbsp ; Class & nbsp ; ELEMENT _ TYPE & nbsp ; = & nbsp ; & lt ; type & gt ; . class ; < / tt > . * @ return an unmodifiable mapping of property names ( Strings ) to * PropertyDescriptor objects . */ public static Map < String , PropertyDescriptor > getAllProperties ( GenericType root ) throws IntrospectionException { } }
Map < String , PropertyDescriptor > properties = cPropertiesCache . get ( root ) ; if ( properties == null ) { GenericType rootType = root . getRootType ( ) ; if ( rootType == null ) { rootType = root ; } properties = Collections . unmodifiableMap ( createProperties ( rootType , root ) ) ; cPropertiesCache . put ( root , properties ) ; } return properties ;
public class ListDatasetsResult { /** * A list of " DatasetSummary " objects . * @ param datasetSummaries * A list of " DatasetSummary " objects . */ public void setDatasetSummaries ( java . util . Collection < DatasetSummary > datasetSummaries ) { } }
if ( datasetSummaries == null ) { this . datasetSummaries = null ; return ; } this . datasetSummaries = new java . util . ArrayList < DatasetSummary > ( datasetSummaries ) ;
public class CPDefinitionOptionRelUtil {
    /**
     * Returns an ordered range of all the cp definition option rels where
     * CPDefinitionId = &#63; and skuContributor = &#63;.
     *
     * <p>Useful when paginating results. Returns a maximum of <code>end - start</code>
     * instances. <code>start</code> and <code>end</code> are not primary keys, they are
     * indexes in the result set (so <code>0</code> refers to the first result). Setting
     * both to {@link QueryUtil#ALL_POS} returns the full result set. If
     * <code>orderByComparator</code> is specified, the query includes the given ORDER BY
     * logic; if it is absent and pagination is required, the default ORDER BY from
     * {@link CPDefinitionOptionRelModelImpl} is used; if both are absent, results are
     * sorted by primary key ascending.
     *
     * @param CPDefinitionId the cp definition ID
     * @param skuContributor the sku contributor
     * @param start the lower bound of the range of cp definition option rels
     * @param end the upper bound of the range (not inclusive)
     * @param orderByComparator the comparator to order the results by (optionally <code>null</code>)
     * @param retrieveFromCache whether to retrieve from the finder cache
     * @return the ordered range of matching cp definition option rels
     */
    public static List<CPDefinitionOptionRel> findByC_SC(long CPDefinitionId, boolean skuContributor, int start,
            int end, OrderByComparator<CPDefinitionOptionRel> orderByComparator, boolean retrieveFromCache) {
        // Pure delegation to the service-builder persistence layer.
        return getPersistence().findByC_SC(CPDefinitionId, skuContributor, start, end,
            orderByComparator, retrieveFromCache);
    }
}
public class FeatureStyleInfo { /** * String identifier which is guaranteed to include sufficient information to assure to be different for two * instances which could produce different result . It is typically used as basis for calculation of hash codes ( like * MD5 , SHA1 , SHA2 etc ) of ( collections of ) objects . * @ return cacheId * @ since 1.8.0 */ public String getCacheId ( ) { } }
return "FeatureStyleInfo{" + "index=" + index + ", name='" + name + '\'' + ", formula='" + formula + '\'' + ", fillColor='" + fillColor + '\'' + ", fillOpacity=" + fillOpacity + ", strokeColor='" + strokeColor + '\'' + ", strokeOpacity=" + strokeOpacity + ", strokeWidth=" + strokeWidth + ", dashArray='" + dashArray + '\'' + ", symbol=" + symbol + ", styleId='" + styleId + '\'' + '}' ;
public class LongTuples { /** * Recursively increment the given tuple lexicographically , starting at * the given index . * @ param current The tuple to increment * @ param min The minimum values * @ param max The maximum values * @ param index The index * @ return Whether the tuple could be incremented */ private static boolean incrementLexicographically ( MutableLongTuple current , LongTuple min , LongTuple max , int index ) { } }
if ( index == - 1 ) { return false ; } long oldValue = current . get ( index ) ; long newValue = oldValue + 1 ; current . set ( index , newValue ) ; if ( newValue >= max . get ( index ) ) { current . set ( index , min . get ( index ) ) ; return incrementLexicographically ( current , min , max , index - 1 ) ; } return true ;
public class ParquetAvroWriters { /** * Creates a ParquetWriterFactory that accepts and writes Avro generic types . * The Parquet writers will use the given schema to build and write the columnar data . * @ param schema The schema of the generic type . */ public static ParquetWriterFactory < GenericRecord > forGenericRecord ( Schema schema ) { } }
final String schemaString = schema . toString ( ) ; final ParquetBuilder < GenericRecord > builder = ( out ) -> createAvroParquetWriter ( schemaString , GenericData . get ( ) , out ) ; return new ParquetWriterFactory < > ( builder ) ;
public class Jar { /** * Sets an attribute in a non - main section of the manifest . * @ param section the section ' s name * @ param name the attribute ' s name * @ param value the attribute ' s value * @ return { @ code this } * @ throws IllegalStateException if entries have been added or the JAR has been written prior to calling this methods . */ public final Jar setAttribute ( String section , String name , String value ) { } }
verifyNotSealed ( ) ; if ( jos != null ) throw new IllegalStateException ( "Manifest cannot be modified after entries are added." ) ; Attributes attr = getManifest ( ) . getAttributes ( section ) ; if ( attr == null ) { attr = new Attributes ( ) ; getManifest ( ) . getEntries ( ) . put ( section , attr ) ; } attr . putValue ( name , value ) ; return this ;
public class StringUtil {
    /**
     * Returns the subtraction of the given String arrays.
     *
     * @param arr1 first array
     * @param arr2 second array
     * @return arr1 without the values that are present in arr2; if either array is
     *         {@code null} or empty, arr1 is returned unchanged
     */
    public static String[] subtraction(String[] arr1, String[] arr2) {
        if (arr1 == null || arr1.length == 0 || arr2 == null || arr2.length == 0) {
            return arr1;
        }
        // Use a set for O(1) membership tests instead of List.removeAll's O(n*m) scans.
        java.util.Set<String> toRemove = new java.util.HashSet<String>(Arrays.asList(arr2));
        List<String> result = new ArrayList<String>(arr1.length);
        for (String s : arr1) {
            if (!toRemove.contains(s)) {
                result.add(s);
            }
        }
        return result.toArray(new String[0]);
    }
}
public class MarkdownNotebookOutput { /** * Format string . * @ param fmt the fmt * @ param args the args * @ return the string */ @ javax . annotation . Nonnull public String format ( @ javax . annotation . Nonnull String fmt , @ javax . annotation . Nonnull Object ... args ) { } }
return 0 == args . length ? fmt : String . format ( fmt , args ) ;
public class MetadataCache {
    /**
     * Creates a metadata cache archive file of all tracks in the specified slot on the
     * specified player. Any previous contents of the specified file will be replaced.
     * If a non-{@code null} {@code listener} is supplied, its
     * {@link MetadataCacheCreationListener#cacheCreationContinuing(TrackMetadata, int, int)}
     * method will be called after each track is added to the cache, allowing it to display
     * progress updates to the user, and to continue or cancel the process by returning
     * {@code true} or {@code false}.
     *
     * Because this takes a huge amount of time relative to CDJ status updates, it can only
     * be performed while the MetadataFinder is in passive mode.
     *
     * @param slot the slot in which the media to be cached can be found
     * @param playlistId the id of the playlist to be cached, or 0 if all tracks should be cached
     * @param cache the file into which the metadata cache should be written
     * @param listener will be informed after each track is added to the cache file being
     *        created and offered the opportunity to cancel the process
     * @throws Exception if there is a problem communicating with the player or writing the cache file
     */
    @SuppressWarnings({ "SameParameterValue", "WeakerAccess" })
    public static void createMetadataCache(final SlotReference slot, final int playlistId,
                                           final File cache, final MetadataCacheCreationListener listener)
            throws Exception {
        // The work is packaged as a client task so it runs inside a managed dbserver session.
        ConnectionManager.ClientTask<Object> task = new ConnectionManager.ClientTask<Object>() {
            @SuppressWarnings("SameReturnValue")
            @Override
            public Object useClient(Client client) throws Exception {
                final List<Message> trackList;
                if (playlistId == 0) {
                    // Playlist 0 means "everything in the slot".
                    trackList = MetadataFinder.getInstance().getFullTrackList(slot.slot, client, 0);
                } else {
                    trackList = MetadataFinder.getInstance().getPlaylistItems(slot.slot, 0, playlistId, false, client);
                }
                MetadataCache.copyTracksToCache(trackList, playlistId, client, slot, cache, listener);
                return null;
            }
        };
        // Replace any previous cache file; a failed delete is only logged, and the
        // copy below proceeds regardless (carried over from the original behavior).
        if (cache.exists() && !cache.delete()) {
            logger.warn("Unable to delete cache file, {}", cache);
        }
        ConnectionManager.getInstance().invokeWithClientSession(slot.player, task, "building metadata cache");
    }
}
public class DataSiftPush { /** * Updates the name or output parameters for a push sucription * @ param id the subscription ID * @ param connector the output parameters to update to * @ param name an optional name to update with * @ return the updated push subscription */ public FutureData < PushSubscription > update ( String id , PushConnector connector , String name ) { } }
if ( id == null || id . isEmpty ( ) || connector == null ) { throw new IllegalArgumentException ( "A push subscription ID and output parameters is required" ) ; } FutureData < PushSubscription > future = new FutureData < > ( ) ; URI uri = newParams ( ) . forURL ( config . newAPIEndpointURI ( UPDATE ) ) ; POST request = config . http ( ) . POST ( uri , new PageReader ( newRequestCallback ( future , new PushSubscription ( ) , config ) ) ) . form ( "id" , id ) ; for ( Map . Entry < String , String > e : connector . parameters ( ) . verifyAndGet ( ) . entrySet ( ) ) { request . form ( e . getKey ( ) , e . getValue ( ) ) ; } if ( name != null && ! name . isEmpty ( ) ) { request . form ( "name" , name ) ; } performRequest ( future , request ) ; return future ;
public class HttpOutputStreamImpl {
    /**
     * Write a region of the given byte array to this output stream's buffers.
     *
     * @param value the bytes to write
     * @param start offset of the first byte to write
     * @param len   number of bytes to write
     * @throws IOException if validation fails or buffering the bytes fails
     * @see java.io.OutputStream#write(byte[], int, int)
     */
    @Override
    public void write(byte[] value, int start, int len) throws IOException {
        validate(); // presumably checks the stream is still open/writable — confirm
        writeToBuffers(value, start, len);
    }
}
public class Resource { /** * 得到Resource指定行数的内容 , 用于调试 , 报错等显示原有模板信息 , 如果获取不了 返回NUll * @ param start * @ param end * @ return */ public String getContent ( int start , int end ) throws IOException { } }
// bug , 混合回车符号也许定位不到准确行数 ? String lineSeparator = System . getProperty ( "line.separator" ) ; Reader br = null ; try { br = openReader ( ) ; BufferedReader reader = new BufferedReader ( br ) ; String line = null ; StringBuilder sb = new StringBuilder ( ) ; int index = 0 ; while ( ( line = reader . readLine ( ) ) != null ) { index ++ ; if ( index >= start && index <= end ) { sb . append ( line ) . append ( lineSeparator ) ; if ( index == end ) { break ; } } } return sb . toString ( ) ; } finally { if ( br != null ) { br . close ( ) ; } }
public class CamelCatalogHelper {
    /**
     * Looks up the prefix of the given endpoint option using the Camel catalog.
     * (NOTE: the previous javadoc described a boolean "is multi valued" check, which
     * does not match this method's signature or behavior.)
     *
     * @param camelCatalog the catalog to query
     * @param scheme the component name
     * @param key the option key
     * @return the option's prefix, or <tt>null</tt> if the option is unknown or has no prefix
     * @throws IllegalArgumentException if no catalog entry exists for the component
     */
    public static String getPrefix(CamelCatalog camelCatalog, String scheme, String key) {
        // use the camel catalog
        String json = camelCatalog.componentJSonSchema(scheme);
        if (json == null) {
            throw new IllegalArgumentException("Could not find catalog entry for component name: " + scheme);
        }
        List<Map<String, String>> data = JSonSchemaHelper.parseJsonSchema("properties", json, true);
        if (data != null) {
            // Scan the parsed property rows for the matching option name.
            for (Map<String, String> propertyMap : data) {
                String name = propertyMap.get("name");
                String prefix = propertyMap.get("prefix");
                if (key.equals(name)) {
                    return prefix;
                }
            }
        }
        return null;
    }
}
public class ChatRoomClient { /** * Add members to chat room * @ param roomId chat room id * @ param members username array * @ return No content * @ throws APIConnectionException connect exception * @ throws APIRequestException request exception */ public ResponseWrapper addChatRoomMember ( long roomId , String ... members ) throws APIConnectionException , APIRequestException { } }
Preconditions . checkArgument ( roomId > 0 , "room id is invalid" ) ; Preconditions . checkArgument ( members != null && members . length > 0 , "member should not be empty" ) ; JsonArray array = new JsonArray ( ) ; for ( String username : members ) { array . add ( new JsonPrimitive ( username ) ) ; } return _httpClient . sendPut ( _baseUrl + mChatRoomPath + "/" + roomId + "/members" , array . toString ( ) ) ;
public class FileUtilsV2_2 { /** * Copies a whole directory to a new location preserving the file dates . * This method copies the specified directory and all its child * directories and files to the specified destination . * The destination is the new location and name of the directory . * The destination directory is created if it does not exist . * If the destination directory did exist , then this method merges * the source with the destination , with the source taking precedence . * < strong > Note : < / strong > This method tries to preserve the files ' last * modified date / times using { @ link File # setLastModified ( long ) } , however * it is not guaranteed that those operations will succeed . * If the modification operation fails , no indication is provided . * @ param srcDir an existing directory to copy , must not be < code > null < / code > * @ param destDir the new directory , must not be < code > null < / code > * @ throws NullPointerException if source or destination is < code > null < / code > * @ throws IOException if source or destination is invalid * @ throws IOException if an IO error occurs during copying * @ since 1.1 */ public static void copyDirectory ( File srcDir , File destDir ) throws IOException { } }
copyDirectory ( srcDir , destDir , true ) ;
public class JsonReader { /** * Check if the passed Path can be resembled to valid Json content . This is * accomplished by fully parsing the Json file each time the method is called . * This consumes < b > less memory < / b > than calling any of the * < code > read . . . < / code > methods and checking for a non - < code > null < / code > * result . * @ param aPath * The file to be parsed . May not be < code > null < / code > . * @ param aFallbackCharset * The charset to be used for reading the Json file in case no BOM is * present . May not be < code > null < / code > . * @ return < code > true < / code > if the file can be parsed without error , * < code > false < / code > if not */ public static boolean isValidJson ( @ Nonnull final Path aPath , @ Nonnull final Charset aFallbackCharset ) { } }
return isValidJson ( new FileSystemResource ( aPath ) , aFallbackCharset ) ;
public class TrivialSwap { /** * Swap the elements of two float arrays at the specified positions . * @ param floatArray1 one of the arrays that will have one of its values swapped . * @ param array1Index the index of the first array that will be swapped . * @ param floatArray2 the other array that will have one of its values swapped . * @ param array2Index the index of the second array that will be swapped . */ public static void swap ( float [ ] floatArray1 , int array1Index , float [ ] floatArray2 , int array2Index ) { } }
if ( floatArray1 [ array1Index ] != floatArray2 [ array2Index ] ) { float hold = floatArray1 [ array1Index ] ; floatArray1 [ array1Index ] = floatArray2 [ array2Index ] ; floatArray2 [ array2Index ] = hold ; }
public class WriterOutputStream { /** * Write bytes from the specified byte array to the stream . * @ param b the byte array containing the bytes to write , * @ param off the start offset in the byte array , * @ param len the number of bytes to write . * @ throws IOException if writing operation to underlying target writer fails . */ @ Override public void write ( byte [ ] b , int off , int len ) throws IOException { } }
while ( len > 0 ) { int c = Math . min ( len , bytesBuffer . remaining ( ) ) ; bytesBuffer . put ( b , off , c ) ; processBytesBuffer ( false ) ; len -= c ; off += c ; }
public class PdfStamper {
    /**
     * Applies a digital signature to a document. The returned PdfStamper can be used
     * normally as the signature is only applied when closing.
     * <p>
     * A possible use is:
     * <pre>
     * KeyStore ks = KeyStore.getInstance("pkcs12");
     * ks.load(new FileInputStream("my_private_key.pfx"), "my_password".toCharArray());
     * String alias = (String) ks.aliases().nextElement();
     * PrivateKey key = (PrivateKey) ks.getKey(alias, "my_password".toCharArray());
     * Certificate[] chain = ks.getCertificateChain(alias);
     * PdfReader reader = new PdfReader("original.pdf");
     * FileOutputStream fout = new FileOutputStream("signed.pdf");
     * PdfStamper stp = PdfStamper.createSignature(reader, fout, '\0', new File("/temp"));
     * PdfSignatureAppearance sap = stp.getSignatureAppearance();
     * sap.setCrypto(key, chain, null, PdfSignatureAppearance.WINCER_SIGNED);
     * sap.setReason("I'm the author");
     * sap.setLocation("Lisbon");
     * // comment next line to have an invisible signature
     * sap.setVisibleSignature(new Rectangle(100, 100, 200, 200), 1, null);
     * stp.close();
     * </pre>
     *
     * @param reader the original document
     * @param os the output stream or <CODE>null</CODE> to keep the document in the temporary file
     * @param pdfVersion the new pdf version or '\0' to keep the same version as the original document
     * @param tempFile location of the temporary file. If it's a directory a temporary file will
     *        be created there. If it's a file it will be used directly. The file will be deleted
     *        on exit unless <CODE>os</CODE> is null; in that case the document can be retrieved
     *        directly from the temporary file. If it's <CODE>null</CODE> no temporary file will
     *        be created and memory will be used.
     * @return a <CODE>PdfStamper</CODE>
     * @throws DocumentException on error
     * @throws IOException on error
     */
    public static PdfStamper createSignature(PdfReader reader, OutputStream os, char pdfVersion, File tempFile)
            throws DocumentException, IOException {
        // Delegate with append = false (signature applied to a full rewrite, not incrementally).
        return createSignature(reader, os, pdfVersion, tempFile, false);
    }
}
public class UrlHelper { /** * Returns an absolute URL for the specified path . * Example : If the current request URL is http : / / example . org / helloworld / internal / status , * { @ code absoluteHrefOf ( " / internal / health " ) } will return http : / / example . org / helloworld / internal / health ( with * helloworld as servletContextPath ) . * This method relies on Spring ' s { @ link org . springframework . web . context . request . RequestContextHolder } to find * the current request . * @ param path the relative url path * @ return returns the absolute href of the given path */ public static String absoluteHrefOf ( final String path ) { } }
try { return fromCurrentServletMapping ( ) . path ( path ) . build ( ) . toString ( ) ; } catch ( final IllegalStateException e ) { return path ; }
public class TypeEnter {
    /**
     * Generate a default constructor for the given class. For classes different from
     * java.lang.Object, this is:
     * <pre>
     *   c(argtype_0 x_0, ..., argtype_n x_n) throws thrown { super(x_0, ..., x_n); }
     * </pre>
     * or, if based == true:
     * <pre>
     *   c(argtype_0 x_0, ..., argtype_n x_n) throws thrown { x_0.super(x_1, ..., x_n); }
     * </pre>
     *
     * @param make     the tree factory
     * @param c        the class owning the default constructor
     * @param baseInit the base constructor (if any) the generated one is modeled on
     * @param typarams the type parameters of the constructor
     * @param argtypes the parameter types of the constructor
     * @param thrown   the thrown exceptions of the constructor
     * @param flags    initial modifier flags
     * @param based    is the first parameter a this$n?
     */
    JCTree DefaultConstructor(TreeMaker make, ClassSymbol c, MethodSymbol baseInit, List<Type> typarams,
                              List<Type> argtypes, List<Type> thrown, long flags, boolean based) {
        JCTree result;
        if ((c.flags() & ENUM) != 0 && (types.supertype(c.type).tsym == syms.enumSym)) {
            // constructors of true enums are private
            flags = (flags & ~AccessFlags) | PRIVATE | GENERATEDCONSTR;
        } else
            // otherwise inherit the class's access level and mark as generated
            flags |= (c.flags() & AccessFlags) | GENERATEDCONSTR;
        if (c.name.isEmpty()) {
            // anonymous classes get the anonymous-constructor flag
            flags |= ANONCONSTR;
        }
        Type mType = new MethodType(argtypes, null, thrown, c);
        // Wrap in a ForAll when the constructor is generic.
        Type initType = typarams.nonEmpty() ? new ForAll(typarams, mType) : mType;
        MethodSymbol init = new MethodSymbol(flags, names.init, initType, c);
        init.params = createDefaultConstructorParams(make, baseInit, init, argtypes, based);
        List<JCVariableDecl> params = make.Params(argtypes, init);
        List<JCStatement> stats = List.nil();
        if (c.type != syms.objectType) {
            // Every class except Object begins its constructor with a super(...) call.
            stats = stats.prepend(SuperCall(make, typarams, params, based));
        }
        result = make.MethodDef(init, make.Block(0, stats));
        return result;
    }
}
public class AmazonRoute53Client { /** * Retrieves a list of supported geographic locations . * Countries are listed first , and continents are listed last . If Amazon Route 53 supports subdivisions for a * country ( for example , states or provinces ) , the subdivisions for that country are listed in alphabetical order * immediately after the corresponding country . * @ param listGeoLocationsRequest * A request to get a list of geographic locations that Amazon Route 53 supports for geolocation resource * record sets . * @ return Result of the ListGeoLocations operation returned by the service . * @ throws InvalidInputException * The input is not valid . * @ sample AmazonRoute53 . ListGeoLocations * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / route53-2013-04-01 / ListGeoLocations " target = " _ top " > AWS API * Documentation < / a > */ @ Override public ListGeoLocationsResult listGeoLocations ( ListGeoLocationsRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeListGeoLocations ( request ) ;
public class EntityUtils { /** * Read the contents of an entity and return it as a byte array . * @ param entity * @ return byte array containing the entity content . May be null if * { @ link HttpEntity # getContent ( ) } is null . * @ throws IOException if an error occurs reading the input stream * @ throws IllegalArgumentException if entity is null or if content length > Integer . MAX _ VALUE */ public static byte [ ] toByteArray ( final HttpEntity entity ) throws IOException { } }
if ( entity == null ) { throw new IllegalArgumentException ( "HTTP entity may not be null" ) ; } InputStream instream = entity . getContent ( ) ; if ( instream == null ) { return null ; } try { if ( entity . getContentLength ( ) > Integer . MAX_VALUE ) { throw new IllegalArgumentException ( "HTTP entity too large to be buffered in memory" ) ; } int i = ( int ) entity . getContentLength ( ) ; if ( i < 0 ) { i = 4096 ; } ByteArrayBuffer buffer = new ByteArrayBuffer ( i ) ; byte [ ] tmp = new byte [ 4096 ] ; int l ; while ( ( l = instream . read ( tmp ) ) != - 1 ) { buffer . append ( tmp , 0 , l ) ; } return buffer . toByteArray ( ) ; } finally { instream . close ( ) ; }
public class WhiteboxImpl {

    /**
     * Checks whether the actual parameter types match the expected parameter types,
     * taking var-args into account.
     *
     * @param isVarArgs whether the method or constructor declares var-args
     * @param expectedParameterTypes the expected parameter types
     * @param actualParameterTypes the actual parameter types
     * @return {@code true} if all actual parameter types are assignable from the
     *         expected parameter types, otherwise {@code false}
     */
    public static boolean checkIfParameterTypesAreSame(boolean isVarArgs, Class<?>[] expectedParameterTypes, Class<?>[] actualParameterTypes) {
        // The comparison logic lives entirely in the dedicated matcher.
        return new ParameterTypesMatcher(isVarArgs, expectedParameterTypes, actualParameterTypes).match();
    }
}
public class JSpinField { /** * Sets the value . This is a bound property . * @ param newValue * the new value * @ see # getValue */ public void setValue ( int newValue ) { } }
setValue ( newValue , true , true ) ; spinner . setValue ( new Integer ( value ) ) ;
public class FedoraPolicyStore { /** * ( non - Javadoc ) * @ see * org . fcrepo . server . security . xacml . pdp . data . PolicyDataManager # addPolicy * ( java . lang . String , java . lang . String ) */ @ Override public String addPolicy ( String document , String name ) throws PolicyStoreException { } }
String policyName ; if ( name == null || name . isEmpty ( ) ) { // no policy name , derive from document // ( note : policy ID is mandatory according to schema ) try { policyName = utils . getPolicyName ( document ) ; } catch ( MelcoePDPException e ) { throw new PolicyStoreException ( "Could not get policy name from policy" , e ) ; } // if name from document contains pid separator , escape it if ( name . contains ( ":" ) ) { name = name . replace ( ":" , PID_SEPARATOR_ESCAPED ) ; } } else { policyName = name ; } String pid = this . getPID ( policyName ) ; ObjectProfile objectProfile = null ; try { objectProfile = this . apiAService . getObjectProfile ( getContext ( ) , pid , null ) ; } catch ( ObjectNotInLowlevelStorageException e ) { } catch ( ServerException e ) { throw new PolicyStoreException ( "Add: error getting object profile for " + pid + " - " + e . getMessage ( ) , e ) ; } if ( objectProfile != null ) { // object exists , check state if ( objectProfile . objectState != "D" ) { throw new PolicyStoreException ( "Add: attempting to add policy " + pid + " but it already exists" ) ; } // deleted object : set state to active and do an update instead try { this . apiMService . modifyObject ( getContext ( ) , pid , "A" , objectProfile . objectLabel , objectProfile . objectOwnerId , "Fedora policy manager: Adding policy by activating deleted object" , null ) ; } catch ( ServerException e ) { throw new PolicyStoreException ( "Add: " + e . getMessage ( ) , e ) ; } this . updatePolicy ( policyName , document ) ; return pid ; } else { // create new object // if control group is M - managed - we need a temp location for the datastream String dsLocationOrContent = null ; if ( datastreamControlGroup . equals ( "M" ) ) { try { ByteArrayInputStream is = new ByteArrayInputStream ( document . getBytes ( "UTF-8" ) ) ; dsLocationOrContent = apiMService . 
putTempStream ( getContext ( ) , is ) ; } catch ( Exception e ) { throw new PolicyStoreException ( "Add: error generating temp datastream location - " + e . getMessage ( ) , e ) ; } } else { dsLocationOrContent = document ; } try { return apiMService . ingest ( getContext ( ) , new ByteArrayInputStream ( getFOXMLPolicyTemplate ( pid , "XACML policy " + policyName , contentModel , collection , collectionRelationship , dsLocationOrContent , datastreamControlGroup ) . getBytes ( "UTF-8" ) ) , "Fedora Policy Manager creating policy" , Constants . FOXML1_1 . uri , "UTF-8" , "" ) ; } catch ( Exception e ) { throw new PolicyStoreException ( "Add: error ingesting " + pid + " - " + e . getMessage ( ) , e ) ; } }
public class AmazonCodeDeployClient {

    /**
     * Lists the deployment groups for an application registered with the IAM user
     * or AWS account.
     *
     * @param request the ListDeploymentGroups request parameters
     * @return the result of the ListDeploymentGroups operation returned by the service
     */
    @Override
    public ListDeploymentGroupsResult listDeploymentGroups(ListDeploymentGroupsRequest request) {
        // Run the client-side request handler chain before dispatching the call.
        request = beforeClientExecution(request);
        return executeListDeploymentGroups(request);
    }
}
public class TypesImpl { /** * The direct superclass is the class from whose implementation the * implementation of the current class is derived . * @ param t * @ return */ @ Override public List < ? extends TypeMirror > directSupertypes ( TypeMirror t ) { } }
switch ( t . getKind ( ) ) { case DECLARED : DeclaredType dt = ( DeclaredType ) t ; TypeElement te = ( TypeElement ) dt . asElement ( ) ; List < TypeMirror > list = new ArrayList < > ( ) ; TypeElement superclass = ( TypeElement ) asElement ( te . getSuperclass ( ) ) ; if ( superclass != null ) { list . add ( 0 , superclass . asType ( ) ) ; } list . addAll ( te . getInterfaces ( ) ) ; return list ; case EXECUTABLE : case PACKAGE : throw new IllegalArgumentException ( t . getKind ( ) . name ( ) ) ; default : throw new UnsupportedOperationException ( t . getKind ( ) . name ( ) + " not supported yet" ) ; }
public class LookupManagerImpl {

    /**
     * Returns the DirectoryLookupService used for lookups, creating it lazily on
     * first use. Configuration decides between a caching and a plain lookup service.
     *
     * @return the lazily initialized lookup service
     */
    private DirectoryLookupService getLookupService() {
        // Double-checked locking lazy initialization.
        // NOTE(review): this pattern is only safe if the lookupService field is
        // declared volatile - confirm at the field declaration.
        if (lookupService == null) {
            synchronized (this) {
                if (lookupService == null) {
                    boolean cacheEnabled = Configurations.getBoolean(SD_API_CACHE_ENABLED_PROPERTY, SD_API_CACHE_ENABLED_DEFAULT);
                    if (cacheEnabled) {
                        // Caching variant must be started before use.
                        CachedDirectoryLookupService service = new CachedDirectoryLookupService(directoryServiceClientManager);
                        service.start();
                        lookupService = service;
                        LOGGER.info("Created the CachedDirectoryLookupService in LookupManager");
                    } else {
                        lookupService = new DirectoryLookupService(directoryServiceClientManager);
                        LOGGER.info("Created the DirectoryLookupService in LookupManager");
                    }
                }
            }
        }
        return lookupService;
    }
}
public class ResponseBuilder {

    /**
     * Removes an enclosing {@code <speak>...</speak>} XML tag pair from the
     * given output speech, if present.
     *
     * @param outputSpeech output speech; may be {@code null}
     * @return the trimmed output speech without the speak tags; never {@code null}
     */
    private String trimOutputSpeech(String outputSpeech) {
        if (outputSpeech == null) {
            return "";
        }
        final String trimmed = outputSpeech.trim();
        final String openTag = "<speak>";
        final String closeTag = "</speak>";
        if (trimmed.startsWith(openTag) && trimmed.endsWith(closeTag)) {
            // Strip by tag length instead of the original magic numbers 7 and 8.
            return trimmed.substring(openTag.length(), trimmed.length() - closeTag.length()).trim();
        }
        return trimmed;
    }
}
public class Builder {

    /**
     * Closes the most recently opened tag or section.
     *
     * @return this builder
     * @throws IllegalStateException if there are no pending tags to close
     */
    public Builder end() {
        if (ends.isEmpty())
            throw new IllegalStateException("No pending tag/section to close.");
        // Pop the close-tag text that was pushed when the tag was opened.
        String tag = ends.pop();
        html.a(tag);
        // Certain tags get a trailing newline in both the HTML and text renderings.
        if (newLineAfterTheseTags.contains(tag.toLowerCase())) {
            html.nl();
            text.nl();
        }
        return this;
    }
}
public class TypedValue { /** * Sets the value of the test property . * @ param value * allowed object is * { @ link AValue } */ public void setValueItem ( AValue value ) { } }
this . avalue = value ; if ( value != null ) { Object o = SQLValueConverter . convertFromAValue ( value ) ; if ( o != null ) { if ( o instanceof QualifiedName ) { this . value = o ; } else { this . value = o . toString ( ) ; } } }
public class FailsafeExecutor {

    /**
     * Executes the {@code supplier} asynchronously until a successful result is
     * returned or the configured policies are exceeded. If a configured circuit
     * breaker is open, the resulting future is completed with
     * {@code CircuitBreakerOpenException}.
     *
     * @throws NullPointerException if the {@code supplier} is null
     * @throws RejectedExecutionException if the {@code supplier} cannot be scheduled
     *         for execution
     */
    public <T extends R> CompletableFuture<T> getAsync(CheckedSupplier<T> supplier) {
        // Wraps the supplier in a promise-producing function and delegates.
        // NOTE(review): the false flag presumably distinguishes this from
        // execution-aware async variants - confirm against callAsync.
        return callAsync(execution -> Functions.promiseOf(supplier, execution), false);
    }
}
public class Times {

    /**
     * Returns the last millisecond (23:59:59.999 in the default time zone) of the
     * day containing the specified time.
     *
     * @param time the specified time in epoch milliseconds
     * @return the day-end time in epoch milliseconds
     */
    public static long getDayEndTime(final long time) {
        final Calendar calendar = Calendar.getInstance();
        calendar.setTimeInMillis(time);
        // Keep the date fields, push the time-of-day fields to their maxima.
        calendar.set(Calendar.HOUR_OF_DAY, 23);
        calendar.set(Calendar.MINUTE, 59);
        calendar.set(Calendar.SECOND, 59);
        calendar.set(Calendar.MILLISECOND, 999);
        return calendar.getTimeInMillis();
    }
}
public class onlinkipv6prefix { /** * Use this API to unset the properties of onlinkipv6prefix resources . * Properties that need to be unset are specified in args array . */ public static base_responses unset ( nitro_service client , onlinkipv6prefix resources [ ] , String [ ] args ) throws Exception { } }
base_responses result = null ; if ( resources != null && resources . length > 0 ) { onlinkipv6prefix unsetresources [ ] = new onlinkipv6prefix [ resources . length ] ; for ( int i = 0 ; i < resources . length ; i ++ ) { unsetresources [ i ] = new onlinkipv6prefix ( ) ; unsetresources [ i ] . ipv6prefix = resources [ i ] . ipv6prefix ; } result = unset_bulk_request ( client , unsetresources , args ) ; } return result ;
public class DateFormat {

    /**
     * Returns the date formatter with the given formatting style for the given
     * locale.
     *
     * @param style the formatting style, e.g. SHORT for "M/d/yy" in the US locale
     * @param aLocale the given locale
     * @return a date formatter
     */
    public final static DateFormat getDateInstance(int style, Locale aLocale) {
        // Time style -1: presumably "no time component", yielding a date-only
        // formatter - confirm against the internal get() overload.
        return get(style, -1, ULocale.forLocale(aLocale), null);
    }
}
public class NetUtils { /** * Turns an Inet4Address into a 32 - bit integer representation * @ param addr address * @ return integer representation */ public static int ipv4ToInt ( final Inet4Address addr ) { } }
int value = 0 ; for ( byte chunk : addr . getAddress ( ) ) { value <<= 8 ; value |= chunk & 0xff ; } return value ;
public class UriUtils {

    /**
     * Creates a pair from the two given strings. Blank inputs are permitted but
     * logged as warnings.
     *
     * @param s1 The first string.
     * @param s2 The second string.
     * @return A pair of strings created from the two given strings.
     */
    public static Pair<String, String> pair(final String s1, final String s2) {
        if (StringUtils.isBlank(s1)) {
            LOG.warn("Blank first arg");
        }
        if (StringUtils.isBlank(s2)) {
            LOG.warn("Blank second arg for: " + s1);
        }
        return new PairImpl<String, String>(s1, s2);
    }
}
public class LoggingService {

    /**
     * Delivers the provided log lines to all current log-tail subscribers and
     * opportunistically purges idle subscribers at most once per purge interval.
     *
     * @param lines the log lines to deliver
     */
    private void storeWithSubscribers(final List<LogLineTableEntity> lines) {
        synchronized (subscribers) {
            if (subscribers.isEmpty())
                return; // No subscribers, ignore call
            for (LogSubscriber subscriber : subscribers) {
                subscriber.append(lines);
            }
            // Piggyback the idle-subscriber purge on delivery, throttled by
            // nextSubscriberPurge so it runs at most once per interval.
            if (System.currentTimeMillis() > nextSubscriberPurge) {
                purgeIdleSubscribers();
                nextSubscriberPurge = System.currentTimeMillis() + purgeSubscriberInterval.getMilliseconds();
            }
        }
    }
}
public class RemoteServiceProxy { /** * Returns a { @ link com . google . gwt . user . client . rpc . SerializationStreamReader * SerializationStreamReader } that is ready for reading . * @ param encoded string that encodes the response of an RPC request * @ return { @ link com . google . gwt . user . client . rpc . SerializationStreamReader * SerializationStreamReader } that is ready for reading * @ throws SerializationException */ public SerializationStreamReader createStreamReader ( String encoded ) throws SerializationException { } }
ClientSerializationStreamReader clientSerializationStreamReader = new ClientSerializationStreamReader ( serializer ) ; clientSerializationStreamReader . prepareToRead ( getEncodedInstance ( encoded ) ) ; return clientSerializationStreamReader ;
public class ArrayUtils { /** * Returns the element at the given index in the { @ code array } . * @ param < T > { @ link Class } type of elements in the array . * @ param array array from which to extract the given element at index . * @ param index integer indicating the index of the element in the array to return . * @ param defaultValue default value to return if the array is { @ literal null } , empty * or does not have an element at the given index . * @ return the element at the given index in the array , or returns the default value * if the array is { @ literal null } , empty or does not contain an element at the given index . * @ see # getElementAt ( Object [ ] , int , Object ) */ public static < T > T getElementAt ( T [ ] array , int index , T defaultValue ) { } }
return nullSafeLength ( array ) > index ? array [ index ] : defaultValue ;
public class ObjectClassDefinitionSpecification { /** * Helper method to filter between required and optional ADs * @ param isRequired * @ return */ private AttributeDefinitionSpecification [ ] getADs ( boolean isRequired ) { } }
AttributeDefinitionSpecification [ ] retVal = null ; Vector < AttributeDefinitionSpecification > vector = new Vector < AttributeDefinitionSpecification > ( ) ; for ( Map . Entry < String , AttributeDefinitionSpecification > entry : attributes . entrySet ( ) ) { AttributeDefinitionSpecification ad = entry . getValue ( ) ; if ( isRequired == ad . isRequired ( ) ) { vector . add ( ad ) ; } } retVal = new AttributeDefinitionSpecification [ vector . size ( ) ] ; vector . toArray ( retVal ) ; return retVal ;
public class ISUPMessageImpl {

    /**
     * Encodes the mandatory variable part of the message: the pointer octets and
     * the actual parameter bodies (each body preceded by its one-octet length).
     *
     * @param parameters list of parameters, keyed by their positional index
     * @param bos output stream receiving the encoded octets
     * @param isOptionalPartPresent if {@code true} the pointer to the start of the
     *        optional part is encoded; otherwise that octet is written as zero
     * @throws ParameterException if a parameter body exceeds 255 octets or
     *         encoding fails
     */
    protected void encodeMandatoryVariableParameters(Map<Integer, ISUPParameter> parameters, ByteArrayOutputStream bos, boolean isOptionalPartPresent) throws ParameterException {
        try {
            byte[] pointers = null;
            if (!mandatoryVariablePartPossible()) {
                // No mandatory variable part for this message type: only the
                // pointer to the optional part (if any) is emitted.
                if (optionalPartIsPossible()) {
                    if (isOptionalPartPresent) {
                        // Optional part starts immediately after this pointer octet.
                        pointers = new byte[] { 0x01 };
                    } else {
                        // Zero pointer marks "no optional part".
                        pointers = new byte[] { 0x00 };
                    }
                    bos.write(pointers);
                } else {
                    // Neither mandatory-variable nor optional part: nothing to emit.
                }
            } else {
                // One pointer per parameter, plus one for the optional part if the
                // message type allows one.
                if (optionalPartIsPossible()) {
                    pointers = new byte[parameters.size() + 1];
                } else {
                    pointers = new byte[parameters.size()];
                }
                ByteArrayOutputStream parametersBodyBOS = new ByteArrayOutputStream();
                byte lastParameterLength = 0;
                byte currentParameterLength = 0;
                for (int index = 0; index < parameters.size(); index++) {
                    AbstractISUPParameter p = (AbstractISUPParameter) parameters.get(index);
                    byte[] body = p.encode();
                    currentParameterLength = (byte) body.length;
                    if (body.length > 255) {
                        // The length field is a single octet, so 255 is the maximum.
                        throw new ParameterException("Length of body must not be greater than one octet - 255 ");
                    }
                    if (index == 0) {
                        lastParameterLength = currentParameterLength;
                        // Pointer to the first mandatory variable parameter; it must
                        // skip the optional-part pointer when the message has one.
                        pointers[index] = (byte) (parameters.size() + (optionalPartIsPossible() ? 1 : 0));
                    } else {
                        // Each subsequent pointer is the previous pointer advanced by
                        // the previous parameter's encoded length.
                        pointers[index] = (byte) (pointers[index - 1] + lastParameterLength);
                        lastParameterLength = currentParameterLength;
                    }
                    parametersBodyBOS.write(currentParameterLength);
                    parametersBodyBOS.write(body);
                }
                if (optionalPartIsPossible()) {
                    if (isOptionalPartPresent) {
                        // Last pointer addresses the start of the optional part.
                        // NOTE(review): pointers[pointers.length - 2] assumes at least
                        // one mandatory variable parameter exists - confirm callers
                        // never reach here with an empty parameter map.
                        pointers[pointers.length - 1] = (byte) (pointers[pointers.length - 2] + lastParameterLength);
                    } else {
                        // Leave the optional-part pointer as zero ("absent").
                    }
                } else {
                    // No optional part possible for this message type.
                }
                bos.write(pointers);
                bos.write(parametersBodyBOS.toByteArray());
            }
        } catch (ParameterException pe) {
            throw pe;
        } catch (Exception e) {
            throw new ParameterException(e);
        }
    }
}
public class CertificatesImpl {

    /**
     * Deletes a certificate from the specified account. The deletion fails if any
     * pool or compute node still uses the certificate; in that case the
     * certificate status changes to deleteFailed.
     *
     * @param thumbprintAlgorithm The algorithm used to derive the thumbprint
     *        parameter. This must be sha1.
     * @param thumbprint The thumbprint of the certificate to be deleted.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@code ServiceResponseWithHeaders} object if successful.
     */
    public Observable<ServiceResponseWithHeaders<Void, CertificateDeleteHeaders>> deleteWithServiceResponseAsync(String thumbprintAlgorithm, String thumbprint) {
        if (this.client.batchUrl() == null) {
            throw new IllegalArgumentException("Parameter this.client.batchUrl() is required and cannot be null.");
        }
        if (thumbprintAlgorithm == null) {
            throw new IllegalArgumentException("Parameter thumbprintAlgorithm is required and cannot be null.");
        }
        if (thumbprint == null) {
            throw new IllegalArgumentException("Parameter thumbprint is required and cannot be null.");
        }
        if (this.client.apiVersion() == null) {
            throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
        }
        // Generated overload without explicit options: all optional request
        // values are fixed to null defaults below.
        final CertificateDeleteOptions certificateDeleteOptions = null;
        Integer timeout = null;
        UUID clientRequestId = null;
        Boolean returnClientRequestId = null;
        DateTime ocpDate = null;
        String parameterizedHost = Joiner.on(", ").join("{batchUrl}", this.client.batchUrl());
        DateTimeRfc1123 ocpDateConverted = null;
        if (ocpDate != null) {
            // Unreachable with the null default above; kept by the code generator
            // for symmetry with the options-taking overload.
            ocpDateConverted = new DateTimeRfc1123(ocpDate);
        }
        return service.delete(thumbprintAlgorithm, thumbprint, this.client.apiVersion(), this.client.acceptLanguage(), timeout, clientRequestId, returnClientRequestId, ocpDateConverted, parameterizedHost, this.client.userAgent())
                .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponseWithHeaders<Void, CertificateDeleteHeaders>>>() {
                    @Override
                    public Observable<ServiceResponseWithHeaders<Void, CertificateDeleteHeaders>> call(Response<ResponseBody> response) {
                        try {
                            // Convert the raw HTTP response into the typed envelope.
                            ServiceResponseWithHeaders<Void, CertificateDeleteHeaders> clientResponse = deleteDelegate(response);
                            return Observable.just(clientResponse);
                        } catch (Throwable t) {
                            return Observable.error(t);
                        }
                    }
                });
    }
}
public class BubbleChart {

    /**
     * Adds a bubble to the chart data.
     *
     * @param x x coordinate
     * @param y y coordinate
     * @param radius bubble radius
     * @param label bubble label
     */
    public void addValue(Float x, Float y, Float radius, String label) {
        bubbleData.addValue(new BubbleItem(x, y, radius, label));
    }
}
public class ArgumentDefinition { /** * Composes the help string on the possible options an { @ link Enum } typed argument can take . * @ param clazz target enum class . Assumed no to be { @ code null } . * @ param < T > enum class type . * @ throws CommandLineException if { @ code & lt ; T & gt ; } has no constants . * @ return never { @ code null } . */ private static < T extends Enum < T > > String getEnumOptions ( final Class < T > clazz ) { } }
// We assume that clazz is guaranteed to be a Class < ? extends Enum > , thus // getEnumConstants ( ) won ' t ever return a null . final T [ ] enumConstants = clazz . getEnumConstants ( ) ; if ( enumConstants . length == 0 ) { throw new CommandLineException ( String . format ( "Bad argument enum type '%s' with no options" , clazz . getName ( ) ) ) ; } if ( CommandLineParser . ClpEnum . class . isAssignableFrom ( clazz ) ) { return Stream . of ( enumConstants ) . map ( c -> String . format ( "%s (%s)" , c . name ( ) , ( ( CommandLineParser . ClpEnum ) c ) . getHelpDoc ( ) ) ) . collect ( Collectors . joining ( "\n" ) ) ; } else { return Stream . of ( enumConstants ) . map ( T :: name ) . collect ( Collectors . joining ( ", " , OPTION_DOC_PREFIX , OPTION_DOC_SUFFIX ) ) ; }
public class CustomizableFocusTraversalPolicy { /** * Sets a custom focus traversal order for the given container . Child * components for which there is no order specified will receive focus after * components that do have an order specified in the standard " layout " * order . * @ param container * the container * @ param componentsInOrder * a list of child components in the order that thay should * receive focus */ public static void customizeFocusTraversalOrder ( JComponent container , java . util . List componentsInOrder ) { } }
for ( Iterator i = componentsInOrder . iterator ( ) ; i . hasNext ( ) ; ) { Component comp = ( Component ) i . next ( ) ; if ( comp . getParent ( ) != container ) { throw new IllegalArgumentException ( "Component [" + comp + "] is not a child of [" + container + "]." ) ; } } container . putClientProperty ( FOCUS_ORDER_PROPERTY_NAME , createOrderMapFromList ( componentsInOrder ) ) ;
public class ForgetSmartHomeAppliancesRequestMarshaller {

    /**
     * Marshalls the given request into its protocol representation.
     *
     * @param forgetSmartHomeAppliancesRequest the request to marshall; must not be null
     * @param protocolMarshaller the marshaller receiving the field bindings
     */
    public void marshall(ForgetSmartHomeAppliancesRequest forgetSmartHomeAppliancesRequest, ProtocolMarshaller protocolMarshaller) {
        if (forgetSmartHomeAppliancesRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // The request carries a single field: the room ARN.
            protocolMarshaller.marshall(forgetSmartHomeAppliancesRequest.getRoomArn(), ROOMARN_BINDING);
        } catch (Exception e) {
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class CopyDataPublisher {

    /**
     * Publishes the file set of one {@code CopyableDataset} partition: merges split
     * work units, runs pre-publish steps, moves the staged files into place, runs
     * post-publish steps, marks successful work units committed, applies version
     * preservation, and emits publish events with SLA metadata.
     *
     * @param datasetAndPartition identifies the dataset partition being published
     * @param datasetWorkUnitStates the work unit states of this file set; must not
     *        be empty
     * @throws IOException if a filesystem operation fails
     */
    private void publishFileSet(CopyEntity.DatasetAndPartition datasetAndPartition, Collection<WorkUnitState> datasetWorkUnitStates) throws IOException {
        Map<String, String> additionalMetadata = Maps.newHashMap();
        Preconditions.checkArgument(!datasetWorkUnitStates.isEmpty(),
                "publishFileSet received an empty collection work units. This is an error in code.");
        // All work units of a file set share the same serialized dataset metadata.
        CopyableDatasetMetadata metadata = CopyableDatasetMetadata.deserialize(
                datasetWorkUnitStates.iterator().next().getProp(CopySource.SERIALIZED_COPYABLE_DATASET));
        Path datasetWriterOutputPath = new Path(this.writerOutputDir, datasetAndPartition.identifier());
        log.info("Merging all split work units.");
        DistcpFileSplitter.mergeAllSplitWorkUnits(this.fs, datasetWorkUnitStates);
        log.info(String.format("[%s] Publishing fileSet from %s for dataset %s", datasetAndPartition.identifier(),
                datasetWriterOutputPath, metadata.getDatasetURN()));
        List<CommitStep> prePublish = getCommitSequence(datasetWorkUnitStates, PrePublishStep.class);
        List<CommitStep> postPublish = getCommitSequence(datasetWorkUnitStates, PostPublishStep.class);
        log.info(String.format("[%s] Found %d prePublish steps and %d postPublish steps.",
                datasetAndPartition.identifier(), prePublish.size(), postPublish.size()));
        executeCommitSequence(prePublish);
        if (hasCopyableFiles(datasetWorkUnitStates)) {
            // Targets are always absolute, so we start moving from root (will skip any existing directories).
            HadoopUtils.renameRecursively(this.fs, datasetWriterOutputPath, new Path("/"));
        } else {
            log.info(String.format("[%s] No copyable files in dataset. Proceeding to postpublish steps.",
                    datasetAndPartition.identifier()));
        }
        executeCommitSequence(postPublish);
        this.fs.delete(datasetWriterOutputPath, true);
        // MAX_VALUE sentinels: replaced by the minimum timestamps seen below.
        long datasetOriginTimestamp = Long.MAX_VALUE;
        long datasetUpstreamTimestamp = Long.MAX_VALUE;
        Optional<String> fileSetRoot = Optional.<String>absent();
        for (WorkUnitState wus : datasetWorkUnitStates) {
            if (wus.getWorkingState() == WorkingState.SUCCESSFUL) {
                wus.setWorkingState(WorkUnitState.WorkingState.COMMITTED);
            }
            CopyEntity copyEntity = CopySource.deserializeCopyEntity(wus);
            if (copyEntity instanceof CopyableFile) {
                CopyableFile copyableFile = (CopyableFile) copyEntity;
                DataFileVersionStrategy srcVS = this.srcDataFileVersionStrategy;
                DataFileVersionStrategy dstVS = this.dstDataFileVersionStrategy;
                // Prefer the file's own version strategy over the publisher defaults.
                if (copyableFile.getDataFileVersionStrategy() != null) {
                    Config versionStrategyConfig = ConfigFactory.parseMap(ImmutableMap.of(
                            DataFileVersionStrategy.DATA_FILE_VERSION_STRATEGY_KEY, copyableFile.getDataFileVersionStrategy()));
                    srcVS = DataFileVersionStrategy.instantiateDataFileVersionStrategy(this.srcFs, versionStrategyConfig);
                    dstVS = DataFileVersionStrategy.instantiateDataFileVersionStrategy(this.fs, versionStrategyConfig);
                }
                // Carry the source version to the destination when requested and supported.
                if (copyableFile.getPreserve().preserve(PreserveAttributes.Option.VERSION)
                        && dstVS.hasCharacteristic(DataFileVersionStrategy.Characteristic.SETTABLE)) {
                    dstVS.setVersion(copyableFile.getDestination(), srcVS.getVersion(copyableFile.getOrigin().getPath()));
                }
                if (wus.getWorkingState() == WorkingState.COMMITTED) {
                    CopyEventSubmitterHelper.submitSuccessfulFilePublish(this.eventSubmitter, copyableFile, wus);
                    // Dataset output path is injected in each copyableFile.
                    // This could be optimized with a dataset-level equivalent class for
                    // copyable entities holding dataset-wide information such as the
                    // dataset output path. Currently datasetOutputPath is only present
                    // for hive datasets.
                    if (!fileSetRoot.isPresent() && copyableFile.getDatasetOutputPath() != null) {
                        fileSetRoot = Optional.of(copyableFile.getDatasetOutputPath());
                    }
                    if (lineageInfo.isPresent()) {
                        lineageInfo.get().putDestination(copyableFile.getDestinationData(), 0, wus);
                    }
                }
                if (datasetOriginTimestamp > copyableFile.getOriginTimestamp()) {
                    datasetOriginTimestamp = copyableFile.getOriginTimestamp();
                }
                if (datasetUpstreamTimestamp > copyableFile.getUpstreamTimestamp()) {
                    datasetUpstreamTimestamp = copyableFile.getUpstreamTimestamp();
                }
            }
        }
        // If no valid values were observed, report 0 instead of the MAX_VALUE sentinel.
        if (Long.MAX_VALUE == datasetOriginTimestamp) {
            datasetOriginTimestamp = 0;
        }
        if (Long.MAX_VALUE == datasetUpstreamTimestamp) {
            datasetUpstreamTimestamp = 0;
        }
        additionalMetadata.put(SlaEventKeys.SOURCE_URI, this.state.getProp(SlaEventKeys.SOURCE_URI));
        additionalMetadata.put(SlaEventKeys.DESTINATION_URI, this.state.getProp(SlaEventKeys.DESTINATION_URI));
        additionalMetadata.put(SlaEventKeys.DATASET_OUTPUT_PATH, fileSetRoot.or("Unknown"));
        CopyEventSubmitterHelper.submitSuccessfulDatasetPublish(this.eventSubmitter, datasetAndPartition,
                Long.toString(datasetOriginTimestamp), Long.toString(datasetUpstreamTimestamp), additionalMetadata);
    }
}
public class ProtectableContainersInner {

    /**
     * Lists the containers registered to a Recovery Services vault.
     *
     * @param vaultName The name of the recovery services vault.
     * @param resourceGroupName The name of the resource group where the recovery
     *        services vault is present.
     * @param fabricName Fabric name associated with the container.
     * @param filter OData filter options.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the paged list of protectable containers
     */
    public Observable<Page<ProtectableContainerResourceInner>> listAsync(final String vaultName, final String resourceGroupName, final String fabricName, final String filter) {
        // Unwrap the ServiceResponse envelope and emit only the page body.
        return listWithServiceResponseAsync(vaultName, resourceGroupName, fabricName, filter)
                .map(new Func1<ServiceResponse<Page<ProtectableContainerResourceInner>>, Page<ProtectableContainerResourceInner>>() {
                    @Override
                    public Page<ProtectableContainerResourceInner> call(ServiceResponse<Page<ProtectableContainerResourceInner>> response) {
                        return response.body();
                    }
                });
    }
}
public class SourceTableFeatureDetails { /** * Represents the LSI properties for the table when the backup was created . It includes the IndexName , KeySchema and * Projection for the LSIs on the table at the time of backup . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setLocalSecondaryIndexes ( java . util . Collection ) } or * { @ link # withLocalSecondaryIndexes ( java . util . Collection ) } if you want to override the existing values . * @ param localSecondaryIndexes * Represents the LSI properties for the table when the backup was created . It includes the IndexName , * KeySchema and Projection for the LSIs on the table at the time of backup . * @ return Returns a reference to this object so that method calls can be chained together . */ public SourceTableFeatureDetails withLocalSecondaryIndexes ( LocalSecondaryIndexInfo ... localSecondaryIndexes ) { } }
if ( this . localSecondaryIndexes == null ) { setLocalSecondaryIndexes ( new java . util . ArrayList < LocalSecondaryIndexInfo > ( localSecondaryIndexes . length ) ) ; } for ( LocalSecondaryIndexInfo ele : localSecondaryIndexes ) { this . localSecondaryIndexes . add ( ele ) ; } return this ;
public class MyTableModel { /** * Re - evaluates the expressions in the table . */ void updateModel ( ) { } }
for ( int i = 0 ; i < expressions . size ( ) ; ++ i ) { String expr = expressions . get ( i ) ; String result = "" ; if ( expr . length ( ) > 0 ) { result = debugGui . dim . eval ( expr ) ; if ( result == null ) result = "" ; } else { result = "" ; } result = result . replace ( '\n' , ' ' ) ; values . set ( i , result ) ; } fireTableDataChanged ( ) ;
public class ViewSelectorAssertions {
    /**
     * Fluent assertion entry point for a selection of views from the given activity
     * based on the given selector. It may be helpful to statically import this rather
     * than {@link #assertThat(ViewSelection)} to avoid conflicts with other statically
     * imported {@code assertThat()} methods.
     *
     * @param selector the view selector expression used to pick views
     * @param activity the activity whose view hierarchy is searched
     * @return an assertion object wrapping the selected views
     */
    public static ViewSelectionAssert assertThatSelection(String selector, Activity activity) {
        // Build the selection first, then hand it to the generic assertThat() overload.
        return assertThat(selection(selector, activity));
    }
}
public class CPDefinitionServiceBaseImpl {
    /**
     * Sets the asset category remote service.
     *
     * @param assetCategoryService the asset category remote service to delegate to
     */
    public void setAssetCategoryService(com.liferay.asset.kernel.service.AssetCategoryService assetCategoryService) {
        // Plain dependency-injection setter; no validation is performed here.
        this.assetCategoryService = assetCategoryService;
    }
}
public class NameSpace {
    /**
     * Sets a variable or property. See "setVariable" for rules regarding scoping.
     * We first check for the existence of the variable. If it exists, we set it.
     * If the variable does not exist we look for a property. If the property
     * exists and is writable we set it. Finally, if neither the variable or the
     * property exist, we create a new variable.
     *
     * @param name the name
     * @param value the value
     * @param strictJava specifies whether strict java rules are applied
     * @throws UtilEvalError the util eval error
     */
    public void setVariableOrProperty(final String name, final Object value, final boolean strictJava) throws UtilEvalError {
        // Delegates to the four-argument overload; the trailing 'true' flag is
        // presumably "recurse into parent namespaces" — TODO confirm against the overload.
        this.setVariableOrProperty(name, value, strictJava, true);
    }
}
public class PushbackReader { /** * Reads a single character . * @ return The character read , or - 1 if the end of the stream has been * reached * @ exception IOException If an I / O error occurs */ public int read ( ) throws IOException { } }
synchronized ( lock ) { ensureOpen ( ) ; if ( pos < buf . length ) return buf [ pos ++ ] ; else return super . read ( ) ; }
public class Ifc4PackageImpl {
    /**
     * Returns the {@link EClass} for IfcSensorType, resolving it lazily from the
     * registered Ifc4 package on first access.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public EClass getIfcSensorType() {
        if (ifcSensorTypeEClass == null) {
            // Classifier index 592 is fixed by the generated package model; do not change by hand.
            ifcSensorTypeEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI).getEClassifiers().get(592);
        }
        return ifcSensorTypeEClass;
    }
}
public class JesqueUtils {
    /**
     * Join the given strings, separated by the given separator.
     *
     * @param sep the separator
     * @param strs the strings to join
     * @return the joined string
     */
    public static String join(final String sep, final String... strs) {
        // Delegates to the collection-based overload via a fixed-size list view.
        return join(sep, Arrays.asList(strs));
    }
}
public class AWSApplicationDiscoveryClient {
    /**
     * Associates one or more configuration items with an application.
     *
     * @param request the association request
     * @return Result of the AssociateConfigurationItemsToApplication operation returned by the service.
     * @throws AuthorizationErrorException
     *         The AWS user account does not have permission to perform the action. Check the IAM policy
     *         associated with this account.
     * @throws InvalidParameterException
     *         One or more parameters are not valid. Verify the parameters and try again.
     * @throws InvalidParameterValueException
     *         The value of one or more parameters are either invalid or out of range. Verify the parameter
     *         values and try again.
     * @throws ServerInternalErrorException
     *         The server experienced an internal error. Try again.
     * @sample AWSApplicationDiscovery.AssociateConfigurationItemsToApplication
     */
    @Override
    public AssociateConfigurationItemsToApplicationResult associateConfigurationItemsToApplication(AssociateConfigurationItemsToApplicationRequest request) {
        // Standard generated-SDK pattern: run request handlers/mutations, then execute.
        request = beforeClientExecution(request);
        return executeAssociateConfigurationItemsToApplication(request);
    }
}
public class EnableOnPhysicalHandler {
    /**
     * Initializes this handler with its owner record.
     * NOTE(review): the original javadoc describes a six-argument constructor;
     * this one-argument overload delegates with fixed defaults — null fields,
     * enable-on-valid and enable-on-new both true, no flag field. Confirm these
     * defaults against the six-argument init's parameter meanings.
     *
     * @param record my owner (usually passed as null, and set on addListener in setOwner())
     */
    public void init(Record record) {
        super.init(record, null, null, true, true, null);
    }
}
public class BitZMarketDataServiceRaw {
    /**
     * Fetches kline (candlestick) data for the given currency pair and interval type.
     * TODO: Exception Handling - See Bitfinex
     *
     * @param pair the currency pair identifier, in the exchange's expected format
     * @param type the kline interval type accepted by the Bit-Z API
     * @return the kline payload extracted from the API response envelope
     * @throws IOException if the HTTP call fails
     */
    public BitZKline getBitZKline(String pair, String type) throws IOException {
        // Unwraps the result envelope directly; error payloads are not inspected here (see TODO).
        return bitz.getKlineResult(pair, type).getData();
    }
}
public class ImportNodeData { /** * { @ inheritDoc } */ public AccessControlList getACL ( ) { } }
if ( exoPrivileges != null || exoOwner != null ) { return ACLInitializationHelper . initAcl ( super . getACL ( ) , exoOwner , exoPrivileges ) ; } return super . getACL ( ) ;
public class BdbStoreFactory { /** * Creates a Berkeley DB database environment from the provided environment * configuration . * @ return an environment instance . * @ throws SecurityException if the directory for storing the databases * could not be created . * @ throws EnvironmentNotFoundException if the environment does not exist * ( does not contain at least one log file ) and the EnvironmentConfig * AllowCreate parameter is false . * @ throws EnvironmentLockedException when an environment cannot be opened * for write access because another process has the same environment open * for write access . Warning : This exception should be handled when an * environment is opened by more than one process . * @ throws VersionMismatchException when the existing log is not compatible * with the version of JE that is running . This occurs when a later version * of JE was used to create the log . Warning : This exception should be * handled when more than one version of JE may be used to access an * environment . * @ throws EnvironmentFailureException if an unexpected , internal or * environment - wide failure occurs . * @ throws java . lang . UnsupportedOperationException if this environment was * previously opened for replication and is not being opened read - only . * @ throws java . lang . IllegalArgumentException if an invalid parameter is * specified , for example , an invalid EnvironmentConfig parameter . */ private Environment createEnvironment ( ) { } }
EnvironmentConfig envConf = createEnvConfig ( ) ; if ( ! envFile . exists ( ) ) { envFile . mkdirs ( ) ; } LOGGER . log ( Level . INFO , "Initialized BerkeleyDB cache environment at {0}" , envFile . getAbsolutePath ( ) ) ; return new Environment ( this . envFile , envConf ) ;
public class QuestQueryProcessor { /** * Returns the final rewriting of the given query */ @ Override public String getRewritingRendering ( InputQuery query ) throws OntopReformulationException { } }
InternalSparqlQuery translation = query . translate ( inputQueryTranslator ) ; try { IQ converetedIQ = preProcess ( translation ) ; IQ rewrittenIQ = rewriter . rewrite ( converetedIQ ) ; return rewrittenIQ . toString ( ) ; } catch ( EmptyQueryException e ) { e . printStackTrace ( ) ; } return "EMPTY REWRITING" ;
public class Validate {
    /**
     * <p>Validate that the specified argument map is neither {@code null} nor a size of zero
     * (no elements); otherwise throwing an exception with the specified message.
     * <pre>Validate.notEmpty(myMap, "The map must not be empty");</pre>
     *
     * @param <T> the map type
     * @param map the map to check, validated not null by this method
     * @param message the {@link String#format(String, Object...)} exception message if invalid, not null
     * @param values the optional values for the formatted exception message, null array not recommended
     * @return the validated map (never {@code null} method for chaining)
     * @throws NullPointerValidationException if the map is {@code null}
     * @throws IllegalArgumentException if the map is empty
     * @see #notEmpty(Object[])
     */
    public static <T extends Map<?, ?>> T notEmpty(final T map, final String message, final Object... values) {
        // Static facade over the singleton instance's implementation.
        return INSTANCE.notEmpty(map, message, values);
    }
}
public class VirtualMachineScaleSetsInner {
    /**
     * Deallocates specific virtual machines in a VM scale set. Shuts down the virtual machines and
     * releases the compute resources. You are not billed for the compute resources that this virtual
     * machine scale set deallocates.
     *
     * @param resourceGroupName The name of the resource group.
     * @param vmScaleSetName The name of the VM scale set.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable for the request
     */
    public Observable<OperationStatusResponseInner> deallocateAsync(String resourceGroupName, String vmScaleSetName) {
        // Unwrap the ServiceResponse envelope, emitting only the response body downstream.
        return deallocateWithServiceResponseAsync(resourceGroupName, vmScaleSetName).map(new Func1<ServiceResponse<OperationStatusResponseInner>, OperationStatusResponseInner>() {
            @Override
            public OperationStatusResponseInner call(ServiceResponse<OperationStatusResponseInner> response) {
                return response.body();
            }
        });
    }
}
public class X509CertSelector { /** * Make a { @ code GeneralNameInterface } out of a name type ( 0-8 ) and an * Object that may be a byte array holding the ASN . 1 DER encoded * name or a String form of the name . Except for X . 509 * Distinguished Names , the String form of the name must not be the * result from calling toString on an existing GeneralNameInterface * implementing class . The output of toString is not compatible * with the String constructors for names other than Distinguished * Names . * @ param type name type ( 0-8) * @ param name name as ASN . 1 Der - encoded byte array or String * @ return a GeneralNameInterface name * @ throws IOException if a parsing error occurs */ static GeneralNameInterface makeGeneralNameInterface ( int type , Object name ) throws IOException { } }
GeneralNameInterface result ; if ( debug != null ) { debug . println ( "X509CertSelector.makeGeneralNameInterface(" + type + ")..." ) ; } if ( name instanceof String ) { if ( debug != null ) { debug . println ( "X509CertSelector.makeGeneralNameInterface() " + "name is String: " + name ) ; } switch ( type ) { case NAME_RFC822 : result = new RFC822Name ( ( String ) name ) ; break ; case NAME_DNS : result = new DNSName ( ( String ) name ) ; break ; case NAME_DIRECTORY : result = new X500Name ( ( String ) name ) ; break ; case NAME_URI : result = new URIName ( ( String ) name ) ; break ; case NAME_IP : result = new IPAddressName ( ( String ) name ) ; break ; case NAME_OID : result = new OIDName ( ( String ) name ) ; break ; default : throw new IOException ( "unable to parse String names of type " + type ) ; } if ( debug != null ) { debug . println ( "X509CertSelector.makeGeneralNameInterface() " + "result: " + result . toString ( ) ) ; } } else if ( name instanceof byte [ ] ) { DerValue val = new DerValue ( ( byte [ ] ) name ) ; if ( debug != null ) { debug . println ( "X509CertSelector.makeGeneralNameInterface() is byte[]" ) ; } switch ( type ) { case NAME_ANY : result = new OtherName ( val ) ; break ; case NAME_RFC822 : result = new RFC822Name ( val ) ; break ; case NAME_DNS : result = new DNSName ( val ) ; break ; case NAME_X400 : result = new X400Address ( val ) ; break ; case NAME_DIRECTORY : result = new X500Name ( val ) ; break ; case NAME_EDI : result = new EDIPartyName ( val ) ; break ; case NAME_URI : result = new URIName ( val ) ; break ; case NAME_IP : result = new IPAddressName ( val ) ; break ; case NAME_OID : result = new OIDName ( val ) ; break ; default : throw new IOException ( "unable to parse byte array names of " + "type " + type ) ; } if ( debug != null ) { debug . println ( "X509CertSelector.makeGeneralNameInterface() result: " + result . toString ( ) ) ; } } else { if ( debug != null ) { debug . 
println ( "X509CertSelector.makeGeneralName() input name " + "not String or byte array" ) ; } throw new IOException ( "name not String or byte array" ) ; } return result ;
public class Ifc2x3tc1FactoryImpl {
    /**
     * Converts the serialized literal into an IfcPipeSegmentTypeEnum value.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public IfcPipeSegmentTypeEnum createIfcPipeSegmentTypeEnumFromString(EDataType eDataType, String initialValue) {
        IfcPipeSegmentTypeEnum result = IfcPipeSegmentTypeEnum.get(initialValue);
        // get() returns null for unknown literals; reject them explicitly.
        if (result == null)
            throw new IllegalArgumentException("The value '" + initialValue + "' is not a valid enumerator of '" + eDataType.getName() + "'");
        return result;
    }
}
public class FessMessages {
    /**
     * Add the created action message for the key 'errors.suffix' with parameters.
     *
     * @param property The property name for the message. (NotNull)
     * @return this. (NotNull)
     */
    public FessMessages addErrorsSuffix(String property) {
        assertPropertyNotNull(property);
        add(property, new UserMessage(ERRORS_SUFFIX));
        return this;
    }
}
public class GraphRunner {
    /**
     * Write out the session options used by this {@link GraphRunner}
     * as a json string using the {@link JsonFormat}.
     *
     * @return the JSON rendering of the session config proto, or {@code null}
     *         if printing fails — callers must null-check.
     */
    public String sessionOptionsToJson() {
        try {
            return JsonFormat.printer().print(protoBufConfigProto);
        } catch (Exception e) {
            // NOTE(review): the failure is swallowed (stack trace only) and null is
            // returned; consider narrowing the catch and using a logger instead.
            e.printStackTrace();
        }
        return null;
    }
}
public class ItemLevelRecoveryConnectionsInner {
    /**
     * Revokes an iSCSI connection which can be used to download a script. Executing this script opens
     * a file explorer displaying all recoverable files and folders. This is an asynchronous operation.
     *
     * @param vaultName The name of the Recovery Services vault.
     * @param resourceGroupName The name of the resource group associated with the Recovery Services vault.
     * @param fabricName The fabric name associated with the backup items. The value allowed is Azure.
     * @param containerName The container name associated with the backup items.
     * @param protectedItemName The name of the backup items whose files or folders will be restored.
     * @param recoveryPointId The string that identifies the recovery point.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<Void> revokeAsync(String vaultName, String resourceGroupName, String fabricName, String containerName, String protectedItemName, String recoveryPointId, final ServiceCallback<Void> serviceCallback) {
        // Bridge the observable-based implementation to the callback-based API.
        return ServiceFuture.fromResponse(revokeWithServiceResponseAsync(vaultName, resourceGroupName, fabricName, containerName, protectedItemName, recoveryPointId), serviceCallback);
    }
}
public class ResourceHandler {
    /**
     * Renders a directory listing for the requested resource as an HTML page,
     * caching the generated bytes on the resource when possible.
     *
     * @param request the incoming request (marked handled on success)
     * @param response the response to write the listing to
     * @param resource the directory resource to list
     * @param parent whether to include a link to the parent directory
     * @throws IOException if writing the response fails
     */
    void sendDirectory(HttpRequest request, HttpResponse response, Resource resource, boolean parent) throws IOException {
        // Directory listings can be disabled by configuration.
        if (!_dirAllowed) {
            response.sendError(HttpResponse.__403_Forbidden);
            return;
        }
        request.setHandled(true);
        if (log.isDebugEnabled())
            log.debug("sendDirectory: " + resource);
        // Reuse previously generated listing bytes if the resource caches them.
        byte[] data = null;
        if (resource instanceof CachedResource)
            data = ((CachedResource) resource).getCachedData();
        if (data == null) {
            String base = URI.addPaths(request.getPath(), "/");
            String dir = resource.getListHTML(URI.encodePath(base), parent);
            if (dir == null) {
                response.sendError(HttpResponse.__403_Forbidden, "No directory");
                return;
            }
            data = dir.getBytes("UTF8");
            if (resource instanceof CachedResource)
                ((CachedResource) resource).setCachedData(data);
        }
        response.setContentType("text/html; charset=UTF8");
        response.setContentLength(data.length);
        // HEAD requests get headers only — commit without a body.
        if (request.getMethod().equals(HttpRequest.__HEAD)) {
            response.commit();
            return;
        }
        response.getOutputStream().write(data, 0, data.length);
        response.commit();
    }
}
public class Frame { /** * Pushes a value into the operand stack of this frame . * @ param value * the value that must be pushed into the stack . * @ throws IndexOutOfBoundsException * if the operand stack is full . */ public void push ( final V value ) throws IndexOutOfBoundsException { } }
if ( top + locals >= values . length ) { throw new IndexOutOfBoundsException ( "Insufficient maximum stack size." ) ; } values [ top ++ + locals ] = value ;
public class Curve25519 {
    /**
     * Convert from internal format to little-endian byte format. The
     * number must be in a reduced form which is output by the following ops:
     * unpack, mul, sqr, set -- if input in range 0..P25.
     * If you're unsure if the number is reduced, first multiply it by 1.
     *
     * @param x the reduced 10-limb internal representation to serialize
     * @param m the 32-byte little-endian output buffer
     */
    private static final void pack(long10 x, byte[] m) {
        // ld/ud fold a possible final reduction (value >= p or negative top limb)
        // into the first and last limbs so the output is fully reduced mod 2^255-19.
        int ld = 0, ud = 0;
        long t;
        ld = (is_overflow(x) ? 1 : 0) - ((x._9 < 0) ? 1 : 0);
        ud = ld * -(P25 + 1);
        ld *= 19;
        // Each step below merges the next limbs (limb widths alternate 26/25 bits)
        // into a 32-bit little-endian word, carrying the overflow forward in t.
        t = ld + x._0 + (x._1 << 26);
        m[0] = (byte) t;
        m[1] = (byte) (t >> 8);
        m[2] = (byte) (t >> 16);
        m[3] = (byte) (t >> 24);
        t = (t >> 32) + (x._2 << 19);
        m[4] = (byte) t;
        m[5] = (byte) (t >> 8);
        m[6] = (byte) (t >> 16);
        m[7] = (byte) (t >> 24);
        t = (t >> 32) + (x._3 << 13);
        m[8] = (byte) t;
        m[9] = (byte) (t >> 8);
        m[10] = (byte) (t >> 16);
        m[11] = (byte) (t >> 24);
        t = (t >> 32) + (x._4 << 6);
        m[12] = (byte) t;
        m[13] = (byte) (t >> 8);
        m[14] = (byte) (t >> 16);
        m[15] = (byte) (t >> 24);
        t = (t >> 32) + x._5 + (x._6 << 25);
        m[16] = (byte) t;
        m[17] = (byte) (t >> 8);
        m[18] = (byte) (t >> 16);
        m[19] = (byte) (t >> 24);
        t = (t >> 32) + (x._7 << 19);
        m[20] = (byte) t;
        m[21] = (byte) (t >> 8);
        m[22] = (byte) (t >> 16);
        m[23] = (byte) (t >> 24);
        t = (t >> 32) + (x._8 << 12);
        m[24] = (byte) t;
        m[25] = (byte) (t >> 8);
        m[26] = (byte) (t >> 16);
        m[27] = (byte) (t >> 24);
        // ud applies the reduction correction to the top limb before emitting it.
        t = (t >> 32) + ((x._9 + ud) << 6);
        m[28] = (byte) t;
        m[29] = (byte) (t >> 8);
        m[30] = (byte) (t >> 16);
        m[31] = (byte) (t >> 24);
    }
}
public class VarExporter { /** * Write all variables as a JSON object . Will not escape names or values . All values are * written as Strings . * @ param out writer */ public void dumpJson ( final PrintWriter out ) { } }
out . append ( "{" ) ; visitVariables ( new Visitor ( ) { int count = 0 ; public void visit ( Variable var ) { if ( count ++ > 0 ) { out . append ( ", " ) ; } out . append ( var . getName ( ) ) . append ( "='" ) . append ( String . valueOf ( var . getValue ( ) ) ) . append ( "'" ) ; } } ) ; out . append ( "}" ) ;
public class Lpc {
    /**
     * Interpolates the log curve from the linear curve: expands the LPC
     * coefficients into a spectral magnitude curve via an inverse FFT.
     *
     * @param curve output buffer; must hold at least ln*2 floats (used as FFT workspace)
     * @param lpc the m LPC coefficients
     * @param amp amplitude scale; 0 leaves the curve all zeros
     */
    void lpc_to_curve(float[] curve, float[] lpc, float amp) {
        // Clear the full interleaved FFT workspace first.
        for (int i = 0; i < ln * 2; i++)
            curve[i] = 0.0f;
        if (amp == 0)
            return;
        // Load the scaled coefficients into adjacent odd/even slots with opposite signs.
        for (int i = 0; i < m; i++) {
            curve[i * 2 + 1] = lpc[i] / (4 * amp);
            curve[i * 2 + 2] = -lpc[i] / (4 * amp);
        }
        fft.backward(curve);
        {
            int l2 = ln * 2;
            float unit = (float) (1. / amp);
            curve[0] = (float) (1. / (curve[0] * 2 + unit));
            // Combine the mirrored halves of the transform into magnitudes.
            for (int i = 1; i < ln; i++) {
                float real = (curve[i] + curve[l2 - i]);
                float imag = (curve[i] - curve[l2 - i]);
                float a = real + unit;
                curve[i] = (float) (1.0 / FAST_HYPOT(a, imag));
            }
        }
    }
}
public class FctConvertersToFromString {
    /**
     * <p>Create and register a CnvTfsHasId (Composite) converter.</p>
     *
     * @param pBeanName bean name used as the registration key
     * @param pClass bean class
     * @param pIdName bean ID field name
     * @return requested CnvTfsHasId (String)
     * @throws Exception an exception
     */
    protected final CnvTfsHasId<IHasId<Object>, Object> createHasCompositeIdConverter(final String pBeanName, final Class pClass, final String pIdName) throws Exception {
        CnvTfsHasId<IHasId<Object>, Object> convrt = new CnvTfsHasId<IHasId<Object>, Object>();
        convrt.setUtlReflection(getUtlReflection());
        // Resolve the ID field reflectively so the nested converter matches its declared type.
        Field rapiFieldId = this.fieldsRapiHolder.getFor(pClass, pIdName);
        convrt.setIdConverter(lazyGetCnvTfsObject(rapiFieldId.getType()));
        convrt.init(pClass, pIdName);
        // Cache under the bean name for subsequent lookups.
        this.convertersMap.put(pBeanName, convrt);
        return convrt;
    }
}
public class ParquetGroup { /** * Add any object of { @ link PrimitiveType } or { @ link Group } type with a String key . * @ param key * @ param object */ public void add ( String key , Object object ) { } }
int fieldIndex = getIndex ( key ) ; if ( object . getClass ( ) == ParquetGroup . class ) { this . addGroup ( key , ( Group ) object ) ; } else { this . add ( fieldIndex , ( Primitive ) object ) ; }
public class FileUtils { /** * Copia el contenido de un fichero a otro en caso de error lanza una excepcion . * @ param source * @ param dest * @ throws IOException */ public static void copyFromFileToFile ( File source , File dest ) throws IOException { } }
try { FileInputStream in = new FileInputStream ( source ) ; FileOutputStream out = new FileOutputStream ( dest ) ; try { FileChannel canalFuente = in . getChannel ( ) ; FileChannel canalDestino = out . getChannel ( ) ; canalFuente . transferTo ( 0 , canalFuente . size ( ) , canalDestino ) ; } catch ( IOException e ) { throw new IOException ( "copiando ficheros orig:'" + source . getAbsolutePath ( ) + "' destino:'" + dest . getAbsolutePath ( ) + "'" , e ) ; } finally { IOUtils . closeQuietly ( in ) ; IOUtils . closeQuietly ( out ) ; // try { // in . close ( ) ; // } catch ( IOException e ) { // try { // out . close ( ) ; // } catch ( IOException e ) { } } catch ( FileNotFoundException e ) { throw new IOException ( "copiando ficheros orig:'" + source . getAbsolutePath ( ) + "' destino:'" + dest . getAbsolutePath ( ) + "'" , e ) ; }
public class RobustLoaderWriterResilienceStrategy {
    /**
     * Delete the key from the loader-writer if it is found with a matching value. Note that the
     * load and write pair is not atomic. This atomicity, if needed, should be handled by
     * something else.
     *
     * @param key the key being removed
     * @param value the value being removed
     * @param e the triggered failure
     * @return if the value was removed
     */
    @Override
    public boolean removeFailure(K key, V value, StoreAccessException e) {
        try {
            V loadedValue;
            try {
                loadedValue = loaderWriter.load(key);
            } catch (Exception e1) {
                // Wrap the load failure, keeping the original store failure as context.
                throw ExceptionFactory.newCacheLoadingException(e1, e);
            }
            // Nothing stored, or the stored value differs: nothing to remove.
            if (loadedValue == null) {
                return false;
            }
            if (!loadedValue.equals(value)) {
                return false;
            }
            try {
                loaderWriter.delete(key);
            } catch (Exception e1) {
                throw ExceptionFactory.newCacheWritingException(e1, e);
            }
            return true;
        } finally {
            // Always clean up the failed store entry, whatever the outcome above.
            cleanup(key, e);
        }
    }
}
public class AfplibPackageImpl {
    /**
     * Returns the {@link EClass} for MBC, resolving it lazily from the registered
     * Afplib package on first access.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EClass getMBC() {
        if (mbcEClass == null) {
            // Classifier index 287 is fixed by the generated package model; do not change by hand.
            mbcEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(AfplibPackage.eNS_URI).getEClassifiers().get(287);
        }
        return mbcEClass;
    }
}
public class LogConfiguration {
    /**
     * The configuration options to send to the log driver. This parameter requires version 1.19 of
     * the Docker Remote API or greater on your container instance. To check the Docker Remote API
     * version on your container instance, log in to your container instance and run the following
     * command: <code>sudo docker version --format '{{.Server.APIVersion}}'</code>
     *
     * @param options the configuration options to send to the log driver
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public LogConfiguration withOptions(java.util.Map<String, String> options) {
        // Fluent wrapper around the plain setter; replaces (does not merge) existing options.
        setOptions(options);
        return this;
    }
}
public class Tracer {
    /**
     * Stop the trace.
     * This may only be done once and must be done from the same thread
     * that started it.
     *
     * @param silenceThreshold Traces for time less than silence_threshold
     * ms will be left out of the trace report. A value of -1 indicates
     * that the current ThreadTrace silence_threshold should be used.
     * @return The time that this trace actually ran
     */
    long stop(int silenceThreshold) {
        checkState(Thread.currentThread() == startThread);
        ThreadTrace trace = getThreadTrace();
        // Do nothing if the thread trace was not initialized.
        if (!trace.isInitialized()) {
            return 0;
        }
        stopTimeMs = clock.currentTimeMillis();
        if (extraTracingValues != null) {
            // We use extraTracingValues.length rather than
            // extraTracingStatistics.size() because a new statistic may
            // have been added
            for (int i = 0; i < extraTracingValues.length; i++) {
                long value = extraTracingStatistics.get(i).stop(startThread);
                extraTracingValues[i] = value - extraTracingValues[i];
            }
        }
        // Do nothing if the thread trace was not initialized.
        // NOTE(review): this repeats the identical check above; nothing visible in
        // between re-initializes the trace — confirm whether the second check is needed.
        if (!trace.isInitialized()) {
            return 0;
        }
        trace.endEvent(this, silenceThreshold);
        return stopTimeMs - startTimeMs;
    }
}
public class DirectoryOperation { /** * Get the total number of bytes that this operation will transfer * @ return long */ public long getTransferSize ( ) throws SftpStatusException , SshException { } }
Object obj ; long size = 0 ; SftpFile sftpfile ; File file ; for ( Enumeration e = newFiles . elements ( ) ; e . hasMoreElements ( ) ; ) { obj = e . nextElement ( ) ; if ( obj instanceof File ) { file = ( File ) obj ; if ( file . isFile ( ) ) { size += file . length ( ) ; } } else if ( obj instanceof SftpFile ) { sftpfile = ( SftpFile ) obj ; if ( sftpfile . isFile ( ) ) { size += sftpfile . getAttributes ( ) . getSize ( ) . longValue ( ) ; } } } for ( Enumeration e = updatedFiles . elements ( ) ; e . hasMoreElements ( ) ; ) { obj = e . nextElement ( ) ; if ( obj instanceof File ) { file = ( File ) obj ; if ( file . isFile ( ) ) { size += file . length ( ) ; } } else if ( obj instanceof SftpFile ) { sftpfile = ( SftpFile ) obj ; if ( sftpfile . isFile ( ) ) { size += sftpfile . getAttributes ( ) . getSize ( ) . longValue ( ) ; } } } // Add a value for deleted files ? ? return size ;
public class Ifc2x3tc1FactoryImpl {
    /**
     * Converts an IfcThermalLoadSourceEnum value to its serialized literal.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public String convertIfcThermalLoadSourceEnumToString(EDataType eDataType, Object instanceValue) {
        // Null-safe: a null enum value serializes as null.
        return instanceValue == null ? null : instanceValue.toString();
    }
}
public class ManagementModule {
    /**
     * {@inheritDoc}
     *
     * Delegates directly to the wrapped management implementation.
     */
    @Override
    public Date[] purgeDatastream(Context context, String pid, String datastreamID, Date startDT, Date endDT, String logMessage) throws ServerException {
        return mgmt.purgeDatastream(context, pid, datastreamID, startDT, endDT, logMessage);
    }
}
public class LongRunningJobMonitor {
    /**
     * Run the too-long-jobs detection on the {@link Scheduler}.
     * This method is *not* thread safe.
     */
    @Override
    public void run() {
        // Snapshot the time once so every job is judged against the same instant.
        long currentTime = timeProvider.currentTime();
        for (Job job : scheduler.jobStatus()) {
            // First clear state for jobs that completed, then flag the still-running ones.
            cleanUpLongJobIfItHasFinishedExecuting(currentTime, job);
            detectLongRunningJob(currentTime, job);
        }
    }
}
public class JCRStoreResource {
    /**
     * Tells the resource manager to forget about a heuristically completed
     * transaction branch. This implementation only logs the call — no state is
     * kept per Xid because each file id gets a fresh store resource instance.
     *
     * @param _xid global transaction identifier (not used, because each file
     *        with the file id gets a new VFS store resource instance)
     */
    @Override
    public void forget(final Xid _xid) {
        if (JCRStoreResource.LOG.isDebugEnabled()) {
            JCRStoreResource.LOG.debug("forget (xid = " + _xid + ")");
        }
    }
}
public class TimeArrayTimeZoneRule { /** * { @ inheritDoc } */ @ Override public Date getPreviousStart ( long base , int prevOffset , int prevDSTSavings , boolean inclusive ) { } }
int i = startTimes . length - 1 ; for ( ; i >= 0 ; i -- ) { long time = getUTC ( startTimes [ i ] , prevOffset , prevDSTSavings ) ; if ( time < base || ( inclusive && time == base ) ) { return new Date ( time ) ; } } return null ;
public class MtasSolrBaseList {
    /**
     * Adds the status to this list and indexes it by key.
     *
     * @param status the status (must not be null)
     * @throws IOException Signals that an I/O exception has occurred, in
     *         particular when an unfinished status with the same key already exists.
     */
    public void add(MtasSolrStatus status) throws IOException {
        Objects.requireNonNull(status);
        if (enabled) {
            data.add(status);
            if (!index.containsKey(status.key())) {
                index.put(status.key(), status);
                garbageCollect();
            } else {
                // Key collision: garbage-collect first, then retry, since the
                // colliding entry may have been eligible for collection.
                garbageCollect();
                MtasSolrStatus oldStatus = index.get(status.key());
                if (oldStatus == null) {
                    // Collision was collected — the key is free now.
                    index.put(status.key(), status);
                } else if (oldStatus.finished()) {
                    // Finished entries may be displaced by the new status.
                    remove(oldStatus);
                    index.put(status.key(), status);
                } else {
                    // A live status holds the key — reject the duplicate.
                    throw new IOException("key " + status.key() + " already exists");
                }
            }
        }
    }
}
public class Instrumentation {
    /**
     * <code>optional string app_name_setter = 5;</code>
     * <pre>
     * name of function (&lt;string&gt;);
     * used to inform the harness about the app name
     * </pre>
     *
     * @return the app name setter function name, decoding it from bytes on first access
     */
    public java.lang.String getAppNameSetter() {
        java.lang.Object ref = appNameSetter_;
        if (ref instanceof java.lang.String) {
            // Already decoded (or set directly as a String).
            return (java.lang.String) ref;
        } else {
            // Standard protobuf lazy decode: cache the String form only when the
            // bytes are valid UTF-8, so invalid payloads keep their raw bytes.
            com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
            java.lang.String s = bs.toStringUtf8();
            if (bs.isValidUtf8()) {
                appNameSetter_ = s;
            }
            return s;
        }
    }
}