signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class JsonPath { /** * Applies this JsonPath to the provided json file * @ param jsonFile file to read from * @ param < T > expected return type * @ return list of objects matched by the given path * @ throws IOException */ @ SuppressWarnings ( { } }
"unchecked" } ) public < T > T read ( File jsonFile ) throws IOException { return read ( jsonFile , Configuration . defaultConfiguration ( ) ) ;
public class HadoopOutputFormatBase { @ Override public void configure ( Configuration parameters ) { } }
// enforce sequential configure ( ) calls synchronized ( CONFIGURE_MUTEX ) { if ( this . mapreduceOutputFormat instanceof Configurable ) { ( ( Configurable ) this . mapreduceOutputFormat ) . setConf ( this . configuration ) ; } }
public class FileUtilities { /** * This method checks for the requirement for an update . * If a the target file exists and the modification time is greater than the * modification time of the source file , we do not need to analyze something . * @ param sourceFile * is the source file where it is intended to be copied from . * @ param targetFile * is the file to which everything is to be copied to . * @ return < code > true < / code > is returned in case of a required update . * < code > false < / code > is returned otherwise . */ public static boolean isUpdateRequired ( File sourceFile , File targetFile ) { } }
if ( targetFile . exists ( ) ) { if ( targetFile . lastModified ( ) > sourceFile . lastModified ( ) ) { return false ; } } return true ;
public class ShiroRule { /** * { @ inheritDoc } */ @ Override public Statement apply ( final Statement base , FrameworkMethod method , Object target ) { } }
tearDownShiro ( ) ; // clean up whatever other tests might have left behind final SubjectAwareDescriptor desc = new SubjectAwareDescriptor ( ) ; SubjectAware subjectAware = SubjectAwares . find ( target ) ; if ( subjectAware != null ) { desc . merge ( subjectAware ) ; } subjectAware = SubjectAwares . find ( method . getAnnotations ( ) ) ; if ( subjectAware != null ) { desc . merge ( subjectAware ) ; } return new Statement ( ) { @ Override public void evaluate ( ) throws Throwable { if ( desc . isMerged ( ) ) { initializeSecurityManager ( desc ) ; } try { base . evaluate ( ) ; } finally { tearDownShiro ( ) ; } } } ;
public class HtmlTree { /** * Generates a UL tag with the style class attribute and some content . * @ param styleClass style for the tag * @ param body content for the tag * @ return an HtmlTree object for the UL tag */ public static HtmlTree UL ( HtmlStyle styleClass , Content body ) { } }
HtmlTree htmltree = new HtmlTree ( HtmlTag . UL , nullCheck ( body ) ) ; htmltree . addStyle ( nullCheck ( styleClass ) ) ; return htmltree ;
public class RatePlanCreator { /** * Add the requested post parameters to the Request . * @ param request Request to add post params to */ private void addPostParams ( final Request request ) { } }
if ( uniqueName != null ) { request . addPostParam ( "UniqueName" , uniqueName ) ; } if ( friendlyName != null ) { request . addPostParam ( "FriendlyName" , friendlyName ) ; } if ( dataEnabled != null ) { request . addPostParam ( "DataEnabled" , dataEnabled . toString ( ) ) ; } if ( dataLimit != null ) { request . addPostParam ( "DataLimit" , dataLimit . toString ( ) ) ; } if ( dataMetering != null ) { request . addPostParam ( "DataMetering" , dataMetering ) ; } if ( messagingEnabled != null ) { request . addPostParam ( "MessagingEnabled" , messagingEnabled . toString ( ) ) ; } if ( voiceEnabled != null ) { request . addPostParam ( "VoiceEnabled" , voiceEnabled . toString ( ) ) ; } if ( nationalRoamingEnabled != null ) { request . addPostParam ( "NationalRoamingEnabled" , nationalRoamingEnabled . toString ( ) ) ; } if ( internationalRoaming != null ) { for ( String prop : internationalRoaming ) { request . addPostParam ( "InternationalRoaming" , prop ) ; } } if ( nationalRoamingDataLimit != null ) { request . addPostParam ( "NationalRoamingDataLimit" , nationalRoamingDataLimit . toString ( ) ) ; } if ( internationalRoamingDataLimit != null ) { request . addPostParam ( "InternationalRoamingDataLimit" , internationalRoamingDataLimit . toString ( ) ) ; }
public class AfplibPackageImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public EClass getEPM ( ) { } }
if ( epmEClass == null ) { epmEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( AfplibPackage . eNS_URI ) . getEClassifiers ( ) . get ( 256 ) ; } return epmEClass ;
public class QueryRecord { /** * Add this table link to this query . * Creates a new tablelink and adds it to the link list . */ public void addRelationship ( int linkType , Record recLeft , Record recRight , int ifldLeft1 , int ifldRight1 , int ifldLeft2 , int ifldRight2 , int ifldLeft3 , int ifldRight3 ) { } }
String fldLeft1 = recLeft . getField ( ifldLeft1 ) . getFieldName ( ) ; String fldRight1 = recRight . getField ( ifldRight1 ) . getFieldName ( ) ; String fldLeft2 = ifldLeft2 != - 1 ? recLeft . getField ( ifldLeft2 ) . getFieldName ( ) : null ; String fldRight2 = ifldRight2 != - 1 ? recRight . getField ( ifldRight2 ) . getFieldName ( ) : null ; String fldLeft3 = ifldLeft3 != - 1 ? recLeft . getField ( ifldLeft3 ) . getFieldName ( ) : null ; String fldRight3 = ifldRight3 != - 1 ? recRight . getField ( ifldRight3 ) . getFieldName ( ) : null ; new TableLink ( this , linkType , recLeft , recRight , fldLeft1 , fldRight1 , fldLeft2 , fldRight2 , fldLeft3 , fldRight3 ) ;
public class HttpCarbonMessage { /** * Sends a push response message back to the client . * @ param httpCarbonMessage the push response message * @ param pushPromise the push promise associated with the push response message * @ return HttpResponseFuture which gives the status of the operation * @ throws ServerConnectorException if there is an error occurs while doing the operation */ public HttpResponseFuture pushResponse ( HttpCarbonMessage httpCarbonMessage , Http2PushPromise pushPromise ) throws ServerConnectorException { } }
httpOutboundRespFuture . notifyHttpListener ( httpCarbonMessage , pushPromise ) ; return httpOutboundRespStatusFuture ;
public class AbstractManagedType { /** * On check list attribute . * @ param < E > * the element type * @ param pluralAttribute * the plural attribute * @ param paramClass * the param class * @ return true , if successful */ private < E > boolean onCheckListAttribute ( PluralAttribute < ? super X , ? , ? > pluralAttribute , Class < E > paramClass ) { } }
if ( pluralAttribute != null ) { if ( isListAttribute ( pluralAttribute ) && isBindable ( pluralAttribute , paramClass ) ) { return true ; } } return false ;
public class Fingerprint { /** * Returns true if the serialized fingerprint matches the fingerprint in the content part . * @ param fp a parsed fingerprint object * @ return true if the given fingerprint matches the current fingerprint ' s content */ public boolean matchContent ( Fingerprint fp ) { } }
for ( Tag tag : CONTENT_TAGS ) { if ( ! getTag ( tag ) . equals ( fp . getTag ( tag ) ) ) { return false ; } } return true ;
public class CoherenceCacheTarget { /** * { @ inheritDoc } */ public void beginImport ( ) { } }
if ( m_cache == null ) { m_cache = CacheFactory . getCache ( m_cacheName ) ; } m_batch = new HashMap ( m_batchSize ) ;
public class ServiceInfo { /** * Removes the given Attributes from the Attributes of this < code > ServiceInfo < / code > , returning a new instance of * ServiceInfo containing the difference of the Attributes ; the current instance of ServiceInfo is left unaltered . * The given Attributes may only contain the tags to remove ( values are not considered ) . * @ param thatAttrs The < code > Attributes < / code > tags to remove from this instance ' s Attributes * @ return A newly created < code > ServiceInfo < / code > containing the difference of the Attributes * @ see Attributes # complement ( Attributes ) */ public ServiceInfo removeAttributes ( Attributes thatAttrs ) { } }
Attributes thisAttr = getAttributes ( ) ; Attributes mergedAttrs = null ; if ( thisAttr != null ) mergedAttrs = thisAttr . complement ( thatAttrs ) ; return clone ( getScopes ( ) , mergedAttrs ) ;
public class Solo { /** * Zooms in or out if startPoint1 and startPoint2 are larger or smaller then endPoint1 and endPoint2 . Requires API level > = 14. * @ param startPoint1 First " finger " down on the screen * @ param startPoint2 Second " finger " down on the screen * @ param endPoint1 Corresponding ending point of startPoint1 * @ param endPoint2 Corresponding ending point of startPoint2 */ public void pinchToZoom ( PointF startPoint1 , PointF startPoint2 , PointF endPoint1 , PointF endPoint2 ) { } }
if ( config . commandLogging ) { Log . d ( config . commandLoggingTag , "pinchToZoom(" + startPoint1 + ", " + startPoint2 + ", " + endPoint1 + ", " + endPoint2 + ")" ) ; } if ( android . os . Build . VERSION . SDK_INT < 14 ) { throw new RuntimeException ( "pinchToZoom() requires API level >= 14" ) ; } zoomer . generateZoomGesture ( startPoint1 , startPoint2 , endPoint1 , endPoint2 ) ;
public class ChannelAccess { /** * Handles a processed < tt > Buffer < / tt > . This method is invoked by the * asynchronous IO worker threads upon completion of the IO request with the * provided buffer and / or an exception that occurred while processing the request * for that buffer . * @ param buffer The buffer to be processed . * @ param ex The exception that occurred in the I / O threads when processing the buffer ' s request . */ final void handleProcessedBuffer ( T buffer , IOException ex ) { } }
if ( ex != null && this . exception == null ) { this . exception = ex ; } returnBuffer ( buffer ) ;
public class OperandStackStateGenerators { /** * Generates instructions to load the entire operand stack . Equivalent to calling * { @ code loadOperandStack ( markerType , storageVars , frame , 0 , 0 , frame . getStackSize ( ) ) } . * @ param markerType debug marker type * @ param storageVars variables to load operand stack from * @ param frame execution frame at the instruction where the operand stack is to be loaded * @ return instructions to load the operand stack from the storage variables * @ throws NullPointerException if any argument is { @ code null } */ public static InsnList loadOperandStack ( MarkerType markerType , StorageVariables storageVars , Frame < BasicValue > frame ) { } }
return loadOperandStack ( markerType , storageVars , frame , 0 , 0 , frame . getStackSize ( ) ) ;
public class ICalProperty { /** * Sets the property ' s parameters * @ param parameters the parameters ( cannot be null ) */ public void setParameters ( ICalParameters parameters ) { } }
if ( parameters == null ) { throw new NullPointerException ( Messages . INSTANCE . getExceptionMessage ( 16 ) ) ; } this . parameters = parameters ;
public class IOUtils { /** * Closing quietly any closeable object . Any exception will be caught ( but global error listeners will be notified ) * @ param closeable object to be closed quetly * @ return the same object provided in args * @ since 1.0 */ @ Weight ( Weight . Unit . LIGHT ) @ Nullable public static Closeable closeQuietly ( @ Nullable final Closeable closeable ) { } }
if ( closeable != null ) { try { closeable . close ( ) ; } catch ( Exception ex ) { MetaErrorListeners . fireError ( "Exception in closeQuietly" , ex ) ; } } return closeable ;
public class AbstractHibernateCriteriaBuilder { /** * Creates a Criterion that contrains a collection property by size * @ param propertyName The property name * @ param size The size to constrain by * @ return A Criterion instance */ public org . grails . datastore . mapping . query . api . Criteria sizeEq ( String propertyName , int size ) { } }
if ( ! validateSimpleExpression ( ) ) { throwRuntimeException ( new IllegalArgumentException ( "Call to [sizeEq] with propertyName [" + propertyName + "] and size [" + size + "] not allowed here." ) ) ; } propertyName = calculatePropertyName ( propertyName ) ; addToCriteria ( Restrictions . sizeEq ( propertyName , size ) ) ; return this ;
public class Ansi { /** * Prints formatted and colorized { @ code format } to { @ link System # out } * @ param format A format string whose output to be colorized * @ param args Arguments referenced by the format specifiers in the format */ public void outFormat ( String format , Object ... args ) { } }
format ( System . out , format , args ) ;
public class AbstractCaptcha { /** * 验证码写出到文件 * @ param file 文件 * @ throws IORuntimeException IO异常 */ public void write ( File file ) throws IORuntimeException { } }
try ( OutputStream out = FileUtil . getOutputStream ( file ) ) { this . write ( out ) ; } catch ( IOException e ) { throw new IORuntimeException ( e ) ; }
public class appfwfieldtype { /** * Use this API to update appfwfieldtype resources . */ public static base_responses update ( nitro_service client , appfwfieldtype resources [ ] ) throws Exception { } }
base_responses result = null ; if ( resources != null && resources . length > 0 ) { appfwfieldtype updateresources [ ] = new appfwfieldtype [ resources . length ] ; for ( int i = 0 ; i < resources . length ; i ++ ) { updateresources [ i ] = new appfwfieldtype ( ) ; updateresources [ i ] . name = resources [ i ] . name ; updateresources [ i ] . regex = resources [ i ] . regex ; updateresources [ i ] . priority = resources [ i ] . priority ; updateresources [ i ] . comment = resources [ i ] . comment ; } result = update_bulk_request ( client , updateresources ) ; } return result ;
public class DirectoryReader { /** * Recursively builds a tree of the specified rootFolder * TODO : ChrisS : Note that the URL stuff is completely wrong and should NOT be here - that is view , this is model */ public DirectoryEntries listEntries ( File rootFolder , String relativePath ) { } }
DirectoryEntries entries = new DirectoryEntries ( ) ; if ( rootFolder == null ) { return entries ; } File [ ] files = rootFolder . listFiles ( VISIBLE_NON_SERIALIZED_FILES ) ; if ( files == null ) { return entries ; } Arrays . sort ( files , new FileComparator ( ) ) ; for ( File file : files ) { String name = file . getName ( ) ; String url = getUrl ( relativePath , name ) ; entries . add ( file . isDirectory ( ) ? new FolderDirectoryEntry ( name , url , listEntries ( file , getCurrentPath ( relativePath ) + name ) ) : new FileDirectoryEntry ( name , url ) ) ; } return entries ;
public class JvmAnnotationValueImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ Override public void eUnset ( int featureID ) { } }
switch ( featureID ) { case TypesPackage . JVM_ANNOTATION_VALUE__OPERATION : setOperation ( ( JvmOperation ) null ) ; return ; } super . eUnset ( featureID ) ;
public class StreamingJobsInner { /** * Lists all of the streaming jobs in the specified resource group . * @ param resourceGroupName The name of the resource group that contains the resource . You can obtain this value from the Azure Resource Manager API or the portal . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the PagedList & lt ; StreamingJobInner & gt ; object */ public Observable < Page < StreamingJobInner > > listByResourceGroupAsync ( final String resourceGroupName ) { } }
return listByResourceGroupWithServiceResponseAsync ( resourceGroupName ) . map ( new Func1 < ServiceResponse < Page < StreamingJobInner > > , Page < StreamingJobInner > > ( ) { @ Override public Page < StreamingJobInner > call ( ServiceResponse < Page < StreamingJobInner > > response ) { return response . body ( ) ; } } ) ;
public class Emailer { /** * Send an email to the specified email list */ public void sendEmail ( final List < String > emailList , final String subject , final String body ) { } }
if ( emailList != null && ! emailList . isEmpty ( ) ) { final EmailMessage message = super . createEmailMessage ( subject , "text/html" , emailList ) ; message . setBody ( body ) ; sendEmail ( message , true , "email message " + body ) ; }
public class aaakcdaccount { /** * Use this API to delete aaakcdaccount resources of given names . */ public static base_responses delete ( nitro_service client , String kcdaccount [ ] ) throws Exception { } }
base_responses result = null ; if ( kcdaccount != null && kcdaccount . length > 0 ) { aaakcdaccount deleteresources [ ] = new aaakcdaccount [ kcdaccount . length ] ; for ( int i = 0 ; i < kcdaccount . length ; i ++ ) { deleteresources [ i ] = new aaakcdaccount ( ) ; deleteresources [ i ] . kcdaccount = kcdaccount [ i ] ; } result = delete_bulk_request ( client , deleteresources ) ; } return result ;
public class SQLExpressions { /** * As an aggregate function , DENSE _ RANK calculates the dense rank of a hypothetical row identified * by the arguments of the function with respect to a given sort specification . The arguments of * the function must all evaluate to constant expressions within each aggregate group , because they * identify a single row within each group . The constant argument expressions and the expressions * in the order _ by _ clause of the aggregate match by position . Therefore , the number of arguments * must be the same and types must be compatible . * @ param args arguments * @ return dense _ rank ( args ) */ public static WithinGroup < Long > denseRank ( Expression < ? > ... args ) { } }
return new WithinGroup < Long > ( Long . class , SQLOps . DENSERANK2 , args ) ;
public class Gauge { /** * Defines if the text of the sections should be drawn inside * the sections . This is currently only used in the SimpleSkin . * @ param VISIBLE */ public void setSectionTextVisible ( final boolean VISIBLE ) { } }
if ( null == sectionTextVisible ) { _sectionTextVisible = VISIBLE ; fireUpdateEvent ( REDRAW_EVENT ) ; } else { sectionTextVisible . set ( VISIBLE ) ; }
public class CSVUtil { /** * Load the data from CSV . * @ param csvReader * @ param offset * @ param count * @ param filter * @ param columnTypeMap * @ return */ @ SuppressWarnings ( "rawtypes" ) public static < E extends Exception > DataSet loadCSV ( final Reader csvReader , long offset , long count , final Try . Predicate < String [ ] , E > filter , final Map < String , ? extends Type > columnTypeMap ) throws UncheckedIOException , E { } }
N . checkArgument ( offset >= 0 && count >= 0 , "'offset'=%s and 'count'=%s can't be negative" , offset , count ) ; if ( N . isNullOrEmpty ( columnTypeMap ) ) { throw new IllegalArgumentException ( "columnTypeMap can't be null or empty" ) ; } final BufferedReader br = csvReader instanceof BufferedReader ? ( BufferedReader ) csvReader : Objectory . createBufferedReader ( csvReader ) ; try { List < String > tmp = new ArrayList < > ( ) ; String line = br . readLine ( ) ; jsonParser . readString ( tmp , line ) ; final String [ ] titles = tmp . toArray ( new String [ tmp . size ( ) ] ) ; final int columnCount = titles . length ; final Type < ? > [ ] columnTypes = new Type < ? > [ columnCount ] ; final List < String > columnNameList = new ArrayList < > ( columnTypeMap . size ( ) ) ; final List < List < Object > > columnList = new ArrayList < > ( columnTypeMap . size ( ) ) ; for ( int i = 0 ; i < columnCount ; i ++ ) { if ( columnTypeMap . containsKey ( titles [ i ] ) ) { columnTypes [ i ] = columnTypeMap . get ( titles [ i ] ) ; columnNameList . add ( titles [ i ] ) ; columnList . add ( new ArrayList < > ( ) ) ; } } if ( columnNameList . size ( ) != columnTypeMap . size ( ) ) { final List < String > keys = new ArrayList < > ( columnTypeMap . keySet ( ) ) ; keys . removeAll ( columnNameList ) ; throw new AbacusException ( keys + " are not included in titles: " + N . toString ( titles ) ) ; } final String [ ] strs = new String [ titles . length ] ; while ( offset -- > 0 && br . readLine ( ) != null ) { } while ( count > 0 && ( line = br . readLine ( ) ) != null ) { jsonParser . readString ( strs , line ) ; if ( filter != null && filter . test ( strs ) == false ) { continue ; } for ( int i = 0 , columnIndex = 0 ; i < columnCount ; i ++ ) { if ( columnTypes [ i ] != null ) { columnList . get ( columnIndex ++ ) . add ( columnTypes [ i ] . 
valueOf ( strs [ i ] ) ) ; } } count -- ; } return new RowDataSet ( columnNameList , columnList ) ; } catch ( IOException e ) { throw new UncheckedIOException ( e ) ; } finally { if ( br != csvReader ) { Objectory . recycle ( br ) ; } }
public class PersistentExecutorImpl { /** * Invoked by a controller to notify a persistent executor that a task has been assigned to it . * @ param taskId unique identifier for the task . * @ param nextExecTime next execution time for the task . * @ param binaryFlags combination of bits for various binary values . * @ param transactionTimeout transaction timeout . */ public void notifyOfTaskAssignment ( long taskId , long nextExecTime , short binaryFlags , int transactionTimeout ) { } }
final boolean trace = TraceComponent . isAnyTracingEnabled ( ) ; Boolean previous = inMemoryTaskIds . put ( taskId , Boolean . TRUE ) ; if ( previous == null ) { InvokerTask task = new InvokerTask ( this , taskId , nextExecTime , binaryFlags , transactionTimeout ) ; long delay = nextExecTime - new Date ( ) . getTime ( ) ; if ( trace && tc . isDebugEnabled ( ) ) Tr . debug ( PersistentExecutorImpl . this , tc , "Found task " + taskId + " for " + delay + "ms from now" ) ; scheduledExecutor . schedule ( task , delay , TimeUnit . MILLISECONDS ) ; } else { if ( trace && tc . isDebugEnabled ( ) ) Tr . debug ( PersistentExecutorImpl . this , tc , "Found task " + taskId + " already scheduled" ) ; }
public class AbstractModel { /** * { @ inheritDoc } */ @ Override protected void prepareView ( ) { } }
try { if ( view ( ) != null ) { view ( ) . prepare ( ) ; } } catch ( final CoreException ce ) { throw new CoreRuntimeException ( ce ) ; }
public class MemcachedConnection { /** * Sets the supported features from a HELLO command . * Note that the actual enabled features will be the ones supported by the mock * and also supported by the client . Currently the only supported feature is * MUTATION _ SEQNO . * @ param input The features requested by the client . */ void setSupportedFeatures ( boolean [ ] input ) { } }
if ( input . length != supportedFeatures . length ) { throw new IllegalArgumentException ( "Bad features length!" ) ; } // Scan through all other features and disable them unless they are supported for ( int i = 0 ; i < input . length ; i ++ ) { BinaryHelloCommand . Feature feature = BinaryHelloCommand . Feature . valueOf ( i ) ; if ( feature == null ) { supportedFeatures [ i ] = false ; continue ; } switch ( feature ) { case MUTATION_SEQNO : case XERROR : case XATTR : case SELECT_BUCKET : case TRACING : supportedFeatures [ i ] = input [ i ] ; break ; case SNAPPY : supportedFeatures [ i ] = input [ i ] && server . getCompression ( ) != CompressionMode . DISABLED ; break ; default : supportedFeatures [ i ] = false ; break ; } } // Post - processing if ( supportedFeatures [ BinaryHelloCommand . Feature . MUTATION_SEQNO . getValue ( ) ] ) { miw . setEnabled ( true ) ; } else { miw . setEnabled ( false ) ; }
public class Equivalencer { /** * Finds all parameters equivalent to the < tt > sourceParameter < / tt > . * Note : The first time any method in { @ link Equivalencer } is called the * equivalencing engine is instantiated , but only once . The equivalencing * engine loads the BEL equivalence files associated with the framework and * uses them do perform equivalencing . * @ param sourceNamespace { @ link Namespace } , the namespace of the source * parameter * @ param sourceValue { @ link String } , the source parameter value * @ return the equivalent parameters or an empty map if no equivalent * parameter is found * @ throws EquivalencerException Thrown if an exception occurs finding all * equivalences */ public Map < Namespace , String > equivalence ( final Namespace sourceNamespace , final String sourceValue ) throws EquivalencerException { } }
if ( sourceNamespace == null ) { throw new InvalidArgument ( "sourceNamespace" , sourceNamespace ) ; } if ( noLength ( sourceValue ) ) { throw new InvalidArgument ( "sourceValue" , sourceValue ) ; } loadEquivalencingEngine ( ) ; final Parameter sp = new Parameter ( sourceNamespace , sourceValue ) ; try { final List < Parameter > equivalences = this . paramEquivalencer . findEquivalences ( sp ) ; Map < Namespace , String > equivalenceMap = sizedHashMap ( equivalences . size ( ) ) ; for ( final Parameter equivalence : equivalences ) { equivalenceMap . put ( equivalence . getNamespace ( ) , equivalence . getValue ( ) ) ; } return equivalenceMap ; } catch ( InvalidArgument e ) { // TODO change exception when paramEquivalencer is changed return null ; } catch ( Exception e ) { final String fmt = "Unable to find equivalences for '%s'" ; final String msg = format ( fmt , sp . toBELShortForm ( ) ) ; throw new EquivalencerException ( msg , e ) ; }
public class SensitiveFilter { /** * 是否有敏感字符 * @ param source * @ return */ @ SuppressWarnings ( "rawtypes" ) public boolean hasSensitiveWord ( String source ) { } }
char [ ] chars = source . toCharArray ( ) ; Map nowMap = pool ; for ( int i = 0 ; i < chars . length ; i ++ ) { char ch = Character . toUpperCase ( chars [ i ] ) ; nowMap = ( Map ) nowMap . get ( ch ) ; if ( nowMap != null ) { if ( "1" . equals ( nowMap . get ( "isEnd" ) ) ) { return true ; } } else { nowMap = pool ; nowMap = ( Map ) nowMap . get ( ch ) ; if ( nowMap != null ) { if ( "1" . equals ( nowMap . get ( "isEnd" ) ) ) { return true ; } } else { nowMap = pool ; } } } return false ;
public class DubiousSetOfCollections { /** * implement the visitor to set up the opcode stack , and make sure that collection , set and map classes could be loaded . * @ param clsContext * the context object of the currently parsed class */ @ Override public void visitClassContext ( ClassContext clsContext ) { } }
try { if ( ( collectionCls == null ) || ( setCls == null ) || ( mapCls == null ) ) { return ; } stack = new OpcodeStack ( ) ; super . visitClassContext ( clsContext ) ; } finally { stack = null ; }
public class AbstractDocumentationMojo { /** * Convert a a package name for therelative file . * @ param packageName the name . * @ return the file . */ protected static File toPackageFolder ( String packageName ) { } }
File file = null ; for ( final String element : packageName . split ( "[.]" ) ) { // $ NON - NLS - 1 $ if ( file == null ) { file = new File ( element ) ; } else { file = new File ( file , element ) ; } } return file ;
public class AppUrlList { /** * Gets the appUrls value for this AppUrlList . * @ return appUrls * List of URLs . On SET operation , empty list indicates to clear * the list . * < span class = " constraint CollectionSize " > The maximum * size of this collection is 10 . < / span > */ public com . google . api . ads . adwords . axis . v201809 . cm . AppUrl [ ] getAppUrls ( ) { } }
return appUrls ;
public class druidGLexer { /** * $ ANTLR start " INSERT " */ public final void mINSERT ( ) throws RecognitionException { } }
try { int _type = INSERT ; int _channel = DEFAULT_TOKEN_CHANNEL ; // druidG . g : 581:11 : ( ( ' INSERT ' | ' insert ' ) ) // druidG . g : 581:12 : ( ' INSERT ' | ' insert ' ) { // druidG . g : 581:12 : ( ' INSERT ' | ' insert ' ) int alt1 = 2 ; int LA1_0 = input . LA ( 1 ) ; if ( ( LA1_0 == 'I' ) ) { alt1 = 1 ; } else if ( ( LA1_0 == 'i' ) ) { alt1 = 2 ; } else { NoViableAltException nvae = new NoViableAltException ( "" , 1 , 0 , input ) ; throw nvae ; } switch ( alt1 ) { case 1 : // druidG . g : 581:13 : ' INSERT ' { match ( "INSERT" ) ; } break ; case 2 : // druidG . g : 581:22 : ' insert ' { match ( "insert" ) ; } break ; } } state . type = _type ; state . channel = _channel ; } finally { // do for sure before leaving }
public class GetFindingsStatisticsRequest { /** * Types of finding statistics to retrieve . * @ param findingStatisticTypes * Types of finding statistics to retrieve . * @ see FindingStatisticType */ public void setFindingStatisticTypes ( java . util . Collection < String > findingStatisticTypes ) { } }
if ( findingStatisticTypes == null ) { this . findingStatisticTypes = null ; return ; } this . findingStatisticTypes = new java . util . ArrayList < String > ( findingStatisticTypes ) ;
public class CancelCapacityReservationRequest { /** * This method is intended for internal use only . Returns the marshaled request configured with additional * parameters to enable operation dry - run . */ @ Override public Request < CancelCapacityReservationRequest > getDryRunRequest ( ) { } }
Request < CancelCapacityReservationRequest > request = new CancelCapacityReservationRequestMarshaller ( ) . marshall ( this ) ; request . addParameter ( "DryRun" , Boolean . toString ( true ) ) ; return request ;
public class AbstractObjectFactory { /** * Resolves the matching constructor for the specified Class type who ' s actual public constructor argument types * are assignment compatible with the expected parameter types . * @ param objectType the Class from which the constructor is resolved . * @ param parameterTypes the array of Class types determining the resolved constructor ' s signature . * @ return a matching constructor from the specified Class type who ' s actual public constructor argument types * are assignment compatible with the expected parameter types . * @ throws NullPointerException if either the objectType or parameterTypes are null . * @ see # resolveConstructor ( Class , Class [ ] ) * @ see java . lang . Class * @ see java . lang . reflect . Constructor */ @ SuppressWarnings ( "unchecked" ) protected Constructor resolveCompatibleConstructor ( final Class < ? > objectType , final Class < ? > [ ] parameterTypes ) { } }
for ( Constructor constructor : objectType . getConstructors ( ) ) { Class [ ] constructorParameterTypes = constructor . getParameterTypes ( ) ; if ( parameterTypes . length == constructorParameterTypes . length ) { boolean match = true ; for ( int index = 0 ; index < constructorParameterTypes . length ; index ++ ) { match &= constructorParameterTypes [ index ] . isAssignableFrom ( parameterTypes [ index ] ) ; } if ( match ) { return constructor ; } } } return null ;
public class XMLUtil { /** * Replies the color that corresponds to the specified attribute ' s path . * < p > The path is an ordered list of tag ' s names and ended by the name of * the attribute . * Be careful about the fact that the names are case sensitives . * @ param document is the XML document to explore . * @ param path is the list of and ended by the attribute ' s name . * @ param defaultValue is the default value to reply . * @ return the color of the specified attribute . */ @ Pure public static Integer getAttributeColorWithDefault ( Node document , Integer defaultValue , String ... path ) { } }
assert document != null : AssertMessages . notNullParameter ( 0 ) ; return getAttributeColorWithDefault ( document , true , defaultValue , path ) ;
public class JarDiff { /** * Load all the classes from the specified URL and store information * about them in the specified map . * This currently only works for jar files , < b > not < / b > directories * which contain classes in subdirectories or in the current directory . * @ param infoMap the map to store the ClassInfo in . * @ param file the jarfile to load classes from . * @ throws IOException if there is an IOException reading info about a * class . */ private void loadClasses ( Map infoMap , File file ) throws DiffException { } }
try { JarFile jar = new JarFile ( file ) ; Enumeration e = jar . entries ( ) ; while ( e . hasMoreElements ( ) ) { JarEntry entry = ( JarEntry ) e . nextElement ( ) ; String name = entry . getName ( ) ; if ( ! entry . isDirectory ( ) && name . endsWith ( ".class" ) ) { ClassReader reader = new ClassReader ( jar . getInputStream ( entry ) ) ; ClassInfo ci = loadClassInfo ( reader ) ; infoMap . put ( ci . getName ( ) , ci ) ; } } } catch ( IOException ioe ) { throw new DiffException ( ioe ) ; }
public class HelloSignClient { /** * Retrieves a Signature Request with the given ID . * @ param id String signature ID * @ return SignatureRequest * @ throws HelloSignException thrown if there ' s a problem processing the * HTTP request or the JSON response . */ public SignatureRequest getSignatureRequest ( String id ) throws HelloSignException { } }
String url = BASE_URI + SIGNATURE_REQUEST_URI + "/" + id ; return new SignatureRequest ( httpClient . withAuth ( auth ) . get ( url ) . asJson ( ) ) ;
public class GoogleDriveUtils { /** * Downloads file from Google Drive * @ param drive drive client * @ param fileId file id for file to be downloaded * @ return file content * @ throws IOException an IOException */ public static DownloadResponse downloadFile ( Drive drive , String fileId ) throws IOException { } }
Get request = drive . files ( ) . get ( fileId ) . setAlt ( "media" ) ; String contentType = request . executeUsingHead ( ) . getContentType ( ) ; if ( StringUtils . isNotBlank ( contentType ) ) { try ( InputStream inputStream = request . executeAsInputStream ( ) ) { return new DownloadResponse ( contentType , IOUtils . toByteArray ( inputStream ) ) ; } } return null ;
public class UITabPanel { /** * Just call component . setId ( component . getId ( ) ) to reset all client ids and ensure they will be calculated for the current row , but do not waste * time dealing with row state code . * @ param parent * @ param iterateFacets * @ param restoreChildFacets */ private void restoreDescendantComponentWithoutRestoreState ( UIComponent parent , boolean iterateFacets , boolean restoreChildFacets ) { } }
if ( iterateFacets && parent . getFacetCount ( ) > 0 ) { Iterator < UIComponent > childIterator = parent . getFacets ( ) . values ( ) . iterator ( ) ; while ( childIterator . hasNext ( ) ) { UIComponent component = childIterator . next ( ) ; // reset the client id ( see spec 3.1.6) component . setId ( component . getId ( ) ) ; if ( ! component . isTransient ( ) ) { restoreDescendantComponentWithoutRestoreState ( component , restoreChildFacets , true ) ; } } } if ( parent . getChildCount ( ) > 0 ) { for ( int i = 0 ; i < parent . getChildCount ( ) ; i ++ ) { UIComponent component = parent . getChildren ( ) . get ( i ) ; // reset the client id ( see spec 3.1.6) component . setId ( component . getId ( ) ) ; if ( ! component . isTransient ( ) ) { restoreDescendantComponentWithoutRestoreState ( component , restoreChildFacets , true ) ; } } }
public class JsonRpcBasicServer { /** * Determines whether or not the given { @ link JsonNode } matches * the given type . This method is limited to a few java types * only and shouldn ' t be used to determine with great accuracy * whether or not the types match . * @ param node the { @ link JsonNode } * @ param type the { @ link Class } * @ return true if the types match , false otherwise */ @ SuppressWarnings ( "SimplifiableIfStatement" ) private boolean isMatchingType ( JsonNode node , Class < ? > type ) { } }
if ( node . isNull ( ) ) { return true ; } if ( node . isTextual ( ) ) { return String . class . isAssignableFrom ( type ) ; } if ( node . isNumber ( ) ) { return isNumericAssignable ( type ) ; } if ( node . isArray ( ) && type . isArray ( ) ) { return node . size ( ) > 0 && isMatchingType ( node . get ( 0 ) , type . getComponentType ( ) ) ; } if ( node . isArray ( ) ) { return type . isArray ( ) || Collection . class . isAssignableFrom ( type ) ; } if ( node . isBinary ( ) ) { return byteOrCharAssignable ( type ) ; } if ( node . isBoolean ( ) ) { return boolean . class . isAssignableFrom ( type ) || Boolean . class . isAssignableFrom ( type ) ; } if ( node . isObject ( ) || node . isPojo ( ) ) { return ! type . isPrimitive ( ) && ! String . class . isAssignableFrom ( type ) && ! Number . class . isAssignableFrom ( type ) && ! Boolean . class . isAssignableFrom ( type ) ; } return false ;
public class TemplateTypeMap { /** * Returns a new TemplateTypeMap with the given template types removed . Keys will only be removed * if they are unmapped . */ TemplateTypeMap remove ( Set < TemplateType > toRemove ) { } }
ImmutableList . Builder < TemplateType > keys = ImmutableList . builder ( ) ; keys . addAll ( templateKeys . subList ( 0 , templateValues . size ( ) ) ) ; for ( int i = templateValues . size ( ) ; i < templateKeys . size ( ) ; i ++ ) { TemplateType key = templateKeys . get ( i ) ; if ( ! toRemove . contains ( key ) ) { keys . add ( key ) ; } } return registry . createTemplateTypeMap ( keys . build ( ) , templateValues ) ;
public class GroovyPagesUriSupport { /** * Obtains the URI to a template using the controller name and template name * @ param controllerName The controller name * @ param templateName The template name * @ return The template URI */ public String getTemplateURI ( String controllerName , String templateName ) { } }
// Delegates to the three-argument overload with a literal "true" third
// argument; the flag's meaning is not visible from this file — TODO confirm
// against the overload's signature before relying on it.
return getTemplateURI ( controllerName , templateName , true ) ;
public class ExtensionSetting { /** * Gets the extensions value for this ExtensionSetting . * @ return extensions * The list of feed items to add or modify . */ public com . google . api . ads . adwords . axis . v201809 . cm . ExtensionFeedItem [ ] getExtensions ( ) { } }
// Plain accessor: returns the backing array directly (no defensive copy),
// so callers share mutable state with this instance.
return extensions ;
public class ConsumerSessionProxy { /** * Common start code . This is called from the start ( ) method , and also the stop ( ) method . * In the latter case , this is done to avoid blocking the start ( ) method if it is invoked * concurrently with the stop ( ) method . * Note : this method must only be called while the syncLock monitor is held . * @ throws SISessionUnavailableException * @ throws SISessionDroppedException * @ throws SIConnectionUnavailableException * @ throws SIConnectionDroppedException * @ throws SIResourceException * @ throws SIConnectionLostException * @ throws SIErrorException */ private void startInternal ( ) throws SISessionUnavailableException , SISessionDroppedException , SIConnectionUnavailableException , SIConnectionDroppedException , SIResourceException , SIConnectionLostException , SIErrorException { } }
// Marks the session STARTED, then propagates the start flow. When a proxy
// queue is registered, start is delegated to it; otherwise a start request is
// built and either exchanged synchronously (FAP level >= 3, so server-side
// ordering relative to a following transacted receive is preserved and the
// reply's completion code is mapped to the declared exceptions) or merely
// sent fire-and-forget on older FAP levels. Caller must hold syncLock
// (see javadoc above).
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( this , tc , "startInternal" ) ; state = StateEnum . STARTED ; // If there is a proxy queue registered , defer start // processing to it . Otherwise send the start // flow to our peer ourselves . if ( proxyQueue != null ) proxyQueue . start ( ) ; else { CommsByteBuffer request = getCommsByteBuffer ( ) ; // Build Message Header request . putShort ( getConnectionObjectID ( ) ) ; request . putShort ( getProxyID ( ) ) ; // At this stage we would prefer to exchange the start to ensure that the // calls are correctly ordered at the server as the next call could be a // receive with a transaction - and that will be ordered seperately . // However , we only do this if we are > = FAP3. final HandshakeProperties props = getConversation ( ) . getHandshakeProperties ( ) ; if ( props . getFapLevel ( ) >= JFapChannelConstants . FAP_VERSION_3 ) { // Pass on call to server CommsByteBuffer reply = jfapExchange ( request , JFapChannelConstants . SEG_START_SESS , JFapChannelConstants . PRIORITY_MEDIUM , true ) ; try { short err = reply . getCommandCompletionCode ( JFapChannelConstants . SEG_START_SESS_R ) ; if ( err != CommsConstants . SI_NO_EXCEPTION ) { checkFor_SISessionUnavailableException ( reply , err ) ; checkFor_SISessionDroppedException ( reply , err ) ; checkFor_SIConnectionUnavailableException ( reply , err ) ; checkFor_SIConnectionDroppedException ( reply , err ) ; checkFor_SIResourceException ( reply , err ) ; checkFor_SIConnectionLostException ( reply , err ) ; checkFor_SIErrorException ( reply , err ) ; defaultChecker ( reply , err ) ; } } finally { if ( reply != null ) reply . release ( ) ; } } else { // Just send it instead jfapSend ( request , JFapChannelConstants . SEG_START_SESS , JFapChannelConstants . PRIORITY_MEDIUM , true , ThrottlingPolicy . BLOCK_THREAD ) ; } } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr .
exit ( this , tc , "startInternal" ) ;
public class OperationAnalyzer { /** * This method analyzes the fields , calculates the info and returns true if operation is undefined . * @ param destination destination field * @ param source source field * @ return returns true if an operation between fields exists * @ see InfoOperation */ public boolean isUndefined ( final Field destination , final Field source ) { } }
// NOTE(review): the analyzer loop does not break on the first match, so the
// LAST analyzer whose conditions hold wins — presumably intentional; confirm.
// When no analyzer matches, `info` stays null and is replaced with the
// undefined-operation marker. The method returns true only when the operation
// type is undefined AND no explicit conversion method exists; otherwise a
// found conversion upgrades the instruction type before returning false.
info = null ; for ( IOperationAnalyzer analyzer : analyzers ) if ( analyzer . verifyConditions ( destination , source ) ) info = analyzer . getInfoOperation ( destination , source ) ; // if the operation has not been identified if ( isNull ( info ) ) info = undefinedOperation ( ) ; boolean conversionMethodExists = conversionAnalyzer . fieldsToCheck ( destination , source ) ; OperationType operationType = info . getOperationType ( ) ; if ( operationType . isUndefined ( ) && ! conversionMethodExists ) return true ; if ( conversionMethodExists ) // explicit conversion between primitive types info . setInstructionType ( operationType . isBasic ( ) ? OperationType . BASIC_CONVERSION // explicit conversion between complex types : OperationType . CONVERSION ) ; return false ;
public class MetadataStore { /** * Deletes a Segment and any associated information from the Metadata Store . * Notes : * - This method removes both the Segment and its Metadata Store entries . * - { @ link # clearSegmentInfo } only removes Metadata Store entries . * This operation is made of multiple steps and is restart - able . If it was only able to execute partially before being * interrupted ( by an unexpected exception or system crash ) , a reinvocation should be able to pick up from where it * left off previously . A partial invocation may leave the Segment in an undefined state , so it is highly recommended * that such an interrupted call be reinvoked until successful . * @ param segmentName The case - sensitive Segment Name . * @ param timeout Timeout for the operation . * @ return A CompletableFuture that , when completed normally , will contain a Boolean indicating whether the Segment * has been deleted ( true means there was a Segment to delete , false means there was no segment to delete ) . If the * operation failed , this will contain the exception that caused the failure . */ CompletableFuture < Boolean > deleteSegment ( String segmentName , Duration timeout ) { } }
// Flow: resolve the segment id from the in-memory container metadata. If the
// segment is mapped and not already marked deleted, route through the
// lazy-delete path so active users are notified; if it is unmapped, delete
// directly by name. In all cases the Metadata Store entries are cleared
// afterwards (StreamSegmentNotExistsException from the delete step is
// tolerated) so a partially-executed prior attempt can be completed by
// re-invocation, as described in the javadoc above.
long traceId = LoggerHelpers . traceEnterWithContext ( log , traceObjectId , "deleteSegment" , segmentName ) ; TimeoutTimer timer = new TimeoutTimer ( timeout ) ; // Find the Segment ' s Id . long segmentId = this . connector . containerMetadata . getStreamSegmentId ( segmentName , true ) ; CompletableFuture < Void > deleteSegment ; if ( isValidSegmentId ( segmentId ) ) { // This segment is currently mapped in the ContainerMetadata . if ( this . connector . containerMetadata . getStreamSegmentMetadata ( segmentId ) . isDeleted ( ) ) { // . . . but it is marked as Deleted , so nothing more we can do here . deleteSegment = CompletableFuture . completedFuture ( null ) ; } else { // Queue it up for deletion . This ensures that any component that is actively using it will be notified . deleteSegment = this . connector . getLazyDeleteSegment ( ) . apply ( segmentId , timer . getRemaining ( ) ) ; } } else { // This segment is not currently mapped in the ContainerMetadata . As such , it is safe to delete it directly . deleteSegment = this . connector . getDirectDeleteSegment ( ) . apply ( segmentName , timer . getRemaining ( ) ) ; } // It is OK if the previous action indicated the Segment was deleted . We still need to make sure that any traces // of this Segment are cleared from the Metadata Store as this invocation may be a retry of a previous partially // executed operation ( where we only managed to delete the Segment , but not clear the Metadata ) . val result = Futures . exceptionallyExpecting ( deleteSegment , ex -> ex instanceof StreamSegmentNotExistsException , null ) . thenComposeAsync ( ignored -> clearSegmentInfo ( segmentName , timer . getRemaining ( ) ) , this . executor ) ; if ( log . isTraceEnabled ( ) ) { deleteSegment . thenAccept ( v -> LoggerHelpers . traceLeave ( log , traceObjectId , "deleteSegment" , traceId , segmentName ) ) ; } return result ;
public class DolphinServlet { /** * TODO : should this method be final ? */ @ Override protected void doPost ( HttpServletRequest request , HttpServletResponse response ) throws ServletException , IOException { } }
// Request pipeline, in order: pre-process -> resolve the server-side Dolphin
// for this request -> read and decode the incoming JSON commands -> handle
// the commands -> encode the results -> write headers (before the body, as
// the servlet API requires) -> write the JSON body -> post-process.
preProcessRequest ( request ) ; DefaultServerDolphin serverDolphin = resolveServerDolphin ( request ) ; String input = readInput ( request ) ; if ( LOG . isLoggable ( Level . FINEST ) ) { LOG . finest ( "received json: " + input ) ; } List < Command > commands = decodeInput ( serverDolphin . getServerConnector ( ) . getCodec ( ) , input ) ; List < Command > results = handleCommands ( serverDolphin . getServerConnector ( ) , commands ) ; String output = encodeOutput ( serverDolphin . getServerConnector ( ) . getCodec ( ) , results ) ; writeHeaders ( request , response , results ) ; if ( LOG . isLoggable ( Level . FINEST ) ) { LOG . finest ( "sending json response: " + output ) ; } writeOutput ( response , output ) ; postProcessResponse ( response ) ;
public class HttpMessageConverter { /** * Message headers consist of standard HTTP message headers and custom headers . * This method assumes that all header entries that were not initially mapped * by header mapper implementations are custom headers . * @ param httpHeaders all message headers in their pre nature . * @ param mappedHeaders the previously mapped header entries ( all standard headers ) . * @ return The map of custom headers */ private Map < String , String > getCustomHeaders ( HttpHeaders httpHeaders , Map < String , Object > mappedHeaders ) { } }
Map < String , String > customHeaders = new HashMap < > ( ) ; for ( Map . Entry < String , List < String > > header : httpHeaders . entrySet ( ) ) { if ( ! mappedHeaders . containsKey ( header . getKey ( ) ) ) { customHeaders . put ( header . getKey ( ) , StringUtils . collectionToCommaDelimitedString ( header . getValue ( ) ) ) ; } } return customHeaders ;
public class RestClient { /** * Find assets based on the < code > searchString < / code > . * NOTE : TODO at the moment this only works when called against an unauthenticated Client * due to a problem with how the stores are defined ( the company values are defined * incorrectly ) . * @ param searchString The string to search for * @ param types The types to filter the results for * @ return The assets that match the search string and type * @ throws IOException * @ throws RequestFailureException */ @ Override public List < Asset > findAssets ( String searchString , Collection < ResourceType > types ) throws IOException , RequestFailureException { } }
String encodedSearchString = URLEncoder . encode ( searchString , "UTF-8" ) ; StringBuffer url = new StringBuffer ( "/assets?q=" + encodedSearchString ) ; if ( types != null && ! types . isEmpty ( ) ) { Collection < String > typeValues = new HashSet < String > ( ) ; for ( ResourceType type : types ) { typeValues . add ( type . getValue ( ) ) ; } url . append ( "&" + createListFilter ( FilterableAttribute . TYPE , typeValues ) ) ; } // Call massive to run the query HttpURLConnection connection = createHttpURLConnectionToMassive ( url . toString ( ) ) ; connection . setRequestMethod ( "GET" ) ; testResponseCode ( connection ) ; InputStream is = connection . getInputStream ( ) ; // take the returned input stream and convert it to assets List < Asset > assets = JSONAssetConverter . readValues ( is ) ; return assets ;
public class V2Wallet { /** * New transaction handler , incoming funds only based on BIP32 derived key * @ param tx - incoming funds transaction * @ param key - related V2Key * @ throws Exception */ public void newTransactionHandler ( PersistedTransaction tx , PersistedV2Key key ) throws Exception { } }
// Tags the incoming transaction with the key/wallet/account it belongs to,
// persists it, and pushes a BALANCE_CHANGE_RECEIVED notification to listeners
// registered under this wallet's key.
// NOTE(review): System.out.println should be replaced with the project's
// logging facility; left untouched here to keep this change comment-only.
listenForUpdates ( tx ) ; tx . keyId = key . getId ( ) ; tx . walletId = this . descriptor . getKey ( ) ; tx . account = key . account ; System . out . println ( "Incoming transaction captured in API service: " + tx . toString ( ) ) ; transactionDAO . create ( tx ) ; WalletChange change = new WalletChange ( tx . value , getBalance ( ) , createKey ( key . account ) . address , tx ) ; WalletChangeMessage out = new WalletChangeMessage ( ) ; out . setCommand ( Command . BALANCE_CHANGE_RECEIVED ) ; out . setKey ( this . descriptor . key ) ; out . setPayload ( change ) ; router . sendUpdate ( this . descriptor . key , out ) ;
public class Expression { /** * Creates a Collate expression with the given Collation specification . Commonly * the collate expression is used in the Order BY clause or the string comparison * expression ( e . g . equalTo or lessThan ) to specify how the two strings are compared . * @ param collation The collation object . * @ return A Collate expression . */ @ NonNull public Expression collate ( @ NonNull Collation collation ) { } }
if ( collation == null ) { throw new IllegalArgumentException ( "collation cannot be null." ) ; } return new CollationExpression ( this , collation ) ;
public class ExecutionItemFactory { /** * Create a workflow execution item for a plugin node step . */ public static StepExecutionItem createPluginNodeStepItem ( final String type , final Map configuration , final boolean keepgoingOnSuccess , final StepExecutionItem handler , final String label , final List < PluginConfiguration > filterConfigurations ) { } }
// Pure factory: all arguments are forwarded verbatim to the implementation
// class; no validation or defaulting happens here.
return new PluginNodeStepExecutionItemImpl ( type , configuration , keepgoingOnSuccess , handler , label , filterConfigurations ) ;
public class array { /** * Randomize the { @ code array } using the given { @ link Random } object . The used * shuffling algorithm is from D . Knuth TAOCP , Seminumerical Algorithms , * Third edition , page 142 , Algorithm S ( Selection sampling technique ) . * @ param array the array to shuffle * @ param random the PRNG */ public static double [ ] shuffle ( final double [ ] array , final Random random ) { } }
for ( int j = array . length - 1 ; j > 0 ; -- j ) { swap ( array , j , random . nextInt ( j + 1 ) ) ; } return array ;
public class AnnotationExtensions { /** * Checks if is annotation present through making a lookup if the given annotation class is * present in the given class or in one of the super classes . * @ param componentClass * the component class * @ param annotationClass * the annotation class * @ return true , if is annotation present */ public static boolean isAnnotationPresentInSuperClasses ( final Class < ? > componentClass , final Class < ? extends Annotation > annotationClass ) { } }
if ( componentClass . isAnnotationPresent ( annotationClass ) ) { return true ; } Class < ? > superClass = componentClass . getSuperclass ( ) ; while ( superClass != null ) { if ( superClass . isAnnotationPresent ( annotationClass ) ) { return true ; } superClass = superClass . getSuperclass ( ) ; } return false ;
public class ServerCommandClient { /** * Write a command to the server process . * @ param command the command to write * @ param notStartedRC the return code if the server could not be reached * @ param errorRC the return code if an error occurred while communicating * with the server * @ return { @ link ReturnCode # OK } if the command was sent , notStartedRC if * the server could not be reached , timeoutRC if the client timed * out reading a response from the server , { @ link ReturnCode # SERVER _ COMMAND _ PORT _ DISABLED _ STATUS } if the * server ' s command port listener is disabled , or errorRC if any * other communication error occurred */ private ReturnCode write ( String command , ReturnCode notStartedRC , ReturnCode errorRC ) { } }
// Protocol, in order: connect to the server's command port -> send the
// command -> receive an authorization challenge (a file name under
// commandAuthDir) -> prove write access by deleting the server-created file
// and echoing the name back -> read the reply "targetServerUUID DELIM code",
// verify the UUID matches the intended server, and map the numeric code to a
// ReturnCode. A port of -1 means the command listener is disabled; any other
// non-positive port means the server is not started. The channel is always
// closed in the finally block.
SocketChannel channel = null ; try { ServerCommandID commandID = createServerCommand ( command ) ; if ( commandID . getPort ( ) > 0 ) { channel = SelectorProvider . provider ( ) . openSocketChannel ( ) ; channel . connect ( new InetSocketAddress ( InetAddress . getByName ( null ) , commandID . getPort ( ) ) ) ; // Write command . write ( channel , commandID . getCommandString ( ) ) ; // Receive authorization challenge . String authID = read ( channel ) ; // Respond to authorization challenge . File authFile = new File ( commandAuthDir , authID ) ; // Delete a file created by the server ( check for write access ) authFile . delete ( ) ; // respond to the server to indicate the delete has happened . write ( channel , authID ) ; // Read command response . String cmdResponse = read ( channel ) , targetServerUUID = null , responseCode = null ; if ( cmdResponse . isEmpty ( ) ) { throw new IOException ( "connection closed by server without a reply" ) ; } if ( cmdResponse . indexOf ( DELIM ) != - 1 ) { targetServerUUID = cmdResponse . substring ( 0 , cmdResponse . indexOf ( DELIM ) ) ; responseCode = cmdResponse . substring ( cmdResponse . indexOf ( DELIM ) + 1 ) ; } else { targetServerUUID = cmdResponse ; } if ( ! commandID . validateTarget ( targetServerUUID ) ) { throw new IOException ( "command file mismatch" ) ; } ReturnCode result = ReturnCode . OK ; if ( responseCode != null ) { try { int returnCode = Integer . parseInt ( responseCode . trim ( ) ) ; result = ReturnCode . getEnum ( returnCode ) ; } catch ( NumberFormatException nfe ) { throw new IOException ( "invalid return code" ) ; } } if ( result == ReturnCode . INVALID ) { throw new IOException ( "invalid return code" ) ; } return result ; } if ( commandID . getPort ( ) == - 1 ) { return ReturnCode . SERVER_COMMAND_PORT_DISABLED_STATUS ; } return notStartedRC ; } catch ( ConnectException e ) { Debug . printStackTrace ( e ) ; return notStartedRC ; } catch ( IOException e ) { Debug .
printStackTrace ( e ) ; return errorRC ; } finally { Utils . tryToClose ( channel ) ; }
public class GcloudArgs { /** * Returns { @ code [ - - name , path . toString ( ) ] } or { @ code [ ] } if path is null , or its representation * is empty . */ public static List < String > get ( String name , @ Nullable Path path ) { } }
// Delegates to the shared Args helper, which formats the flag/path pair and
// handles the null/empty cases described in the javadoc above.
return Args . path ( name , path ) ;
public class CPDAvailabilityEstimatePersistenceImpl { /** * Clears the cache for all cpd availability estimates . * The { @ link EntityCache } and { @ link FinderCache } are both cleared by this method . */ @ Override public void clearCache ( ) { } }
// Clear the entity cache for all CPDAvailabilityEstimateImpl instances and
// all three finder caches (by-entity, paginated list, unpaginated list) so
// stale query results cannot be served after a bulk change.
entityCache . clearCache ( CPDAvailabilityEstimateImpl . class ) ; finderCache . clearCache ( FINDER_CLASS_NAME_ENTITY ) ; finderCache . clearCache ( FINDER_CLASS_NAME_LIST_WITH_PAGINATION ) ; finderCache . clearCache ( FINDER_CLASS_NAME_LIST_WITHOUT_PAGINATION ) ;
public class AbstractGenericTreeNode { /** * Adds the child . * @ param child * the child */ @ Override public void addChild ( final ITreeNode < T > child ) { } }
if ( children != null ) { children . add ( child ) ; } else { children = new ArrayList < > ( ) ; children . add ( child ) ; }
public class Validate { /** * Validates the CPE URI against the CPE 2.2 specification . * @ param value the value to validate * @ return the validation status given value ; * @ see us . springett . parsers . cpe . util . Status # isValid ( ) */ public static Status cpeUri ( String value ) { } }
try { String [ ] parts = value . split ( ":" ) ; if ( parts . length > 8 || parts . length == 1 || ! "cpe" . equalsIgnoreCase ( parts [ 0 ] ) ) { LOG . warn ( "The CPE (" + value + ") is invalid as it is not in the CPE 2.2 URI format" ) ; return Status . INVALID ; } if ( parts . length >= 2 && parts [ 1 ] . length ( ) == 2 ) { boolean found = false ; String a = parts [ 1 ] . substring ( 1 ) ; for ( Part p : Part . values ( ) ) { if ( p . getAbbreviation ( ) . equals ( a ) ) { found = true ; break ; } } if ( ! found ) { LOG . warn ( "The CPE (" + value + ") is invalid as it has an invalid part attribute" ) ; return Status . INVALID_PART ; } } else { LOG . warn ( "The CPE (" + value + ") is invalid as it has an invalid part attribute" ) ; return Status . INVALID_PART ; } if ( parts . length > 2 ) { if ( "*" . equals ( parts [ 2 ] ) ) { LOG . warn ( "The CPE (" + value + ") has an invalid vendor - asterisk" ) ; return Status . INVALID ; } Status s = component ( Convert . cpeUriToWellFormed ( parts [ 2 ] ) ) ; if ( ! s . isValid ( ) || "*" . equals ( parts [ 2 ] ) ) { LOG . warn ( "The CPE (" + value + ") has an invalid vendor - " + s . getMessage ( ) ) ; return s ; } } if ( parts . length > 3 ) { if ( "*" . equals ( parts [ 3 ] ) ) { LOG . warn ( "The CPE (" + value + ") has an invalid product - asterisk" ) ; return Status . INVALID ; } Status s = component ( Convert . cpeUriToWellFormed ( parts [ 3 ] ) ) ; if ( ! s . isValid ( ) ) { LOG . warn ( "The CPE (" + value + ") has an invalid product - " + s . getMessage ( ) ) ; return s ; } } if ( parts . length > 4 ) { if ( "*" . equals ( parts [ 4 ] ) ) { LOG . warn ( "The CPE (" + value + ") has an invalid version - asterisk" ) ; return Status . INVALID ; } Status s = component ( Convert . cpeUriToWellFormed ( parts [ 4 ] ) ) ; if ( ! s . isValid ( ) ) { LOG . warn ( "The CPE (" + value + ") has an invalid version - " + s . getMessage ( ) ) ; return s ; } } if ( parts . length > 5 ) { if ( "*" . 
equals ( parts [ 5 ] ) ) { LOG . warn ( "The CPE (" + value + ") has an invalid update - asterisk" ) ; return Status . INVALID ; } Status s = component ( Convert . cpeUriToWellFormed ( parts [ 5 ] ) ) ; if ( ! s . isValid ( ) ) { LOG . warn ( "The CPE (" + value + ") has an invalid update - " + s . getMessage ( ) ) ; return s ; } } if ( parts . length > 6 ) { if ( parts [ 6 ] . startsWith ( "~" ) ) { if ( countCharacter ( parts [ 6 ] , '~' ) != 5 ) { LOG . warn ( "The CPE (" + value + ") has an invalid packed edition - too many entries" ) ; return Status . INVALID ; } String [ ] unpacked = parts [ 6 ] . split ( "~" ) ; if ( unpacked . length > 1 ) { if ( "*" . equals ( unpacked [ 1 ] ) ) { LOG . warn ( "The CPE (" + value + ") has an invalid packed edition - asterisk" ) ; return Status . INVALID ; } Status s = component ( Convert . cpeUriToWellFormed ( unpacked [ 1 ] ) ) ; if ( ! s . isValid ( ) ) { LOG . warn ( "The CPE (" + value + ") has an invalid packed edition - " + s . getMessage ( ) ) ; return s ; } } if ( unpacked . length > 2 ) { if ( "*" . equals ( unpacked [ 2 ] ) ) { LOG . warn ( "The CPE (" + value + ") has an invalid packed sw_edition - asterisk" ) ; return Status . INVALID ; } Status s = component ( Convert . cpeUriToWellFormed ( unpacked [ 2 ] ) ) ; if ( ! s . isValid ( ) ) { LOG . warn ( "The CPE (" + value + ") has an invalid packed sw_edition - " + s . getMessage ( ) ) ; return s ; } } if ( unpacked . length > 3 ) { if ( "*" . equals ( unpacked [ 3 ] ) ) { LOG . warn ( "The CPE (" + value + ") has an invalid packed target_sw - asterisk" ) ; return Status . INVALID ; } Status s = component ( Convert . cpeUriToWellFormed ( unpacked [ 3 ] ) ) ; if ( ! s . isValid ( ) ) { LOG . warn ( "The CPE (" + value + ") has an invalid packed target_sw - " + s . getMessage ( ) ) ; return s ; } } if ( unpacked . length > 4 ) { if ( "*" . equals ( unpacked [ 4 ] ) ) { LOG . 
warn ( "The CPE (" + value + ") has an invalid packed target_hw - asterisk" ) ; return Status . INVALID ; } Status s = component ( Convert . cpeUriToWellFormed ( unpacked [ 4 ] ) ) ; if ( ! s . isValid ( ) ) { LOG . warn ( "The CPE (" + value + ") has an invalid packed target_hw - " + s . getMessage ( ) ) ; return s ; } } if ( unpacked . length > 5 ) { if ( "*" . equals ( unpacked [ 5 ] ) ) { LOG . warn ( "The CPE (" + value + ") has an invalid packed other - asterisk" ) ; return Status . INVALID ; } Status s = component ( Convert . cpeUriToWellFormed ( unpacked [ 5 ] ) ) ; if ( ! s . isValid ( ) ) { LOG . warn ( "The CPE (" + value + ") has an invalid packed other - " + s . getMessage ( ) ) ; return s ; } } } else { if ( "*" . equals ( parts [ 6 ] ) ) { LOG . warn ( "The CPE (" + value + ") has an invalid edition - asterisk" ) ; return Status . INVALID ; } Status s = component ( Convert . cpeUriToWellFormed ( parts [ 6 ] ) ) ; if ( ! s . isValid ( ) ) { LOG . warn ( "The CPE (" + value + ") has an invalid edition - " + s . getMessage ( ) ) ; return s ; } } } if ( parts . length > 7 ) { if ( "*" . equals ( parts [ 7 ] ) ) { LOG . warn ( "The CPE (" + value + ") has an invalid language - asterisk" ) ; return Status . INVALID ; } Status s = component ( Convert . cpeUriToWellFormed ( parts [ 7 ] ) ) ; if ( ! s . isValid ( ) ) { LOG . warn ( "The CPE (" + value + ") has an invalid language - " + s . getMessage ( ) ) ; return s ; } } } catch ( CpeEncodingException ex ) { LOG . warn ( "The CPE (" + value + ") has an unencoded special characters" ) ; return Status . INVALID ; } return Status . VALID ;
public class DocxService { /** * Load and return an in - memory representation of a docx . * This is public API because building the in - memory structure can be * quite slow . Thus , clients can use this method to cache the in - memory * structure , and pass in to either * { @ link # merge ( String , WordprocessingMLPackage , OutputStream , MatchingPolicy ) } * or { @ link # merge ( org . w3c . dom . Document , org . docx4j . openpackaging . packages . WordprocessingMLPackage , java . io . OutputStream , org . isisaddons . module . docx . dom . DocxService . MatchingPolicy , org . isisaddons . module . docx . dom . DocxService . OutputType ) } */ @ Programmatic public WordprocessingMLPackage loadPackage ( final InputStream docxTemplate ) throws LoadTemplateException { } }
final WordprocessingMLPackage docxPkg ; try { docxPkg = WordprocessingMLPackage . load ( docxTemplate ) ; } catch ( final Docx4JException ex ) { throw new LoadTemplateException ( "Unable to load docx template from input stream" , ex ) ; } return docxPkg ;
public class SchemaSet { /** * Returns < tt > true < / tt > if this set contains the specified element . More * formally , only returns < tt > true < / tt > if this set contains an * element < code > e < / code > such that < code > ( o = = null ? e = = null : * o . equals ( e ) ) < / code > . * It is possible for a false negative to be returned , if the element is added * after the safeTable reference has been set , or if memory hasn ' t yet been * written back . However , this is benign & will merely cause a schema to be sent * unnecessarily . When the add method is called to add that element the existing * entry will be found , so we won ' t end up with a duplicate . * Synchronizing to avoid the possibility of false negatives would be * unnecessarily expensive . * @ param o element whose presence in this set is to be tested . * @ return < tt > true < / tt > if this set contains the specified element . * @ throws ClassCastException if the type of the specified element * is incompatible with this set ( optional ) . * @ throws NullPointerException if the specified element is null and this * set does not support null elements ( optional ) . */ public boolean contains ( Object o ) { } }
// Lock-free read: a local snapshot of `table` is taken because the array may
// be replaced by a concurrent resize; false negatives are tolerated by design
// (see javadoc above). A return of 0 from Entry.contains(id, 0) is treated as
// a hit — NOTE(review): confirm that convention against the Entry class.
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( this , tc , "contains" , debugId ( o ) ) ; boolean result = false ; /* It should always be an Id so just cast it . If someone is using the */ /* class for the wrong purpose they will get a ClassCastException , which */ /* is permissable . */ Long id = ( Long ) o ; /* Because the table could be ' resized ' ( and therefore replaced ) during */ /* the method we get a local ref to the current one & use it throughout . */ Entry [ ] safeTable = table ; /* NPE is also permissable , if someone is using the class incorrectly . */ int i = hashToTable ( id , safeTable ) ; /* Search the appropriate Entry list from the table . */ if ( safeTable [ i ] != null ) { Entry current = safeTable [ i ] ; if ( current . contains ( id , 0 ) == 0 ) { result = true ; } } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( this , tc , "contains" , result ) ; return result ;
public class ConfigurableGlobDatasetFinder { /** * Finds all directories satisfying the input glob pattern , and creates a { @ link org . apache . gobblin . data . management . retention . dataset . CleanableDataset } * for each one using { @ link # datasetAtPath } . * @ return List of { @ link org . apache . gobblin . data . management . retention . dataset . CleanableDataset } s in the file system . * @ throws IOException */ @ Override public List < T > findDatasets ( ) throws IOException { } }
List < T > datasets = Lists . newArrayList ( ) ; LOG . info ( "Finding datasets for pattern " + this . datasetPattern ) ; FileStatus [ ] fileStatuss = this . getDatasetDirs ( ) ; if ( fileStatuss != null ) { for ( FileStatus fileStatus : fileStatuss ) { Path pathToMatch = PathUtils . getPathWithoutSchemeAndAuthority ( fileStatus . getPath ( ) ) ; if ( this . blacklist . isPresent ( ) && this . blacklist . get ( ) . matcher ( pathToMatch . toString ( ) ) . find ( ) ) { continue ; } if ( this . globPatternBlacklist . isPresent ( ) && this . globPatternBlacklist . get ( ) . matcher ( pathToMatch . toString ( ) ) . find ( ) ) { continue ; } LOG . info ( "Found dataset at " + fileStatus . getPath ( ) ) ; datasets . add ( datasetAtPath ( PathUtils . getPathWithoutSchemeAndAuthority ( fileStatus . getPath ( ) ) ) ) ; } } return datasets ;
public class StorageWriter { /** * Calculates the amount of time that should be used as a timeout for WriterDataSource reads . The following rules * are taken into consideration : * * If at least one SegmentAggregator needs to flush right away , the timeout returned is 0. * * The returned timeout is the amount of time until the first SegmentAggregator is due to flush . * * The returned timeout ( except in the first case ) is bounded by WriterConfig . MinReadTimeout and WriterConfig . MaxReadTimeout . */ private Duration getReadTimeout ( ) { } }
// Find the minimum expiration time among all SegmentAggregators . long maxTimeMillis = this . config . getMaxReadTimeout ( ) . toMillis ( ) ; long minTimeMillis = this . config . getMinReadTimeout ( ) . toMillis ( ) ; long timeMillis = maxTimeMillis ; for ( ProcessorCollection a : this . processors . values ( ) ) { if ( a . mustFlush ( ) ) { // We found a SegmentAggregator that needs to flush right away . No need to search anymore . timeMillis = 0 ; break ; } timeMillis = MathHelpers . minMax ( this . config . getFlushThresholdTime ( ) . minus ( a . getElapsedSinceLastFlush ( ) ) . toMillis ( ) , minTimeMillis , timeMillis ) ; } return Duration . ofMillis ( timeMillis ) ;
public class AnyOf { /** * Returns true if this condition is compatible with the given test case properties . * A condition is < em > " compatible " < / em > with these properties if it is already satisfied * or if it could be satisfied with the addition of more properties . */ public boolean compatible ( PropertySet properties ) { } }
boolean isCompatible ; Iterator < ICondition > conditions ; for ( conditions = getConditions ( ) , isCompatible = ! conditions . hasNext ( ) ; ! isCompatible && conditions . hasNext ( ) ; isCompatible = conditions . next ( ) . compatible ( properties ) ) ; return isCompatible ;
public class BaseFileCopier { /** * Return a remote destination temp dir path for the given node . If specified , the node attribute named { @ value * # FILE _ COPY _ DESTINATION _ DIR } is used , otherwise a temp directory appropriate for the os - family of the node is * returned . * @ param node the node entry * @ param project project * @ param framework framework * @ return a path to destination dir for the node */ public static String getRemoteDirForNode ( final INodeEntry node , final IRundeckProject project , final IFramework framework ) { } }
// Pure delegation: the shared util instance resolves the node's destination directory
// (node attribute override vs. os-family default) per the contract documented above.
return util . getRemoteDirForNode ( node , project , framework ) ;
public class LessParser { /** * Parse a parameter list for a function . * @ return the operation */ @ Nonnull Operation parseParameterList ( ) { } }
// Parses a ';'-separated list of expressions until the closing ')', folding each
// expression into `left` via concat with the ';' operator (nesting is tracked so
// nested constructs parse correctly). Any other terminator is a syntax error.
// An empty list yields a bare Operation; if the folded result is itself a ','- or
// ';'-Operation it is returned as-is, otherwise it is wrapped in a ','-Operation.
Expression left = null ; char ch ; do { nesting ++ ; Expression expr = parseExpression ( ( char ) 0 ) ; nesting -- ; left = concat ( left , ';' , expr ) ; ch = read ( ) ; } while ( ch == ';' ) ; if ( ch != ')' ) { throw createException ( "Unrecognized input: '" + ch + "'" ) ; } if ( left == null ) { return new Operation ( reader ) ; } if ( left . getClass ( ) == Operation . class ) { switch ( ( ( Operation ) left ) . getOperator ( ) ) { case ',' : case ';' : return ( Operation ) left ; } } return new Operation ( reader , left , ',' ) ;
public class AWSApplicationDiscoveryClient { /** * Lists exports as specified by ID . All continuous exports associated with your user account can be listed if you * call < code > DescribeContinuousExports < / code > as is without passing any parameters . * @ param describeContinuousExportsRequest * @ return Result of the DescribeContinuousExports operation returned by the service . * @ throws AuthorizationErrorException * The AWS user account does not have permission to perform the action . Check the IAM policy associated with * this account . * @ throws InvalidParameterException * One or more parameters are not valid . Verify the parameters and try again . * @ throws InvalidParameterValueException * The value of one or more parameters are either invalid or out of range . Verify the parameter values and * try again . * @ throws ServerInternalErrorException * The server experienced an internal error . Try again . * @ throws OperationNotPermittedException * This operation is not permitted . * @ throws ResourceNotFoundException * The specified configuration ID was not located . Verify the configuration ID and try again . * @ sample AWSApplicationDiscovery . DescribeContinuousExports */ @ Override public DescribeContinuousExportsResult describeContinuousExports ( DescribeContinuousExportsRequest request ) { } }
// Standard generated-client pattern: run pre-execution hooks (handler chains,
// request mutation), then delegate to the internal execute method.
request = beforeClientExecution ( request ) ; return executeDescribeContinuousExports ( request ) ;
public class GroupAdministrationHelper { /** * Delete a group from the group store * @ param key key of the group to be deleted * @ param user performing the delete operation */ public void deleteGroup ( String key , IPerson deleter ) { } }
// Authorization is checked first; then the group is detached from every parent
// (each parent's membership is persisted via updateMembers) before the group
// itself is deleted, so no parent retains a dangling child reference.
if ( ! canDeleteGroup ( deleter , key ) ) { throw new RuntimeAuthorizationException ( deleter , IPermission . DELETE_GROUP_ACTIVITY , key ) ; } log . info ( "Deleting group with key " + key ) ; // find the current version of this group entity IEntityGroup group = GroupService . findGroup ( key ) ; // remove this group from the membership list of any current parent // groups for ( IEntityGroup parent : group . getParentGroups ( ) ) { parent . removeChild ( group ) ; parent . updateMembers ( ) ; } // delete the group group . delete ( ) ;
public class AWSIotClient { /** * Describe a thing group . * @ param describeThingGroupRequest * @ return Result of the DescribeThingGroup operation returned by the service . * @ throws InvalidRequestException * The request is not valid . * @ throws ThrottlingException * The rate exceeds the limit . * @ throws InternalFailureException * An unexpected error has occurred . * @ throws ResourceNotFoundException * The specified resource does not exist . * @ sample AWSIot . DescribeThingGroup */ @ Override public DescribeThingGroupResult describeThingGroup ( DescribeThingGroupRequest request ) { } }
// Standard generated-client pattern: run pre-execution hooks, then delegate to
// the internal execute method for the actual service call.
request = beforeClientExecution ( request ) ; return executeDescribeThingGroup ( request ) ;
public class Source { /** * Sets the rolloff factor for attenuation . */ public void setRolloffFactor ( float rolloff ) { } }
if ( _rolloffFactor != rolloff ) { AL10 . alSourcef ( _id , AL10 . AL_ROLLOFF_FACTOR , _rolloffFactor = rolloff ) ; }
public class SerializerObjectMarshallingStrategy { /** * { @ inheritDoc } */ @ Override public byte [ ] marshal ( Context context , ObjectOutputStream os , Object object ) throws IOException { } }
// Delegates entirely to the configured serializer; the context and the provided
// ObjectOutputStream are intentionally unused by this strategy.
return _serializer . serialize ( object , Object . class ) ;
public class NilpotentMaximum { /** * Computes the nilpotent maximum of two membership function values * @ param a is a membership function value * @ param b is a membership function value * @ return ` \ begin { cases } \ max ( a , b ) & \ mbox { if $ a + b < 0 $ } \ cr 1 & * \ mbox { otherwise } \ end { cases } ` */ @ Override public double compute ( double a , double b ) { } }
if ( Op . isLt ( a + b , 1.0 ) ) { return Op . max ( a , b ) ; } return 1.0 ;
public class RequestPatternTransformer { /** * Returns a RequestPatternBuilder matching a given Request */ @ Override public RequestPatternBuilder apply ( Request request ) { } }
final RequestPatternBuilder builder = new RequestPatternBuilder ( request . getMethod ( ) , urlEqualTo ( request . getUrl ( ) ) ) ; if ( headers != null && ! headers . isEmpty ( ) ) { for ( Map . Entry < String , CaptureHeadersSpec > header : headers . entrySet ( ) ) { String headerName = header . getKey ( ) ; if ( request . containsHeader ( headerName ) ) { CaptureHeadersSpec spec = header . getValue ( ) ; StringValuePattern headerMatcher = new EqualToPattern ( request . getHeader ( headerName ) , spec . getCaseInsensitive ( ) ) ; builder . withHeader ( headerName , headerMatcher ) ; } } } byte [ ] body = request . getBody ( ) ; if ( bodyPatternFactory != null && body != null && body . length > 0 ) { builder . withRequestBody ( bodyPatternFactory . forRequest ( request ) ) ; } return builder ;
public class AbstractAmazonSimpleEmailServiceAsync { /** * Simplified method form for invoking the GetSendQuota operation with an AsyncHandler . * @ see # getSendQuotaAsync ( GetSendQuotaRequest , com . amazonaws . handlers . AsyncHandler ) */ @ Override public java . util . concurrent . Future < GetSendQuotaResult > getSendQuotaAsync ( com . amazonaws . handlers . AsyncHandler < GetSendQuotaRequest , GetSendQuotaResult > asyncHandler ) { } }
// Convenience overload: forwards to the two-argument form with a default
// (empty) GetSendQuotaRequest.
return getSendQuotaAsync ( new GetSendQuotaRequest ( ) , asyncHandler ) ;
public class CobolDataItem { /** * Pretty printing for a condition entry . * @ param sb the string builder */ private void toStringCondition ( final StringBuilder sb ) { } }
if ( getConditionLiterals ( ) . size ( ) > 0 ) { toStringList ( sb , getConditionLiterals ( ) , "conditionLiterals" ) ; } if ( getConditionRanges ( ) . size ( ) > 0 ) { toStringList ( sb , getConditionRanges ( ) , "conditionRanges" ) ; }
public class StyledNamingConvention { public void dispose ( ) { } }
for ( final Iterator < Resources [ ] > it = existCheckerArrays . values ( ) . iterator ( ) ; it . hasNext ( ) ; ) { final Resources [ ] array = it . next ( ) ; for ( int i = 0 ; i < array . length ; ++ i ) { array [ i ] . close ( ) ; } } existCheckerArrays . clear ( ) ; initialized = false ;
public class SoapParser { /** * A method to parse and valdate the response of a SOAP server . * @ param xml * the SOAP message to retrieve and validate . May be an * InputStream object or a Document object . * @ param instruction * the SOAP Parser CTL excerpt * @ param logger * the PrintWriter to log all results to * @ return null if there were errors , the parsed document otherwise : it can * be the SOAP message , the SOAP message body content or a SOAP fult * @ author Simone Gianfranceschi */ private Document parse ( Object xml , Element instruction , PrintWriter logger ) throws Exception { } }
// Flow: (1) obtain the SOAP Document from either an InputStream or a Document
// (anything else is rejected); (2) a SOAP fault short-circuits to fault parsing;
// (3) otherwise validate, log a combined "N validation errors and M warnings
// detected." summary, and null out the result if any validation error occurred;
// (4) honor the instruction's "return" attribute: "content" returns just the
// SOAP body, anything else returns the full (possibly null) message.
Document soapMessage = null ; String returnType = instruction . getAttribute ( "return" ) ; // envelope or // content ErrorHandlerImpl eh = new ErrorHandlerImpl ( "Parsing" , logger ) ; if ( xml instanceof InputStream ) { soapMessage = SoapUtils . getSOAPMessage ( ( InputStream ) xml ) ; } else if ( xml instanceof Document ) { soapMessage = ( Document ) xml ; } else { throw new Exception ( "Error: Invalid xml object" ) ; } if ( soapMessage != null && isSoapFault ( soapMessage ) ) { return parseSoapFault ( soapMessage , logger ) ; } eh . setRole ( "Validation" ) ; this . validateSoapMessage ( soapMessage , eh ) ; // Print errors int error_count = eh . getErrorCount ( ) ; int warning_count = eh . getWarningCount ( ) ; if ( error_count > 0 || warning_count > 0 ) { String msg = "" ; if ( error_count > 0 ) { msg += error_count + " validation error" + ( error_count == 1 ? "" : "s" ) ; if ( warning_count > 0 ) { msg += " and " ; } } if ( warning_count > 0 ) { msg += warning_count + " warning" + ( warning_count == 1 ? "" : "s" ) ; } msg += " detected." ; logger . println ( msg ) ; } if ( error_count > 0 ) { soapMessage = null ; } if ( soapMessage != null && returnType . equals ( "content" ) ) { return SoapUtils . getSoapBody ( soapMessage ) ; } return soapMessage ;
public class ZKUtil { /** * Helper to produce a valid path from variadic strings . */ public static String path ( String ... components ) { } }
String path = components [ 0 ] ; for ( int i = 1 ; i < components . length ; i ++ ) { path = ZKUtil . joinZKPath ( path , components [ i ] ) ; } return path ;
public class WWindow { /** * Returns a dynamic URL that this wwindow component can be accessed from . * @ return the URL to access this wwindow component . */ public String getUrl ( ) { } }
Environment env = getEnvironment ( ) ; Map < String , String > parameters = env . getHiddenParameters ( ) ; parameters . put ( WWINDOW_REQUEST_PARAM_KEY , getId ( ) ) ; // Override the step count with WWindow step parameters . put ( Environment . STEP_VARIABLE , String . valueOf ( getStep ( ) ) ) ; String url = env . getWServletPath ( ) ; return WebUtilities . getPath ( url , parameters , true ) ;
public class AsyncTwitterImpl { /** * / * Suggested Users Resources */ @ Override public void getUserSuggestions ( final String categorySlug ) { } }
// Schedules the fetch on the dispatcher; the task retrieves the suggestions and
// fans the result out to every registered listener, catching and logging each
// listener's exception individually so one bad listener cannot starve the rest.
getDispatcher ( ) . invokeLater ( new AsyncTask ( USER_SUGGESTIONS , listeners ) { @ Override public void invoke ( List < TwitterListener > listeners ) throws TwitterException { ResponseList < User > users = twitter . getUserSuggestions ( categorySlug ) ; for ( TwitterListener listener : listeners ) { try { listener . gotUserSuggestions ( users ) ; } catch ( Exception e ) { logger . warn ( "Exception at getUserSuggestions" , e ) ; } } } } ) ;
public class DataMaskingRulesInner { /** * Creates or updates a database data masking rule . * @ param resourceGroupName The name of the resource group that contains the resource . You can obtain this value from the Azure Resource Manager API or the portal . * @ param serverName The name of the server . * @ param databaseName The name of the database . * @ param dataMaskingRuleName The name of the data masking rule . * @ param parameters The required parameters for creating or updating a data masking rule . * @ param serviceCallback the async ServiceCallback to handle successful and failed responses . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the { @ link ServiceFuture } object */ public ServiceFuture < DataMaskingRuleInner > createOrUpdateAsync ( String resourceGroupName , String serverName , String databaseName , String dataMaskingRuleName , DataMaskingRuleInner parameters , final ServiceCallback < DataMaskingRuleInner > serviceCallback ) { } }
// Adapter overload: wraps the Observable-returning variant in a ServiceFuture
// that also notifies the supplied callback.
return ServiceFuture . fromResponse ( createOrUpdateWithServiceResponseAsync ( resourceGroupName , serverName , databaseName , dataMaskingRuleName , parameters ) , serviceCallback ) ;
public class CacheEntry { /** * Return an input stream to the layer . Has side effect of setting the * appropriate Content - Type , Content - Length and Content - Encoding headers * in the response . * @ param request * the request object * @ param sourceMapResult * ( Output ) mutable object reference to the source map . May be null * if source maps are not being requested . * @ return The InputStream for the built layer * @ throws IOException */ public InputStream getInputStream ( HttpServletRequest request , MutableObject < byte [ ] > sourceMapResult ) throws IOException { } }
// The read order (bytes before filename) is deliberate and mirrors the reverse
// write order — do not reorder. Three paths: (1) data still in memory — stream
// it directly; (2) only a filename — either stream the raw file (no source map)
// or de-serialize a CacheData object holding both bytes and source map;
// (3) neither — the entry is unusable, so an IOException is thrown.
// NOTE(review): the exact memory-visibility guarantees depend on how the hidden
// instance fields are published elsewhere in this class — confirm before changing.
byte [ ] bytes = this . bytes ; byte [ ] sourceMap = this . sourceMap ; String filename = this . filename ; InputStream result = null ; if ( bytes != null ) { // Cache data is already in memory . Don ' t need to de - serialize it . result = new ByteArrayInputStream ( bytes ) ; if ( sourceMapResult != null && sourceMapSize > 0 ) { sourceMapResult . setValue ( sourceMap ) ; } } else if ( filename != null ) { // De - serialize data from cache ICacheManager cmgr = ( ( IAggregator ) request . getAttribute ( IAggregator . AGGREGATOR_REQATTRNAME ) ) . getCacheManager ( ) ; File file = new File ( cmgr . getCacheDir ( ) , filename ) ; if ( sourceMapSize == 0 ) { // No source map data in cache entry so just stream the file . result = new FileInputStream ( file ) ; } else { // Entry contains source map data so that means it ' s a serialized CacheData // instance . De - serialize the object and extract the data . CacheData data ; ObjectInputStream is = new ObjectInputStream ( new FileInputStream ( file ) ) ; try { data = ( CacheData ) is . readObject ( ) ; } catch ( ClassNotFoundException e ) { throw new IOException ( e . getMessage ( ) , e ) ; } finally { IOUtils . closeQuietly ( is ) ; } bytes = data . bytes ; sourceMap = data . sourceMap ; if ( sourceMapResult != null ) { sourceMapResult . setValue ( sourceMap ) ; } result = new ByteArrayInputStream ( bytes ) ; } } else { throw new IOException ( ) ; } return result ;
public class ReportServiceLogger { /** * Logs the specified request and response information . * < p > Note that in order to avoid any temptation to consume the contents of the response , this * does < em > not < / em > take an { @ link com . google . api . client . http . HttpResponse } object , but instead * accepts the status code and message from the response . */ public void logRequest ( @ Nullable HttpRequest request , int statusCode , @ Nullable String statusMessage ) { } }
// Bails out early when neither summary nor detail logging is enabled for this
// outcome. Otherwise assembles a RemoteCallReturn from request/response info,
// attaching a synthetic ReportException ("<code>: <message>") on failure, and
// hands it to the delegate for both summary and detail logging.
boolean isSuccess = HttpStatusCodes . isSuccess ( statusCode ) ; if ( ! loggerDelegate . isSummaryLoggable ( isSuccess ) && ! loggerDelegate . isDetailsLoggable ( isSuccess ) ) { return ; } // Populate the RequestInfo builder from the request . RequestInfo requestInfo = buildRequestInfo ( request ) ; // Populate the ResponseInfo builder from the response . ResponseInfo responseInfo = buildResponseInfo ( request , statusCode , statusMessage ) ; RemoteCallReturn . Builder remoteCallReturnBuilder = new RemoteCallReturn . Builder ( ) . withRequestInfo ( requestInfo ) . withResponseInfo ( responseInfo ) ; if ( ! isSuccess ) { remoteCallReturnBuilder . withException ( new ReportException ( String . format ( "%s: %s" , statusCode , statusMessage ) ) ) ; } RemoteCallReturn remoteCallReturn = remoteCallReturnBuilder . build ( ) ; loggerDelegate . logRequestSummary ( remoteCallReturn ) ; loggerDelegate . logRequestDetails ( remoteCallReturn ) ;
public class DecimalFormat { /** * Appends the passed { @ code suffix } chars to given result * { @ code container } . Updates { @ code fastPathData . lastFreeIndex } * accordingly . * @ param suffix The suffix characters to append to result . * @ param len The number of chars to append . * @ param container Char array container which to append the suffix */ private void appendSuffix ( char [ ] suffix , int len , char [ ] container ) { } }
// Fast-path copy tuned by suffix length: a single char is assigned directly;
// 2..4 chars are written by hand (first, last, then middle chars as needed),
// which the JDK measured to beat System.arraycopy at these sizes; longer
// suffixes fall back to System.arraycopy. The exact index arithmetic is
// length-dependent — do not "simplify" without re-checking each len case.
int startIndex = fastPathData . lastFreeIndex ; // If suffix to append is only 1 char long , just assigns this char . // If suffix is less or equal 4 , we use a dedicated algorithm that // has shown to run faster than System . arraycopy . // If more than 4 , we use System . arraycopy . if ( len == 1 ) container [ startIndex ] = suffix [ 0 ] ; else if ( len <= 4 ) { int dstLower = startIndex ; int dstUpper = dstLower + len - 1 ; int srcUpper = len - 1 ; container [ dstLower ] = suffix [ 0 ] ; container [ dstUpper ] = suffix [ srcUpper ] ; if ( len > 2 ) container [ ++ dstLower ] = suffix [ 1 ] ; if ( len == 4 ) container [ -- dstUpper ] = suffix [ 2 ] ; } else System . arraycopy ( suffix , 0 , container , startIndex , len ) ; fastPathData . lastFreeIndex += len ;
public class Ifc4PackageImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ Override public EClass getIfcBuildingElement ( ) { } }
// Generated EMF lazy accessor: resolves the EClass from the registered package
// by fixed classifier index (58) on first use, then caches it. Not synchronized;
// a benign race can resolve the same classifier twice.
if ( ifcBuildingElementEClass == null ) { ifcBuildingElementEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( Ifc4Package . eNS_URI ) . getEClassifiers ( ) . get ( 58 ) ; } return ifcBuildingElementEClass ;
public class Humanize { /** * Computes both past and future relative dates . * E . g . ' one day ago ' , ' one day from now ' , ' 10 years ago ' , ' 3 minutes from * now ' , ' right now ' and so on . * @ param reference * The reference * @ param duration * The duration * @ return String representing the relative date */ public static String naturalTime ( Date reference , Date duration ) { } }
// Delegates to the thread-local context's relative-date formatter.
return context . get ( ) . formatRelativeDate ( reference , duration ) ;
public class MaxConnectionIdleManager { /** * There are no outstanding RPCs on the transport . */ void onTransportIdle ( ) { } }
// Marks the transport inactive. If idle monitoring has not started (no future),
// nothing else to do. Otherwise: a completed future means the previous shutdown
// task already fired (presumably while the transport was active and the shutdown
// was delayed — confirm against shutdownTask), so a fresh shutdown is scheduled
// a full maxConnectionIdle from now; a pending future just records the next
// monitor deadline for that task to check.
// NOTE(review): thread-safety of these field writes depends on the caller's
// locking, which is not visible here — verify before modifying.
isActive = false ; if ( shutdownFuture == null ) { return ; } if ( shutdownFuture . isDone ( ) ) { shutdownDelayed = false ; shutdownFuture = scheduler . schedule ( shutdownTask , maxConnectionIdleInNanos , TimeUnit . NANOSECONDS ) ; } else { nextIdleMonitorTime = ticker . nanoTime ( ) + maxConnectionIdleInNanos ; }
public class JsonConvert { /** * Returns a non-null value that is an object composed of any combination of String, ArrayList and HashMap. * @ param in the JSON input stream; returns null when the stream itself is null * @ return the decoded object (unchecked cast to the caller's requested type V) */ public < V > V convertFrom ( final InputStream in ) { } }
// Null stream short-circuits to null; otherwise a fresh AnyDecoder reads the
// stream and produces the generic String/ArrayList/HashMap structure. The cast
// to V is unchecked by design — the caller asserts the expected shape.
if ( in == null ) return null ; return ( V ) new AnyDecoder ( factory ) . convertFrom ( new JsonStreamReader ( in ) ) ;
public class Benchmark { /** * Sphere function with noise */ static public double sphere_noise ( double [ ] x ) { } }
double sum = 0.0 ; for ( int i = 0 ; i < x . length ; i ++ ) { sum += x [ i ] * x [ i ] ; } // NOISE // Comment the next line to remove the noise sum *= ( 1.0 + 0.1 * Math . abs ( random . nextGaussian ( ) ) ) ; return ( sum ) ;
public class Ifc2x3tc1PackageImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public EClass getIfcClassification ( ) { } }
// Generated EMF lazy accessor: resolves the EClass from the registered package
// by fixed classifier index (79) on first use, then caches it. Not synchronized;
// a benign race can resolve the same classifier twice.
if ( ifcClassificationEClass == null ) { ifcClassificationEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( Ifc2x3tc1Package . eNS_URI ) . getEClassifiers ( ) . get ( 79 ) ; } return ifcClassificationEClass ;
public class JobOperationsController { /** * Borrowed from CommandLineJobRunner . * @ param job * the job that we need to find the next parameters for * @ return the next job parameters if they can be located * @ throws JobParametersNotFoundException * if there is a problem */ private JobParameters getNextJobParameters ( Job job ) throws JobParametersNotFoundException { } }
String jobIdentifier = job . getName ( ) ; JobParameters jobParameters ; List < JobInstance > lastInstances = jobExplorer . getJobInstances ( jobIdentifier , 0 , 1 ) ; JobParametersIncrementer incrementer = job . getJobParametersIncrementer ( ) ; if ( lastInstances . isEmpty ( ) ) { jobParameters = incrementer . getNext ( new JobParameters ( ) ) ; if ( jobParameters == null ) { throw new JobParametersNotFoundException ( "No bootstrap parameters found from incrementer for job=" + jobIdentifier ) ; } } else { List < JobExecution > lastExecutions = jobExplorer . getJobExecutions ( lastInstances . get ( 0 ) ) ; jobParameters = incrementer . getNext ( lastExecutions . get ( 0 ) . getJobParameters ( ) ) ; } return jobParameters ;
public class LineItemCreativeAssociationStats { /** * Sets the creativeSetStats value for this LineItemCreativeAssociationStats . * @ param creativeSetStats * A map containing { @ link Stats } objects for each creative belonging * to * a creative set , { @ code null } for non creative set * associations . */ public void setCreativeSetStats ( com . google . api . ads . admanager . axis . v201808 . Long_StatsMapEntry [ ] creativeSetStats ) { } }
// Plain generated setter; the array reference is stored without copying.
this . creativeSetStats = creativeSetStats ;