signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class JDBCSQLXML { /** * Retrieves a new Result for setting the XML value designated by this
* SQLXML instance .
* @ param resultClass The class of the result , or null .
* @ throws java . sql . SQLException if there is an error processing the XML
* value or the state is not writable
* @ return for setting the XML value designated by this SQLXML instance . */
protected < T extends Result > T createResult ( Class < T > resultClass ) throws SQLException { } } | checkWritable ( ) ; setWritable ( false ) ; setReadable ( true ) ; if ( JAXBResult . class . isAssignableFrom ( resultClass ) ) { // Must go first presently , since JAXBResult extends SAXResult
// ( purely as an implmentation detail ) and it ' s not possible
// to instantiate a valid JAXBResult with a Zero - Args
// constructor ( or any subclass thereof , due to the finality of
// its private UnmarshallerHandler )
// FALL THROUGH . . . will throw an exception
} else if ( ( resultClass == null ) || StreamResult . class . isAssignableFrom ( resultClass ) ) { return createStreamResult ( resultClass ) ; } else if ( DOMResult . class . isAssignableFrom ( resultClass ) ) { return createDOMResult ( resultClass ) ; } else if ( SAXResult . class . isAssignableFrom ( resultClass ) ) { return createSAXResult ( resultClass ) ; } else if ( StAXResult . class . isAssignableFrom ( resultClass ) ) { return createStAXResult ( resultClass ) ; } throw Util . invalidArgument ( "resultClass: " + resultClass ) ; |
public class AnalysisCache { /** * Analyze a method .
* @ param classContext
* ClassContext storing method analysis objects for method ' s
* class
* @ param analysisClass
* class the method analysis object should belong to
* @ param methodDescriptor
* method descriptor identifying the method to analyze
* @ return the computed analysis object for the method
* @ throws CheckedAnalysisException */
@ SuppressWarnings ( "unchecked" ) private < E > E analyzeMethod ( ClassContext classContext , Class < E > analysisClass , MethodDescriptor methodDescriptor ) throws CheckedAnalysisException { } } | IMethodAnalysisEngine < E > engine = ( IMethodAnalysisEngine < E > ) methodAnalysisEngineMap . get ( analysisClass ) ; if ( engine == null ) { throw new IllegalArgumentException ( "No analysis engine registered to produce " + analysisClass . getName ( ) ) ; } Profiler profiler = getProfiler ( ) ; profiler . start ( engine . getClass ( ) ) ; try { return engine . analyze ( this , methodDescriptor ) ; } finally { profiler . end ( engine . getClass ( ) ) ; } |
public class ValidateApplicationMojo { /** * Reports errors ( in the logger and in a file ) .
* @ param errors
* @ throws IOException */
private void reportErrors ( Collection < RoboconfError > errors ) throws IOException { } } | // Add a log entry
getLog ( ) . info ( "Generating a report for validation errors under " + MavenPluginConstants . VALIDATION_RESULT_PATH ) ; // Generate the report ( file and console too )
StringBuilder sb = MavenPluginUtils . formatErrors ( errors , getLog ( ) ) ; // Write the report .
// Reporting only makes sense when there is an error or a warning .
File targetFile = new File ( this . project . getBasedir ( ) , MavenPluginConstants . VALIDATION_RESULT_PATH ) ; Utils . createDirectory ( targetFile . getParentFile ( ) ) ; Utils . writeStringInto ( sb . toString ( ) , targetFile ) ; |
public class BdbUtil { /** * Creates a unique directory for housing a BDB environment , and returns
* its name .
* @ param prefix a prefix for the temporary directory ' s name . Cannot be
* < code > null < / code > .
* @ param suffix a suffix for the temporary directory ' s name .
* @ return the environment name to use .
* @ param directory the parent directory to use .
* @ throws IOException if an error occurred in creating the temporary
* directory . */
public static String uniqueEnvironment ( String prefix , String suffix , File directory ) throws IOException { } } | File tmpDir = UNIQUE_DIRECTORY_CREATOR . create ( prefix , suffix , directory ) ; String randomFilename = UUID . randomUUID ( ) . toString ( ) ; File envNameAsFile = new File ( tmpDir , randomFilename ) ; return envNameAsFile . getAbsolutePath ( ) ; |
public class JobClasspathHelper { /** * This method creates an file that contains a line with a MD5 sum
* @ param fs
* FileSystem where to create the file .
* @ param md5sum
* The string containing the MD5 sum .
* @ param remoteMd5Path
* The path where to save the file .
* @ throws IOException */
private void createMd5SumFile ( FileSystem fs , String md5sum , Path remoteMd5Path ) throws IOException { } } | FSDataOutputStream os = null ; try { os = fs . create ( remoteMd5Path , true ) ; os . writeBytes ( md5sum ) ; os . flush ( ) ; } catch ( Exception e ) { LOG . error ( "{}" , e ) ; } finally { if ( os != null ) { os . close ( ) ; } } |
public class XmlMarshallingValidationCallback { /** * Creates the payload source for unmarshalling .
* @ param payload
* @ return */
private Source getPayloadSource ( Object payload ) { } } | Source source = null ; if ( payload instanceof String ) { source = new StringSource ( ( String ) payload ) ; } else if ( payload instanceof File ) { source = new StreamSource ( ( File ) payload ) ; } else if ( payload instanceof Document ) { source = new DOMSource ( ( Document ) payload ) ; } else if ( payload instanceof Source ) { source = ( Source ) payload ; } if ( source == null ) { throw new CitrusRuntimeException ( "Failed to create payload source for unmarshalling message" ) ; } return source ; |
public class JPAValidator { /** * Obtains the underlying javax . validation . Validator instance that is used by
* this wrapper .
* The underlying javax . validation . Validator instance is obtained in one of
* two ways .
* If this JPAValidatorWrapper was obtained from the JPAValidatorFactory ,
* then the Validator instance is obtained from the ValidatorFactory instance .
* On the other hand , if this JPAValidatorWrapper was obtained from the
* JPAValidatorContextWrapper , then the Validator instance is obtained from the
* ValidatorContext , and the specified TraversableResolver , MessageInterpolator ,
* and ConstraintValidatorFactory instances are used as part of this process . */
private void obtainValidator ( ) { } } | final boolean isTraceOn = TraceComponent . isAnyTracingEnabled ( ) ; if ( fromValidatorFactory ) { if ( isTraceOn && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "Obtaining Validator instance from ValidatorFactory..." ) ; } ValidatorFactory validatorFactory = ivValidatorFactoryLocator . getValidatorFactory ( ) ; ivValidator = validatorFactory . getValidator ( ) ; } else { if ( isTraceOn && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "Obtaining Validator instance from ValidatorContext, using TraversableResolver " + ivSpecifiedTraversableResolver + ", message interpolator " + ivSpecifiedMessageInterpolator + ", parameter name provider " + ivSpecifiedParameterNameProvider + ", and constraint validator factory " + ivSpecifiedConstraintValidatorFactory ) ; } ValidatorFactory validatorFactory = ivValidatorFactoryLocator . getValidatorFactory ( ) ; ValidatorContext validatorContext = validatorFactory . usingContext ( ) ; if ( ivSpecifiedTraversableResolver != null ) { validatorContext . traversableResolver ( ivSpecifiedTraversableResolver ) ; } if ( ivSpecifiedMessageInterpolator != null ) { validatorContext . messageInterpolator ( ivSpecifiedMessageInterpolator ) ; } if ( ivSpecifiedConstraintValidatorFactory != null ) { validatorContext . constraintValidatorFactory ( ivSpecifiedConstraintValidatorFactory ) ; } if ( ivSpecifiedParameterNameProvider != null ) validatorContext . parameterNameProvider ( ivSpecifiedParameterNameProvider ) ; ivValidator = validatorContext . getValidator ( ) ; } if ( isTraceOn && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "Obtained the Validator: " + ivValidator ) ; } |
public class DRL5Expressions { /** * $ ANTLR start synpred17 _ DRL5Expressions */
public final void synpred17_DRL5Expressions_fragment ( ) throws RecognitionException { } } | // src / main / resources / org / drools / compiler / lang / DRL5Expressions . g : 521:8 : ( LEFT _ PAREN primitiveType )
// src / main / resources / org / drools / compiler / lang / DRL5Expressions . g : 521:9 : LEFT _ PAREN primitiveType
{ match ( input , LEFT_PAREN , FOLLOW_LEFT_PAREN_in_synpred17_DRL5Expressions2586 ) ; if ( state . failed ) return ; pushFollow ( FOLLOW_primitiveType_in_synpred17_DRL5Expressions2588 ) ; primitiveType ( ) ; state . _fsp -- ; if ( state . failed ) return ; } |
public class SmbFile { /** * If the path of this < code > SmbFile < / code > falls within a DFS volume ,
* this method will return the referral path to which it maps . Otherwise
* < code > null < / code > is returned . */
public String getDfsPath ( ) throws SmbException { } } | resolveDfs ( null ) ; if ( dfsReferral == null ) { return null ; } String path = "smb1:/" + dfsReferral . server + "/" + dfsReferral . share + unc ; path = path . replace ( '\\' , '/' ) ; if ( isDirectory ( ) ) { path += '/' ; } return path ; |
public class Utils { /** * Checks the server for an updated list of Calendars ( in the background ) .
* If a Calendar is added on the web ( and it is selected and not
* hidden ) then it will be added to the list of calendars on the phone
* ( when this finishes ) . When a new calendar from the
* web is added to the phone , then the events for that calendar are also
* downloaded from the web .
* This sync is done automatically in the background when the
* SelectCalendars activity and fragment are started .
* @ param account - The account to sync . May be null to sync all accounts . */
public static void startCalendarMetafeedSync ( Account account ) { } } | Bundle extras = new Bundle ( ) ; extras . putBoolean ( ContentResolver . SYNC_EXTRAS_MANUAL , true ) ; extras . putBoolean ( "metafeedonly" , true ) ; ContentResolver . requestSync ( account , Calendars . CONTENT_URI . getAuthority ( ) , extras ) ; |
public class CommerceOrderPaymentPersistenceImpl { /** * Clears the cache for all commerce order payments .
* The { @ link EntityCache } and { @ link FinderCache } are both cleared by this method . */
@ Override public void clearCache ( ) { } } | entityCache . clearCache ( CommerceOrderPaymentImpl . class ) ; finderCache . clearCache ( FINDER_CLASS_NAME_ENTITY ) ; finderCache . clearCache ( FINDER_CLASS_NAME_LIST_WITH_PAGINATION ) ; finderCache . clearCache ( FINDER_CLASS_NAME_LIST_WITHOUT_PAGINATION ) ; |
public class Labeling { /** * labeling observation sequences .
* @ param file contains a list of observation sequence , this file has a format wich can be read by DataReader
* @ return string representing label sequences , the format is specified by writer */
public String strLabeling ( File file ) { } } | List < Sentence > obsvSeqs = dataReader . readFile ( file . getPath ( ) ) ; List lblSeqs = labeling ( obsvSeqs ) ; String ret = dataWriter . writeString ( lblSeqs ) ; return ret ; |
import java . io . * ; import java . lang . * ; import java . util . * ; class MaximumAdjacentProduct { /** * This function calculates the maximum product of two adjacent elements in an integer list .
* Args :
* integer _ list : A list of integers
* Returns :
* The highest product of two adjacent numbers in the list .
* Examples :
* > > > maximum _ adjacent _ product ( [ 1 , 2 , 3 , 4 , 5 , 6 ] )
* 30
* > > > maximum _ adjacent _ product ( [ 1 , 2 , 3 , 4 , 5 ] )
* 20
* > > > maximum _ adjacent _ product ( [ 2 , 3 ] ) */
public static int maximumAdjacentProduct ( List < Integer > integer_list ) { } } | int max_product = integer_list . get ( 0 ) * integer_list . get ( 1 ) ; for ( int i = 1 ; i < integer_list . size ( ) - 1 ; i ++ ) { int product = integer_list . get ( i ) * integer_list . get ( i + 1 ) ; if ( product > max_product ) { max_product = product ; } } return max_product ; |
public class UnitQuaternions { /** * The orientation represents the rotation of the principal axes with
* respect to the axes of the coordinate system ( unit vectors [ 1,0,0 ] ,
* [ 0,1,0 ] and [ 0,0,1 ] ) .
* The orientation can be expressed as a unit quaternion .
* @ param points
* array of Point3d
* @ return the orientation of the point cloud as a unit quaternion */
public static Quat4d orientation ( Point3d [ ] points ) { } } | MomentsOfInertia moi = new MomentsOfInertia ( ) ; for ( Point3d p : points ) moi . addPoint ( p , 1.0 ) ; // Convert rotation matrix to quaternion
Quat4d quat = new Quat4d ( ) ; quat . set ( moi . getOrientationMatrix ( ) ) ; return quat ; |
public class DecisionTableImpl { /** * Each hit results in one output value ( multiple outputs are collected into a single context value ) */
private Object hitToOutput ( EvaluationContext ctx , FEEL feel , DTDecisionRule rule ) { } } | List < CompiledExpression > outputEntries = rule . getOutputEntry ( ) ; Map < String , Object > values = ctx . getAllValues ( ) ; if ( outputEntries . size ( ) == 1 ) { Object value = feel . evaluate ( outputEntries . get ( 0 ) , values ) ; return value ; } else { Map < String , Object > output = new HashMap < > ( ) ; for ( int i = 0 ; i < outputs . size ( ) ; i ++ ) { output . put ( outputs . get ( i ) . getName ( ) , feel . evaluate ( outputEntries . get ( i ) , values ) ) ; } return output ; } |
public class EnumUtil { /** * long重新解析为若干个枚举值 , 用于使用long保存多个选项的情况 . */
public static < E extends Enum < E > > EnumSet < E > processBits ( final Class < E > enumClass , final long value ) { } } | return EnumUtils . processBitVector ( enumClass , value ) ; |
public class Preconditions { /** * Checks if the URI is valid for streaming to .
* @ param uri The URI to check
* @ return The passed in URI if it is valid
* @ throws IllegalArgumentException if the URI is not valid . */
public static URI checkValidStream ( URI uri ) throws IllegalArgumentException { } } | String scheme = checkNotNull ( uri ) . getScheme ( ) ; scheme = checkNotNull ( scheme , "URI is missing a scheme" ) . toLowerCase ( ) ; if ( rtps . contains ( scheme ) ) { return uri ; } if ( udpTcp . contains ( scheme ) ) { if ( uri . getPort ( ) == - 1 ) { throw new IllegalArgumentException ( "must set port when using udp or tcp scheme" ) ; } return uri ; } throw new IllegalArgumentException ( "not a valid output URL, must use rtp/tcp/udp scheme" ) ; |
public class MaintenanceScheduleHelper { /** * expression or duration is wrong ) , we simply return empty value */
@ SuppressWarnings ( "squid:S1166" ) public static Optional < ZonedDateTime > getNextMaintenanceWindow ( final String cronSchedule , final String duration , final String timezone ) { } } | try { final ExecutionTime scheduleExecutor = ExecutionTime . forCron ( getCronFromExpression ( cronSchedule ) ) ; final ZonedDateTime now = ZonedDateTime . now ( ZoneOffset . of ( timezone ) ) ; final ZonedDateTime after = now . minus ( convertToISODuration ( duration ) ) ; final ZonedDateTime next = scheduleExecutor . nextExecution ( after ) ; return Optional . of ( next ) ; } catch ( final RuntimeException ignored ) { return Optional . empty ( ) ; } |
public class MachinetagsApi { /** * Return a list of unique namespace and predicate pairs , optionally limited by predicate or namespace , in alphabetical order .
* < br >
* This method does not require authentication .
* @ param namespace ( Optional ) Limit the list of pairs returned to those that have this namespace .
* @ param predicate ( Optional ) Limit the list of pairs returned to those that have this predicate .
* @ param perPage Number of photos to return per page . If this argument is less than 1 , it defaults to 100 . The maximum allowed value is 500.
* @ param page The page of results to return . If this argument is less than 1 , it defaults to 1.
* @ param sign if true , the request will be signed .
* @ return object containing a list of unique namespace and predicate parts .
* @ throws JinxException if there are any errors .
* @ see < a href = " https : / / www . flickr . com / services / api / flickr . machinetags . getPairs . html " > flickr . machinetags . getPairs < / a > */
public Pairs getPairs ( String namespace , String predicate , int perPage , int page , boolean sign ) throws JinxException { } } | Map < String , String > params = new TreeMap < > ( ) ; params . put ( "method" , "flickr.machinetags.getPairs" ) ; if ( ! JinxUtils . isNullOrEmpty ( namespace ) ) { params . put ( "namespace" , namespace ) ; } if ( ! JinxUtils . isNullOrEmpty ( predicate ) ) { params . put ( "predicate" , predicate ) ; } if ( perPage > 0 ) { params . put ( "per_page" , Integer . toString ( perPage ) ) ; } if ( page > 0 ) { params . put ( "page" , Integer . toString ( page ) ) ; } return jinx . flickrGet ( params , Pairs . class , sign ) ; |
public class LTPAConfigurationImpl { /** * When the configuration is modified ,
* < pre >
* 1 . If file name and expiration changed ,
* then remove the file monitor registration and reload LTPA keys .
* 2 . Else if only the monitor interval changed ,
* then remove the file monitor registration and optionally create a new file monitor .
* 3 . ( Implicit ) Else if only the key password changed ,
* then do not remove the file monitor registration and do not reload the LTPA keys .
* < / pre > */
protected void modified ( Map < String , Object > props ) { } } | String oldKeyImportFile = keyImportFile ; Long oldKeyTokenExpiration = keyTokenExpiration ; Long oldMonitorInterval = monitorInterval ; loadConfig ( props ) ; if ( isKeysConfigChanged ( oldKeyImportFile , oldKeyTokenExpiration ) ) { unsetFileMonitorRegistration ( ) ; Tr . audit ( tc , "LTPA_KEYS_TO_LOAD" , keyImportFile ) ; setupRuntimeLTPAInfrastructure ( ) ; } else if ( isMonitorIntervalChanged ( oldMonitorInterval ) ) { unsetFileMonitorRegistration ( ) ; optionallyCreateFileMonitor ( ) ; } |
public class InvocationHandlerAdapter { /** * Creates an implementation for any instance of an { @ link java . lang . reflect . InvocationHandler } that delegates
* all method interceptions to the given instance which will be stored in a { @ code static } field .
* @ param invocationHandler The invocation handler to which all method calls are delegated .
* @ param fieldName The name of the field .
* @ return An implementation that delegates all method interceptions to the given invocation handler . */
public static InvocationHandlerAdapter of ( InvocationHandler invocationHandler , String fieldName ) { } } | return new ForInstance ( fieldName , CACHED , UNPRIVILEGED , Assigner . DEFAULT , invocationHandler ) ; |
public class Async { /** * Starts the server . */
public void start ( ) { } } | try { jmsServer . setConfiguration ( config ) ; jmsServer . setJmsConfiguration ( jmsConfig ) ; jmsServer . start ( ) ; ConnectionFactory connectionFactory = ( ConnectionFactory ) jmsServer . lookup ( "/cf" ) ; if ( connectionFactory == null ) { throw new AsyncException ( "Failed to start EmbeddedJMS server due to previous errors." ) ; } consumerConnection = connectionFactory . createConnection ( ) ; receiverSessionPool = new SessionPool ( "Consumer" , consumerConnection ) ; producerConnection = connectionFactory . createConnection ( ) ; senderSessionPool = new SessionPool ( "Producer" , producerConnection ) ; configureListeners ( injector , queueConfigsList ) ; started = true ; } catch ( Exception e ) { throw new AsyncException ( e ) ; } |
public class DefaultCurrencyUnitDataProvider { /** * loads a file */
private List < String > loadFromFile ( String fileName ) throws Exception { } } | try ( InputStream in = getClass ( ) . getResourceAsStream ( fileName ) ) { if ( in == null ) { throw new FileNotFoundException ( "Data file " + fileName + " not found" ) ; } BufferedReader reader = new BufferedReader ( new InputStreamReader ( in , "UTF-8" ) ) ; String line ; List < String > content = new ArrayList < > ( ) ; while ( ( line = reader . readLine ( ) ) != null ) { content . add ( line ) ; } return content ; } |
public class PrefixedProperties { /** * ( non - Javadoc )
* @ see java . util . Hashtable # contains ( java . lang . Object ) */
@ Override public boolean contains ( final Object value ) { } } | lock . readLock ( ) . lock ( ) ; try { if ( value == null ) { return false ; } for ( @ SuppressWarnings ( "rawtypes" ) final Map . Entry entry : entrySet ( ) ) { final Object otherValue = entry . getValue ( ) ; if ( otherValue != null && otherValue . equals ( value ) ) { return true ; } } return false ; } finally { lock . readLock ( ) . unlock ( ) ; } |
public class XMLScanListener { /** * Do whatever processing that needs to be done on this file . */
public void moveThisFile ( File fileSource , File fileDestDir , String strDestName ) { } } | try { fileDestDir . mkdirs ( ) ; FileInputStream fileIn = new FileInputStream ( fileSource ) ; InputStreamReader inStream = new InputStreamReader ( fileIn ) ; StreamSource source = new StreamSource ( inStream ) ; System . out . println ( fileDestDir + " " + strDestName ) ; File fileDest = new File ( fileDestDir , strDestName ) ; fileDest . createNewFile ( ) ; FileOutputStream fileOut = new FileOutputStream ( fileDest ) ; PrintWriter dataOut = new PrintWriter ( fileOut ) ; StreamResult dest = new StreamResult ( dataOut ) ; m_transformer . transform ( source , dest ) ; dataOut . close ( ) ; fileOut . close ( ) ; inStream . close ( ) ; fileIn . close ( ) ; } catch ( TransformerException ex ) { ex . printStackTrace ( ) ; } catch ( FileNotFoundException ex ) { ex . printStackTrace ( ) ; } catch ( IOException ex ) { ex . printStackTrace ( ) ; } |
public class CharUtils { /** * < p > Converts the character to the Integer it represents , throwing an
* exception if the character is not numeric . < / p >
* < p > This method converts the char ' 1 ' to the int 1 and so on . < / p >
* < pre >
* CharUtils . toIntValue ( null , - 1 ) = - 1
* CharUtils . toIntValue ( ' 3 ' , - 1 ) = 3
* CharUtils . toIntValue ( ' A ' , - 1 ) = - 1
* < / pre >
* @ param ch the character to convert
* @ param defaultValue the default value to use if the character is not numeric
* @ return the int value of the character */
public static int toIntValue ( final Character ch , final int defaultValue ) { } } | if ( ch == null ) { return defaultValue ; } return toIntValue ( ch . charValue ( ) , defaultValue ) ; |
public class Node { /** * - - other */
public void xslt ( Transformer transformer , Node dest ) throws IOException , TransformerException { } } | try ( InputStream in = newInputStream ( ) ; OutputStream out = dest . newOutputStream ( ) ) { transformer . transform ( new StreamSource ( in ) , new StreamResult ( out ) ) ; } |
public class Vector3i { /** * { @ inheritDoc } */
@ Override public void normalize ( ) { } } | double norm ; norm = 1. / Math . sqrt ( this . x * this . x + this . y * this . y + this . z * this . z ) ; this . x *= norm ; this . y *= norm ; this . z *= norm ; |
public class VirtualNetworksInner { /** * Gets all virtual networks in a subscription .
* @ param nextPageLink The NextLink from the previous successful call to List operation .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the PagedList & lt ; VirtualNetworkInner & gt ; object */
public Observable < Page < VirtualNetworkInner > > listNextAsync ( final String nextPageLink ) { } } | return listNextWithServiceResponseAsync ( nextPageLink ) . map ( new Func1 < ServiceResponse < Page < VirtualNetworkInner > > , Page < VirtualNetworkInner > > ( ) { @ Override public Page < VirtualNetworkInner > call ( ServiceResponse < Page < VirtualNetworkInner > > response ) { return response . body ( ) ; } } ) ; |
public class Utility { /** * Encode a run , possibly a degenerate run ( of < 4 values ) .
* @ param length The length of the run ; must be > 0 & & < = 0xFF . */
private static final < T extends Appendable > void encodeRun ( T buffer , byte value , int length , byte [ ] state ) { } } | if ( length < 4 ) { for ( int j = 0 ; j < length ; ++ j ) { if ( value == ESCAPE_BYTE ) appendEncodedByte ( buffer , ESCAPE_BYTE , state ) ; appendEncodedByte ( buffer , value , state ) ; } } else { if ( ( byte ) length == ESCAPE_BYTE ) { if ( value == ESCAPE_BYTE ) appendEncodedByte ( buffer , ESCAPE_BYTE , state ) ; appendEncodedByte ( buffer , value , state ) ; -- length ; } appendEncodedByte ( buffer , ESCAPE_BYTE , state ) ; appendEncodedByte ( buffer , ( byte ) length , state ) ; appendEncodedByte ( buffer , value , state ) ; // Don ' t need to escape this value
} |
public class ZWaveFrameDecoder { /** * Creates a Z - Wave DataFrame from a ByteBuf .
* @ param buf the buffer to process
* @ return a DataFrame instance ( or null if a valid one wasn ' t found ) */
private DataFrame createDataFrame ( ByteBuf buf ) { } } | if ( buf . readableBytes ( ) > 3 ) { byte messageType = buf . getByte ( buf . readerIndex ( ) + 3 ) ; switch ( messageType ) { case Version . ID : return new Version ( buf ) ; case MemoryGetId . ID : return new MemoryGetId ( buf ) ; case InitData . ID : return new InitData ( buf ) ; case NodeProtocolInfo . ID : return new NodeProtocolInfo ( buf ) ; case SendData . ID : return new SendData ( buf ) ; case ApplicationCommand . ID : return new ApplicationCommand ( buf ) ; case ApplicationUpdate . ID : return new ApplicationUpdate ( buf ) ; case RequestNodeInfo . ID : return new RequestNodeInfo ( buf ) ; case GetRoutingInfo . ID : return new GetRoutingInfo ( buf ) ; case GetSUCNodeId . ID : return new GetSUCNodeId ( buf ) ; case AddNodeToNetwork . ID : return new AddNodeToNetwork ( buf ) ; case RemoveNodeFromNetwork . ID : return new RemoveNodeFromNetwork ( buf ) ; case SetDefault . ID : return new SetDefault ( buf ) ; } } return null ; |
public class IOUtilities { /** * Transfers bytes from an input stream to an output stream .
* Callers of this method are responsible for closing the streams
* since they are the ones that opened the streams . */
public static void transfer ( InputStream in , OutputStream out , TransferCallback cb ) throws IOException { } } | byte [ ] bytes = new byte [ TRANSFER_BUFFER ] ; int count ; while ( ( count = in . read ( bytes ) ) != - 1 ) { out . write ( bytes , 0 , count ) ; if ( cb != null ) { cb . bytesTransferred ( bytes , count ) ; if ( cb . isCancelled ( ) ) { break ; } } } |
public class MessagesApi { /** * Get Normalized Message Histogram
* Get Histogram on normalized messages .
* @ param startDate Timestamp of earliest message ( in milliseconds since epoch ) . ( required )
* @ param endDate Timestamp of latest message ( in milliseconds since epoch ) . ( required )
* @ param sdid Source device ID of the messages being searched . ( optional )
* @ param field Message field being queried for building histogram . ( optional )
* @ param interval Interval of time for building histogram blocks . ( Valid values : minute , hour , day , month , year ) ( optional )
* @ return AggregatesHistogramResponse
* @ throws ApiException If fail to call the API , e . g . server error or cannot deserialize the response body */
public AggregatesHistogramResponse getAggregatesHistogram ( Long startDate , Long endDate , String sdid , String field , String interval ) throws ApiException { } } | ApiResponse < AggregatesHistogramResponse > resp = getAggregatesHistogramWithHttpInfo ( startDate , endDate , sdid , field , interval ) ; return resp . getData ( ) ; |
public class IndexingConfigurationBuilder { /** * Enable or disable indexing .
* @ deprecated Use { @ link # index ( Index ) } instead */
@ Deprecated public IndexingConfigurationBuilder enabled ( boolean enabled ) { } } | Attribute < Index > index = attributes . attribute ( INDEX ) ; if ( index . get ( ) == Index . NONE & enabled ) index . set ( Index . ALL ) ; else if ( ! enabled ) index . set ( Index . NONE ) ; return this ; |
public class RunJobFlowRequest { /** * For Amazon EMR releases 4.0 and later . The list of configurations supplied for the EMR cluster you are creating .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setConfigurations ( java . util . Collection ) } or { @ link # withConfigurations ( java . util . Collection ) } if you want
* to override the existing values .
* @ param configurations
* For Amazon EMR releases 4.0 and later . The list of configurations supplied for the EMR cluster you are
* creating .
* @ return Returns a reference to this object so that method calls can be chained together . */
public RunJobFlowRequest withConfigurations ( Configuration ... configurations ) { } } | if ( this . configurations == null ) { setConfigurations ( new com . amazonaws . internal . SdkInternalList < Configuration > ( configurations . length ) ) ; } for ( Configuration ele : configurations ) { this . configurations . add ( ele ) ; } return this ; |
public class ProbeManagerImpl { /** * Update the appropriate collections to reflect recently activated probes
* for the specified listener .
* @ param listener the listener with recently activated probes
* @ param probes the collection of probes that were activated */
public void addActiveProbesforListener ( ProbeListener listener , Collection < ProbeImpl > probes ) { } } | // Add the probes for the specified listener . This is purely additive .
// Since a listener ' s configuration can ' t be updated after registration ,
// we ' re adding probes for recently initialized / modified classes .
addProbesByListener ( listener , probes ) ; for ( ProbeImpl probe : probes ) { addListenerByProbe ( probe , listener ) ; } |
public class CmsModulesEdit { /** * Creates the dialog HTML for all defined widgets of the named dialog ( page ) . < p >
* @ param dialog the dialog ( page ) to get the HTML for
* @ return the dialog HTML for all defined widgets of the named dialog ( page ) */
@ Override protected String createDialogHtml ( String dialog ) { } } | StringBuffer result = new StringBuffer ( 1024 ) ; // create table
result . append ( createWidgetTableStart ( ) ) ; // show error header once if there were validation errors
result . append ( createWidgetErrorHeader ( ) ) ; if ( dialog . equals ( PAGES [ 0 ] ) ) { result . append ( dialogBlockStart ( key ( "label.moduleinformation" ) ) ) ; result . append ( createWidgetTableStart ( ) ) ; result . append ( createDialogRowsHtml ( 0 , 7 ) ) ; result . append ( createWidgetTableEnd ( ) ) ; result . append ( dialogBlockEnd ( ) ) ; result . append ( dialogBlockStart ( key ( "label.modulecreator" ) ) ) ; result . append ( createWidgetTableStart ( ) ) ; result . append ( createDialogRowsHtml ( 8 , 9 ) ) ; result . append ( createWidgetTableEnd ( ) ) ; result . append ( dialogBlockEnd ( ) ) ; result . append ( dialogBlockStart ( key ( "label.moduleexportmode" ) ) ) ; result . append ( createWidgetTableStart ( ) ) ; result . append ( createDialogRowsHtml ( 10 , 10 ) ) ; result . append ( createWidgetTableEnd ( ) ) ; result . append ( dialogBlockEnd ( ) ) ; if ( CmsStringUtil . isEmpty ( m_module . getName ( ) ) ) { result . append ( dialogBlockStart ( key ( "label.modulefolder" ) ) ) ; result . append ( createWidgetTableStart ( ) ) ; result . append ( createDialogRowsHtml ( 11 , 17 ) ) ; result . append ( createWidgetTableEnd ( ) ) ; result . append ( dialogBlockEnd ( ) ) ; } } // close table
result . append ( createWidgetTableEnd ( ) ) ; return result . toString ( ) ; |
public class CachedDiscoveryService { /** * ~ Methods * * * * * */
@ Override public void dispose ( ) { } } | super . dispose ( ) ; _cacheService . dispose ( ) ; _discoveryService . dispose ( ) ; _executorService . shutdown ( ) ; _disposeExecutorService ( ) ; |
public class Ifc2x3tc1PackageImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
public EClass getIfcTextAlignment ( ) { } } | if ( ifcTextAlignmentEClass == null ) { ifcTextAlignmentEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( Ifc2x3tc1Package . eNS_URI ) . getEClassifiers ( ) . get ( 745 ) ; } return ifcTextAlignmentEClass ; |
public class PropertiesManager { /** * Load the current value of the given property from the file without
* modifying the values of any other properties . In other words ,
* { @ link # isModified ( Object ) } will return < code > false < / code > for the given
* property after this call completes , but it will return < code > true < / code >
* for any other properties that have been modified since the last load or
* save . < br >
* < br >
* This method will block and wait for the property to be loaded . See
* { @ link # loadPropertyNB ( Object ) } for a non - blocking version .
* @ param property
* the property to load
* @ throws IOException
* if there is an error while attempting to read the property
* from the file */
public void loadProperty ( T property ) throws IOException { } } | try { Future < Void > task = loadPropertyNB ( property ) ; task . get ( ) ; } catch ( ExecutionException e ) { Throwable t = e . getCause ( ) ; if ( t instanceof IOException ) { throw ( IOException ) t ; } throw new IOException ( t ) ; } catch ( InterruptedException e ) { throw new IOException ( "Loading of the property " + property + " from file \"" + getFile ( ) . getAbsolutePath ( ) + "\" was interrupted." ) ; } |
public class DataFrameInputStream {
    /**
     * Indicates that the current record has finished processing. When invoked, the
     * DataFrameInputStream will be repositioned at the start of the next record. This
     * method only needs to be called upon a successful record read. If an IOException
     * occurred while reading data from a record, either the DataFrameInputStream will be
     * auto-closed or a subsequent call to beginRecord() will reposition the stream
     * accordingly.
     *
     * @return A RecordInfo containing metadata about the record that just ended, such as
     *         addressing information.
     * @throws IOException If an IO Exception occurred.
     * @throws DurableDataLogException If a non-IO Exception has occurred, usually thrown
     *         by the underlying DurableDataLog.
     */
    DataFrameRecord . RecordInfo endRecord ( ) throws IOException , DurableDataLogException {
        // Snapshot the metadata accumulated while this record was being read.
        DataFrameRecord . RecordInfo r = this . currentRecordBuilder . build ( ) ;
        // Skip forward past any remaining entries of the current record so the
        // stream ends up positioned at the start of the next record.
        while ( this . currentEntry != null ) {
            if ( this . currentEntry . isLastRecordEntry ( ) ) {
                this . currentEntry . getData ( ) . close ( ) ;
                // resetContext() presumably clears currentEntry, terminating the loop
                // — TODO confirm.
                resetContext ( ) ;
            } else {
                fetchNextEntry ( ) ;
            }
        }
        return r ;
    } }
public class SaxonServlet {
    /**
     * Get the content at the given location using the configured credentials (if any).
     *
     * @param url the URL to fetch via HTTP GET
     * @return an open stream over the response body; the caller is responsible for
     *         closing it (closing releases the underlying HTTP connection)
     * @throws Exception if the request fails or the server replies with a non-200 status
     */
    private InputStream getInputStream ( String url ) throws Exception {
        HttpGet getMethod = new HttpGet ( url ) ;
        DefaultHttpClient client = new DefaultHttpClient ( m_cManager ) ;
        client . getParams ( ) . setIntParameter ( CoreConnectionPNames . CONNECTION_TIMEOUT , TIMEOUT_SECONDS * 1000 ) ;
        UsernamePasswordCredentials creds = getCreds ( url ) ;
        if ( creds != null ) {
            // Send credentials preemptively rather than waiting for a 401 challenge.
            client . getCredentialsProvider ( ) . setCredentials ( AuthScope . ANY , creds ) ;
            client . addRequestInterceptor ( new PreemptiveAuth ( ) ) ;
        }
        client . getParams ( ) . setBooleanParameter ( ClientPNames . HANDLE_REDIRECTS , true ) ;
        HttpInputStream in = new HttpInputStream ( client , getMethod ) ;
        if ( in . getStatusCode ( ) != 200 ) {
            // Best-effort close: the error we want to surface is the bad status code,
            // not a secondary failure while releasing the connection.
            try { in . close ( ) ; } catch ( Exception e ) { }
            throw new IOException ( "HTTP request failed. Got status code " + in . getStatusCode ( ) + " from remote server while attempting to GET " + url ) ;
        } else {
            return in ;
        }
    } }
public class Entry {
    /**
     * Sets the value of the key property.
     *
     * @param value allowed object is {@link org.openprovenance.prov.model.Key};
     *              NOTE(review): the value is down-cast to the SQL-layer subtype, so a
     *              ClassCastException is thrown for any other implementation — this
     *              appears intentional for this persistence class, TODO confirm.
     */
    public void setKey ( org . openprovenance . prov . model . Key value ) {
        this . key = ( org . openprovenance . prov . sql . Key ) value ;
    } }
public class AbstractJacksonContext { /** * / * ( non - Javadoc )
* @ see com . abubusoft . kripton . AbstractContext # createParser ( java . io . File ) */
@ Override public JacksonWrapperParser createParser ( File file ) { } } | try { return new JacksonWrapperParser ( innerFactory . createParser ( file ) , getSupportedFormat ( ) ) ; } catch ( IOException e ) { e . printStackTrace ( ) ; throw new KriptonRuntimeException ( e ) ; } |
public class MultiLineString { /** * Returns a list of LineStrings which are currently making up this MultiLineString .
* @ return a list of { @ link LineString } s
* @ since 3.0.0 */
public List < LineString > lineStrings ( ) { } } | List < List < Point > > coordinates = coordinates ( ) ; List < LineString > lineStrings = new ArrayList < > ( coordinates . size ( ) ) ; for ( List < Point > points : coordinates ) { lineStrings . add ( LineString . fromLngLats ( points ) ) ; } return lineStrings ; |
public class WindowUtils { /** * Sets the shape of a window .
* This will be done via a com . sun API and may be not available on all platforms .
* @ param window to change the shape for
* @ param s the new shape for the window . */
public static void setWindowShape ( Window window , Shape s ) { } } | if ( PlatformUtils . isJava6 ( ) ) { setWindowShapeJava6 ( window , s ) ; } else { setWindowShapeJava7 ( window , s ) ; } |
public class ExpressRouteCircuitConnectionsInner {
    /**
     * Gets the specified Express Route Circuit Connection from the specified express
     * route circuit.
     *
     * @param resourceGroupName The name of the resource group.
     * @param circuitName The name of the express route circuit.
     * @param peeringName The name of the peering.
     * @param connectionName The name of the express route circuit connection.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails
     *         to be sent
     * @return the ExpressRouteCircuitConnectionInner object if successful.
     */
    public ExpressRouteCircuitConnectionInner get ( String resourceGroupName , String circuitName , String peeringName , String connectionName ) {
        // Synchronous facade: blocks on the async pipeline and unwraps the body.
        return getWithServiceResponseAsync ( resourceGroupName , circuitName , peeringName , connectionName ) . toBlocking ( ) . single ( ) . body ( ) ;
    } }
public class CsvEscapeUtil {
    /**
     * Perform an escape operation, based on char[], according to the specified level and
     * type. Output is only quoted (and internal double-quotes doubled) when at least one
     * non-alphanumeric character is present; purely alphanumeric input is written
     * verbatim with no quoting.
     *
     * @param text   the source characters (may be null/empty — then nothing is written)
     * @param offset index of the first character to process
     * @param len    number of characters to process
     * @param writer destination for the (possibly escaped) output
     * @throws IOException propagated from the writer
     */
    static void escape ( final char [ ] text , final int offset , final int len , final Writer writer ) throws IOException {
        if ( text == null || text . length == 0 ) { return ; }
        final int max = ( offset + len ) ;
        // readOffset marks the start of the pending run of not-yet-written characters.
        int readOffset = offset ;
        for ( int i = offset ; i < max ; i ++ ) {
            final char c = text [ i ] ;
            /* Shortcut: most characters will be alphanumeric, and we won't need to do
             * anything at all for them. */
            if ( ( c >= 'a' && c <= 'z' ) || ( c >= 'A' && c <= 'Z' ) || ( c >= '0' && c <= '9' ) ) { continue ; }
            /* First non-alphanumeric character encountered (readOffset still at the
             * start): the whole value must be enclosed in double-quotes, so emit the
             * opening quote now. */
            if ( readOffset == offset ) {
                writer . write ( DOUBLE_QUOTE ) ;
            }
            /* Flush the pending (unmodified) run up to, but not including, this char. */
            if ( i - readOffset > 0 ) {
                writer . write ( text , readOffset , ( i - readOffset ) ) ;
            }
            readOffset = i + 1 ;
            /* A double-quote inside the value is escaped by doubling it. */
            if ( c == DOUBLE_QUOTE ) {
                writer . write ( TWO_DOUBLE_QUOTES ) ;
                continue ;
            }
            writer . write ( c ) ;
        }
        /* Final cleaning: flush whatever unescaped tail remains. */
        if ( max - readOffset > 0 ) {
            writer . write ( text , readOffset , ( max - readOffset ) ) ;
        }
        /* readOffset moved iff some non-alphanumeric char was seen — close the quote. */
        if ( readOffset > offset ) {
            writer . write ( DOUBLE_QUOTE ) ;
        }
    } }
public class Link { /** * Creats a new { @ link Link } with the given { @ link Affordance } s .
* @ param affordances must not be { @ literal null } .
* @ return */
public Link withAffordances ( List < Affordance > affordances ) { } } | return new Link ( this . rel , this . href , this . hreflang , this . media , this . title , this . type , this . deprecation , this . profile , this . name , this . template , affordances ) ; |
public class CoinbaseMarketDataServiceRaw {
    /**
     * Unauthenticated resource that tells you the price to buy one unit.
     *
     * @param base the base currency
     * @param counter the counter (quote) currency
     * @return The price in the desired {@code currency} to buy one unit.
     * @throws IOException on transport failure
     * @see <a href="https://developers.coinbase.com/api/v2#get-buy-price">developers.coinbase.com/api/v2#get-buy-price</a>
     */
    public CoinbasePrice getCoinbaseBuyPrice ( Currency base , Currency counter ) throws IOException {
        // The REST endpoint encodes the pair as "BASE-COUNTER" in the path.
        return coinbase . getBuyPrice ( Coinbase . CB_VERSION_VALUE , base + "-" + counter ) . getData ( ) ;
    } }
public class IOUtil {
    /**
     * Deletes a file; if it is a folder, recursively deletes all of its contents
     * (including sub-folders) first so that the folder itself can be deleted.
     * A null or non-existent argument is treated as already deleted.
     *
     * @param f the file or folder to delete
     * @return the return value of {@link java.io.File#delete()} for {@code f}, or
     *         {@code true} when {@code f} is null or does not exist
     */
    public static boolean delete ( File f ) {
        if ( f == null || ! f . exists ( ) ) {
            return true ;
        }
        if ( f . isDirectory ( ) ) {
            // listFiles() returns null on I/O error — guard before recursing.
            final File [ ] children = f . listFiles ( ) ;
            if ( children != null ) {
                for ( final File child : children ) {
                    delete ( child ) ;
                }
            }
        }
        return f . delete ( ) ;
    } }
public class EnumSet {
    /**
     * Creates an enum set with the same element type as the specified enum set,
     * initially containing the same elements (if any).
     *
     * @param <E> The class of the elements in the set
     * @param s the enum set from which to initialize this enum set
     * @return A copy of the specified enum set.
     * @throws NullPointerException if <tt>s</tt> is null
     */
    public static < E extends Enum < E > > EnumSet < E > copyOf ( EnumSet < E > s ) {
        // clone() yields an independent EnumSet with the same element type and
        // contents; a null argument triggers the documented NullPointerException.
        return s . clone ( ) ;
    } }
public class EhCacheProvider { /** * ( non - Javadoc )
* @ see com . impetus . kundera . cache . CacheProvider # init ( java . util . Map ) */
@ Override public synchronized void init ( Map < ? , ? > properties ) { } } | if ( manager != null ) { log . warn ( "Attempt to restart an already started CacheFactory. Using previously created EhCacheFactory." ) ; return ; } initializing = true ; try { String configurationResourceName = null ; if ( properties != null ) { configurationResourceName = ( String ) properties . get ( NET_SF_EHCACHE_CONFIGURATION_RESOURCE_NAME ) ; } if ( configurationResourceName == null || configurationResourceName . length ( ) == 0 ) { manager = CacheManager . create ( ) ; } else { if ( ! configurationResourceName . startsWith ( "/" ) ) { configurationResourceName = "/" + configurationResourceName ; log . info ( "prepending / to " + configurationResourceName + ". It should be placed in the root" + "of the classpath rather than in a package." ) ; } URL url = loadResource ( configurationResourceName ) ; manager = CacheManager . create ( url ) ; } } catch ( net . sf . ehcache . CacheException e ) { if ( e . getMessage ( ) . startsWith ( "Cannot parseConfiguration CacheManager. Attempt to create a new instance of " + "CacheManager using the diskStorePath" ) ) { throw new CacheException ( "Could not init EhCacheFactory." , e ) ; } else { throw new CacheException ( e ) ; } } finally { initializing = false ; } |
public class TabularDataConverter { private JSONObject getAsJsonObject ( Object pFrom ) { } } | JSONAware jsonVal = toJSON ( pFrom ) ; if ( ! ( jsonVal instanceof JSONObject ) ) { throw new IllegalArgumentException ( "Expected JSON type for a TabularData is JSONObject, not " + jsonVal . getClass ( ) ) ; } return ( JSONObject ) jsonVal ; |
public class HasManyModel { /** * Find all objects from the child list .
* TODO : Figure out how to make this accesible without . . .
* creating a dummy instance .
* @ throws com . mauriciogiordano . easydb . exception . NoContextFoundException in case of null context .
* @ return A list of all children . */
public List < C > findAllChildren ( ) { } } | List < String > objects = getChildrenList ( ) ; List < C > children = new ArrayList < C > ( ) ; try { Model dummy = ( Model ) childClazz . newInstance ( ) ; dummy . setContext ( context ) ; for ( String id : objects ) { children . add ( ( C ) dummy . find ( id ) ) ; } } catch ( IllegalAccessException e ) { e . printStackTrace ( ) ; } catch ( InstantiationException e ) { e . printStackTrace ( ) ; } return children ; |
public class RequestMessage { /** * @ see javax . servlet . ServletRequest # removeAttribute ( java . lang . String ) */
@ Override public void removeAttribute ( String name ) { } } | if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "Removing attribute: " + name ) ; } this . attributes . remove ( name ) ; |
public class SimpleSolrPersistentEntity { /** * ( non - Javadoc )
* @ see org . springframework . data . solr . core . mapping . SolrPersistentEntity # getCollectionName ( ) */
@ Override public String getCollectionName ( ) { } } | if ( expression == null ) { return collectionName ; } EvaluationContext ctx = getEvaluationContext ( null ) ; ctx . setVariable ( "targetType" , typeInformation . getType ( ) ) ; return expression . getValue ( ctx , String . class ) ; |
public class ChargingStationEventListener { /** * Updates a charging station ' s component availability .
* @ param chargingStationId the charging station ' s id .
* @ param componentId the component ' s id .
* @ param component the component type .
* @ param availability the the charging station ' s new availability . */
private void updateComponentAvailability ( ChargingStationId chargingStationId , ComponentId componentId , ChargingStationComponent component , Availability availability ) { } } | if ( ! component . equals ( ChargingStationComponent . EVSE ) || ! ( componentId instanceof EvseId ) ) { return ; } ChargingStation chargingStation = repository . findOne ( chargingStationId . getId ( ) ) ; if ( chargingStation != null ) { for ( Evse evse : chargingStation . getEvses ( ) ) { if ( evse . getEvseId ( ) . equals ( componentId . getId ( ) ) ) { evse . setAvailability ( availability ) ; break ; } } repository . createOrUpdate ( chargingStation ) ; } |
public class ListResourcesForWebACLRequestMarshaller {
    /**
     * Marshall the given parameter object. (AWS SDK generated-style marshaller: each
     * field is written through its protocol binding; any failure is wrapped in an
     * unchecked SdkClientException with the cause preserved.)
     *
     * @param listResourcesForWebACLRequest the request to marshall; must not be null
     * @param protocolMarshaller destination marshaller
     */
    public void marshall ( ListResourcesForWebACLRequest listResourcesForWebACLRequest , ProtocolMarshaller protocolMarshaller ) {
        if ( listResourcesForWebACLRequest == null ) {
            throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ;
        }
        try {
            protocolMarshaller . marshall ( listResourcesForWebACLRequest . getWebACLId ( ) , WEBACLID_BINDING ) ;
            protocolMarshaller . marshall ( listResourcesForWebACLRequest . getResourceType ( ) , RESOURCETYPE_BINDING ) ;
        } catch ( Exception e ) {
            throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ;
        }
    } }
public class BaseSession {
    /**
     * Do a remote action.
     *
     * @param strCommand Command to perform remotely.
     * @param properties Properties for this command (optional).
     * @return the command's result (boolean success per the original contract).
     * @throws DBException on database failure
     * @throws RemoteException on remoting failure
     */
    public Object doRemoteAction ( String strCommand , Map < String , Object > properties ) throws DBException , RemoteException {
        // Serialize on the task: just being careful (in case the user decides to do
        // some data access while handling the command).
        synchronized ( this . getTask ( ) ) {
            // Don't override this, override doRemoteCommand(xxx);
            return this . handleRemoteCommand ( strCommand , properties , this ) ;
        }
    } }
public class DistanceTravelledCalculator { /** * Returns a sequence of { @ link Options } that are same as the source apart
* from the { @ link Bounds } which are partitioned according to horizontal and
* vertical parameters . For map - reduce purposes we need to be able to
* partition the bounds of Options . Passing horizontal = 1 and vertical = 1 will
* return one item only being a copy of the source { @ link Options } .
* @ param options
* @ param horizontal
* number of regions ( with longitude )
* @ param vertical
* number of regions ( with latitude )
* @ return */
public static Observable < Options > partition ( final Options options , final int horizontal , final int vertical ) { } } | List < Options > list = new ArrayList < > ( ) ; Bounds bounds = options . getBounds ( ) ; double h = bounds . getWidthDegrees ( ) / horizontal ; double v = bounds . getHeightDegrees ( ) / vertical ; for ( int i = 0 ; i < horizontal ; i ++ ) { for ( int j = 0 ; j < vertical ; j ++ ) { double lat = bounds . getTopLeftLat ( ) - j * v ; double lon = bounds . getTopLeftLon ( ) + i * h ; Bounds b = new Bounds ( lat , lon , lat - v , lon + h ) ; list . add ( options . buildFrom ( ) . bounds ( b ) . filterBounds ( b . expand ( 7 , 7 ) ) . build ( ) ) ; } } return Observable . from ( list ) ; |
public class Startup {
    /**
     * Read an external file or a resource.
     *
     * @param filename file/resource to access
     * @param context printable non-natural-language context for errors
     * @param mh handler for error messages
     * @return file as startup entry, or null on error (a message has been printed)
     */
    private static StartupEntry readFile ( String filename , String context , MessageHandler mh ) {
        if ( filename != null ) {
            try {
                byte [ ] encoded = Files . readAllBytes ( toPathResolvingUserHome ( filename ) ) ;
                // NOTE(review): new String(byte[]) decodes with the platform default
                // charset — confirm whether startup files are expected to be UTF-8.
                return new StartupEntry ( false , filename , new String ( encoded ) , LocalDateTime . now ( ) . format ( DateTimeFormatter . ofLocalizedDateTime ( FormatStyle . MEDIUM ) ) ) ;
            } catch ( AccessDeniedException e ) {
                mh . errormsg ( "jshell.err.file.not.accessible" , context , filename , e . getMessage ( ) ) ;
            } catch ( NoSuchFileException e ) {
                // Not found as a file — fall back to looking it up as a resource.
                String resource = getResource ( filename ) ;
                if ( resource != null ) {
                    return new StartupEntry ( true , filename , resource ) ;
                }
                mh . errormsg ( "jshell.err.file.not.found" , context , filename ) ;
            } catch ( Exception e ) {
                mh . errormsg ( "jshell.err.file.exception" , context , filename , e ) ;
            }
        } else {
            mh . errormsg ( "jshell.err.file.filename" , context ) ;
        }
        return null ;
    } }
public class RouterClient {
    /**
     * Preview fields auto-generated during router create and update operations. Calling
     * this method does NOT create or update the router.
     * <p>Sample code:
     * <pre><code>
     * try (RouterClient routerClient = RouterClient.create()) {
     *   ProjectRegionRouterName router = ProjectRegionRouterName.of("[PROJECT]", "[REGION]", "[ROUTER]");
     *   Router routerResource = Router.newBuilder().build();
     *   RoutersPreviewResponse response = routerClient.previewRouter(router, routerResource);
     * </code></pre>
     *
     * @param router Name of the Router resource to query (may be null).
     * @param routerResource Router resource.
     * @throws com.google.api.gax.rpc.ApiException if the remote call fails
     */
    @ BetaApi public final RoutersPreviewResponse previewRouter ( ProjectRegionRouterName router , Router routerResource ) {
        // Convenience overload: build the HTTP request object and delegate.
        PreviewRouterHttpRequest request = PreviewRouterHttpRequest . newBuilder ( ) . setRouter ( router == null ? null : router . toString ( ) ) . setRouterResource ( routerResource ) . build ( ) ;
        return previewRouter ( request ) ;
    } }
public class AbstractViewHolderAdapter {
    /**
     * Sets the parent view, whose appearance should currently be customized by the
     * decorator. This method should never be called or overridden by any custom adapter
     * implementation.
     *
     * @param currentParentView The parent view, which should be set, as an instance of
     *        the class {@link View}. The parent view may not be null
     */
    protected final void setCurrentParentView ( @ NonNull final View currentParentView ) {
        // Fail fast on null before storing the reference.
        Condition . INSTANCE . ensureNotNull ( currentParentView , "The parent view may not be null" ) ;
        this . currentParentView = currentParentView ;
    } }
public class GaliosFieldTableOps { /** * Evaluate the polynomial using Horner ' s method . Avoids explicit calculating the powers of x .
* < p > 01x * * 4 + 0fx * * 3 + 36x * * 2 + 78x + 40 = ( ( ( 01 x + 0f ) x + 36 ) x + 78 ) x + 40 < / p >
* < p > Coefficients for largest powers are first , e . g . 2 * x * * 3 + 8 * x * * 2 + 1 = [ 2,8,0,1 ] < / p >
* @ param input Polynomial being evaluated
* @ param x Value of x
* @ return Output of function */
public int polyEval ( GrowQueue_I8 input , int x ) { } } | int y = input . data [ 0 ] & 0xFF ; for ( int i = 1 ; i < input . size ; i ++ ) { y = multiply ( y , x ) ^ ( input . data [ i ] & 0xFF ) ; } return y ; |
public class ExtensionPassiveScan { /** * Sets whether or not the plug - in passive scanner with the given { @ code pluginId } is { @ code enabled } .
* @ param pluginId the ID of the plug - in passive scanner
* @ param enabled { @ code true } if the scanner should be enabled , { @ code false } otherwise */
void setPluginPassiveScannerEnabled ( int pluginId , boolean enabled ) { } } | PluginPassiveScanner scanner = getPluginPassiveScanner ( pluginId ) ; if ( scanner != null ) { scanner . setEnabled ( enabled ) ; scanner . save ( ) ; } |
public class StreamsUtils {
    /**
     * <p>Generates a stream that is computed from a provided int stream by first rolling
     * it in the same way as the <code>roll()</code> method does. Then a summarizing int
     * operation is applied on each substream to form the final int summary stream. No
     * boxing/unboxing is conducted in the process.</p>
     * <p>The resulting stream has the same number of elements as the provided stream,
     * minus the size of the window width, to preserve consistency of each collection.</p>
     * <p>A <code>NullPointerException</code> will be thrown if the provided stream is
     * null.</p>
     *
     * @param intStream the processed stream
     * @param rollingFactor the size of the window to apply the collector on
     * @return a stream in which each value is the collection of the provided stream
     */
    public static Stream < IntSummaryStatistics > shiftingWindowSummarizingInt ( IntStream intStream , int rollingFactor ) {
        Objects . requireNonNull ( intStream ) ;
        // Wrap the source spliterator so each element is a window of rollingFactor ints.
        RollingOfIntSpliterator ofIntSpliterator = RollingOfIntSpliterator . of ( intStream . spliterator ( ) , rollingFactor ) ;
        // Preserve parallelism and make sure closing the result closes the source,
        // then reduce each window to its IntSummaryStatistics.
        return StreamSupport . stream ( ofIntSpliterator , intStream . isParallel ( ) ) . onClose ( intStream :: close ) . map ( str -> str . collect ( IntSummaryStatistics :: new , IntSummaryStatistics :: accept , IntSummaryStatistics :: combine ) ) ;
    } }
public class EarDescriptorBuilder {
    /**
     * Writes an EJB module entry to application.xml. (The original comment said
     * "WEB part", but the element created below is {@code <ejb>} — doc corrected.)
     *
     * @param filename name of the module
     */
    private void writeEjbModule ( String filename ) {
        Element element = writeModule ( ) ;
        Element ejb = doc . createElement ( "ejb" ) ;
        element . appendChild ( ejb ) ;
        ejb . setTextContent ( filename ) ;
    } }
public class CleverTapAPI {
    /**
     * Pushes the Notification Viewed event to CleverTap. Bundles that are not from
     * CleverTap, lack a notification ID, or duplicate a recently-seen ID are dropped
     * (with a debug log) rather than raised.
     *
     * @param extras The {@link Bundle} object that contains the notification details
     */
    @ SuppressWarnings ( { "unused" , "WeakerAccess" } )
    public void pushNotificationViewedEvent ( Bundle extras ) {
        // Guard 1: must be a CleverTap-originated payload.
        if ( extras == null || extras . isEmpty ( ) || extras . get ( Constants . NOTIFICATION_TAG ) == null ) {
            getConfigLogger ( ) . debug ( getAccountId ( ) , "Push notification: " + ( extras == null ? "NULL" : extras . toString ( ) ) + " not from CleverTap - will not process Notification Viewed event." ) ;
            return ;
        }
        // Guard 2: a notification ID is required for dedupe and attribution.
        if ( ! extras . containsKey ( Constants . NOTIFICATION_ID_TAG ) || ( extras . getString ( Constants . NOTIFICATION_ID_TAG ) == null ) ) {
            getConfigLogger ( ) . debug ( getAccountId ( ) , "Push notification ID Tag is null, not processing Notification Viewed event for: " + extras . toString ( ) ) ;
            return ;
        }
        // Guard 3: drop duplicate views — same notification ID seen within the
        // configured interval (2 seconds per the constant's name).
        boolean isDuplicate = checkDuplicateNotificationIds ( extras , notificationViewedIdTagMap , Constants . NOTIFICATION_VIEWED_ID_TAG_INTERVAL ) ;
        if ( isDuplicate ) {
            getConfigLogger ( ) . debug ( getAccountId ( ) , "Already processed Notification Viewed event for " + extras . toString ( ) + ", dropping duplicate." ) ;
            return ;
        }
        JSONObject event = new JSONObject ( ) ;
        try {
            JSONObject notif = getWzrkFields ( extras ) ;
            event . put ( "evtName" , Constants . NOTIFICATION_VIEWED_EVENT_NAME ) ;
            event . put ( "evtData" , notif ) ;
        } catch ( Throwable ignored ) {
            // no-op: a malformed payload must not crash the host app; the event is
            // queued with whatever fields were set before the failure.
        }
        queueEvent ( context , event , Constants . RAISED_EVENT ) ;
    } }
public class CognitoOptionsMarshaller {
    /**
     * Marshall the given parameter object. (AWS SDK generated-style marshaller: each
     * field is written through its protocol binding; any failure is wrapped in an
     * unchecked SdkClientException with the cause preserved.)
     *
     * @param cognitoOptions the options to marshall; must not be null
     * @param protocolMarshaller destination marshaller
     */
    public void marshall ( CognitoOptions cognitoOptions , ProtocolMarshaller protocolMarshaller ) {
        if ( cognitoOptions == null ) {
            throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ;
        }
        try {
            protocolMarshaller . marshall ( cognitoOptions . getEnabled ( ) , ENABLED_BINDING ) ;
            protocolMarshaller . marshall ( cognitoOptions . getUserPoolId ( ) , USERPOOLID_BINDING ) ;
            protocolMarshaller . marshall ( cognitoOptions . getIdentityPoolId ( ) , IDENTITYPOOLID_BINDING ) ;
            protocolMarshaller . marshall ( cognitoOptions . getRoleArn ( ) , ROLEARN_BINDING ) ;
        } catch ( Exception e ) {
            throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ;
        }
    } }
public class AbstractOptionsForUpdateOrDelete {
    /**
     * Add a single LWT result listener. Note: this REPLACES any previously registered
     * listeners (the optional is overwritten with a one-element list). Example of usage:
     * <pre class="code"><code class="java">
     * LWTResultListener lwtListener = new LWTResultListener() {
     *   public void onError(LWTResult lwtResult) {
     *     // Get type of LWT operation that fails
     *     LWTResult.Operation operation = lwtResult.operation();
     *     // Print out current values
     *     TypedMap currentValues = lwtResult.currentValues();
     *     currentValues
     *       .entrySet()
     *       .forEach(entry -&gt; System.out.println(String.format("%s = %s", entry.getKey(), entry.getValue())));
     * </code></pre>
     *
     * @param lwtResultListener the listener to register
     * @return this options instance, for chaining
     */
    public T withLwtResultListener ( LWTResultListener lwtResultListener ) {
        this . lwtResultListeners = Optional . of ( asList ( lwtResultListener ) ) ;
        return getThis ( ) ;
    } }
public class ThroughputStatServiceImpl {
    /**
     * <pre>
     * Lists the ThroughputInfo for the given pipeline over the [start, end] range of the
     * condition. All rows for the range are fetched in one query (ordered descending by
     * end_time) and then grouped into one-minute buckets keyed by the bucket's end
     * timestamp.
     * </pre>
     * (Javadoc translated from the original Chinese.)
     */
    public Map < Long , ThroughputInfo > listTimelineThroughput ( TimelineThroughputCondition condition ) {
        Assert . assertNotNull ( condition ) ;
        Map < Long , ThroughputInfo > throughputInfos = new LinkedHashMap < Long , ThroughputInfo > ( ) ;
        List < ThroughputStatDO > throughputStatDOs = throughputDao . listTimelineThroughputStat ( condition ) ;
        int size = throughputStatDOs . size ( ) ;
        int k = size - 1 ;
        for ( Long i = condition . getStart ( ) . getTime ( ) ; i <= condition . getEnd ( ) . getTime ( ) ; i += 60 * 1000 ) {
            ThroughputInfo throughputInfo = new ThroughputInfo ( ) ;
            List < ThroughputStat > throughputStat = new ArrayList < ThroughputStat > ( ) ;
            // Collect the rows that fall inside time slot i; k is a cursor so rows
            // already consumed by earlier slots are not scanned again.
            for ( int j = k ; j >= 0 ; -- j ) {
                if ( ( i - throughputStatDOs . get ( j ) . getEndTime ( ) . getTime ( ) <= 60 * 1000 ) && ( i - throughputStatDOs . get ( j ) . getEndTime ( ) . getTime ( ) >= 0 ) ) {
                    throughputStat . add ( throughputStatDOToModel ( throughputStatDOs . get ( j ) ) ) ;
                    k = j - 1 ;
                }
                // Once a row falls outside the slot, the remaining (older) rows cannot
                // match either, so stop scanning.
                else {
                    break ;
                }
            }
            if ( throughputStat . size ( ) > 0 ) {
                throughputInfo . setItems ( throughputStat ) ;
                throughputInfo . setSeconds ( 1 * 60L ) ;
                throughputInfos . put ( i , throughputInfo ) ;
            }
        }
        return throughputInfos ;
    } }
public class Organizer { /** * construct map for collection of criteria object wher the key is
* Criteria . getId
* @ param aCriterias
* @ return the map Criteria is the value and Criteria . getId is the key */
public static Map < String , Criteria > constructCriteriaMap ( Collection < Criteria > aCriterias ) { } } | if ( aCriterias == null ) return null ; Map < String , Criteria > map = new HashMap < String , Criteria > ( aCriterias . size ( ) ) ; Criteria criteria = null ; for ( Iterator < Criteria > i = aCriterias . iterator ( ) ; i . hasNext ( ) ; ) { criteria = ( Criteria ) i . next ( ) ; map . put ( criteria . getId ( ) , criteria ) ; } return map ; |
public class druidGLexer {
    /**
     * $ANTLR start "SORT" — generated lexer rule matching the keyword 'SORT'.
     * Do not edit by hand; regenerate from druidG.g instead.
     */
    public final void mSORT ( ) throws RecognitionException {
        try {
            int _type = SORT ;
            int _channel = DEFAULT_TOKEN_CHANNEL ;
            // druidG.g:622:6: ( ( 'SORT' ) )
            // druidG.g:622:8: ( 'SORT' )
            {
                // druidG.g:622:8: ( 'SORT' )
                // druidG.g:622:9: 'SORT'
                {
                    match ( "SORT" ) ;
                }
            }
            state . type = _type ;
            state . channel = _channel ;
        } finally {
            // do for sure before leaving
        }
    } }
public class Wro4jAutoConfiguration { /** * Registeres the { @ code wroFilter } through a Spring
* { @ link FilterRegistrationBean } .
* @ param wroFilter The configured { @ code wroFilter }
* @ param wro4jProperties Needed for the url pattern to which the filter
* should be registered
* @ return The Spring { @ code FilterRegistrationBean } */
@ Bean FilterRegistrationBean wro4jFilterRegistration ( ConfigurableWroFilter wroFilter , Wro4jProperties wro4jProperties ) { } } | final FilterRegistrationBean filterRegistrationBean = new FilterRegistrationBean ( wroFilter ) ; filterRegistrationBean . addUrlPatterns ( wro4jProperties . getFilterUrl ( ) + "/*" ) ; return filterRegistrationBean ; |
public class PartitionReplicaFragmentVersions {
    /**
     * Change versions for all replicas with an index greater than {@code fromReplica}
     * to the new replica versions.
     *
     * @param newVersions replacement versions, indexed the same way as {@code versions}
     * @param fromReplica first replica index (inclusive) to overwrite;
     *        NOTE(review): replica indices appear to be 1-based while the array is
     *        0-based (hence the -1 below) — TODO confirm.
     */
    private void setVersions ( long [ ] newVersions , int fromReplica ) {
        int fromIndex = fromReplica - 1 ;
        int len = newVersions . length - fromIndex ;
        // Bulk-copy the tail [fromIndex, end) of newVersions over the same range.
        arraycopy ( newVersions , fromIndex , versions , fromIndex , len ) ;
    } }
public class NodeSchema {
    /**
     * Rebuilds this schema in place: every column expression is converted to a
     * TupleValueExpression, and columns listed in {@code nameMap} are renamed.
     * \pre: m_columns is a bi-map to \param m.
     *
     * @param nameMap maps an old column name to a (new column name, index) pair;
     *        only the pair's first element (the new name) is used here
     * @return this schema, mutated
     */
    public NodeSchema toTVEAndFixColumns ( Map < String , Pair < String , Integer > > nameMap ) {
        final NodeSchema ns = copyAndReplaceWithTVE ( ) ; // First convert all non-TVE expressions to TVE in a copy
        // Wipe this schema's state; it is repopulated from the copy below.
        m_columns . clear ( ) ;
        m_columnsMapHelper . clear ( ) ;
        for ( int indx = 0 ; indx < ns . size ( ) ; ++ indx ) { // then update columns
            final SchemaColumn sc = ns . getColumn ( indx ) ;
            assert ( sc . getExpression ( ) instanceof TupleValueExpression ) ;
            if ( nameMap . containsKey ( sc . getColumnName ( ) ) ) {
                final String newColName = nameMap . get ( sc . getColumnName ( ) ) . getFirst ( ) ;
                sc . reset ( sc . getTableName ( ) , sc . getTableAlias ( ) , newColName , sc . getColumnAlias ( ) ) ;
                sc . setDifferentiator ( indx ) ;
                // Keep the TVE's bookkeeping (index, name, differentiator) in sync
                // with the renamed column.
                TupleValueExpression exp = ( TupleValueExpression ) sc . getExpression ( ) ;
                exp . setColumnIndex ( indx ) ;
                exp . setColumnName ( newColName ) ;
                exp . setDifferentiator ( indx ) ;
            }
        }
        for ( SchemaColumn sc : ns ) {
            addColumn ( sc ) ;
        }
        return this ;
    } }
public class UriBasedVehicleInterfaceMixin { /** * Convert the parameter to a URI and validate the correctness of its host
* and port .
* @ return true if the address and port are valid . */
public static boolean validateResource ( String uriString ) { } } | if ( uriString == null ) { return false ; } try { return validateResource ( createUri ( uriString ) ) ; } catch ( DataSourceException e ) { Log . d ( TAG , "URI is not valid" , e ) ; return false ; } |
public class AbstractDatabase {
    /**
     * Returns the expiration time of the document. null will be returned if there is no
     * expiration time set.
     *
     * @param id The ID of the Document
     * @return Date a nullable expiration timestamp of the document or null if time not
     *         set.
     * @throws CouchbaseLiteException Throws an exception if any error occurs during the
     *         operation (NOT_FOUND if the document does not exist).
     */
    public Date getDocumentExpiration ( @ NonNull String id ) throws CouchbaseLiteException {
        if ( id == null ) {
            throw new IllegalArgumentException ( "document id cannot be null." ) ;
        }
        // All native-handle access is serialized on the database lock.
        synchronized ( lock ) {
            try {
                // Verify the document exists before asking for its expiration.
                if ( getC4Database ( ) . get ( id , true ) == null ) {
                    throw new CouchbaseLiteException ( "Document doesn't exist in the database." , CBLError . Domain . CBLITE , CBLError . Code . NOT_FOUND ) ;
                }
                final long timestamp = getC4Database ( ) . getExpiration ( id ) ;
                // A timestamp of 0 means "no expiration set".
                return ( timestamp == 0 ) ? null : new Date ( timestamp ) ;
            } catch ( LiteCoreException e ) {
                throw CBLStatus . convertException ( e ) ;
            }
        }
    } }
public class JobTemplateSettings { /** * Use Inputs ( inputs ) to define the source file used in the transcode job . There can only be one input in a job
* template . Using the API , you can include multiple inputs when referencing a job template .
* @ param inputs
* Use Inputs ( inputs ) to define the source file used in the transcode job . There can only be one input in a
* job template . Using the API , you can include multiple inputs when referencing a job template . */
public void setInputs ( java . util . Collection < InputTemplate > inputs ) { } } | if ( inputs == null ) { this . inputs = null ; return ; } this . inputs = new java . util . ArrayList < InputTemplate > ( inputs ) ; |
public class CodeGenerator { /** * Check { @ link FieldType } is validate to class type of { @ link Field }
* @ param type
* @ param field */
private void checkType ( FieldType type , Field field ) { } } | Class < ? > cls = field . getType ( ) ; if ( type == FieldType . OBJECT || type == FieldType . ENUM ) { return ; } String javaType = type . getJavaType ( ) ; if ( Integer . class . getSimpleName ( ) . equals ( javaType ) ) { if ( cls . getSimpleName ( ) . equals ( "int" ) || Integer . class . getSimpleName ( ) . equals ( cls . getSimpleName ( ) ) ) { return ; } throw new IllegalArgumentException ( getMismatchTypeErroMessage ( type , field ) ) ; } if ( ! javaType . equalsIgnoreCase ( cls . getSimpleName ( ) ) ) { throw new IllegalArgumentException ( getMismatchTypeErroMessage ( type , field ) ) ; } |
public class CmsXmlContainerPageFactory { /** * Factory method to unmarshal ( read ) a container page instance from a OpenCms VFS resource
* that contains XML data . < p >
* < b > Warning : < / b > < br / >
* This method does not support requested historic versions , it always loads the
* most recent version . Use < code > { @ link # unmarshal ( CmsObject , CmsResource , ServletRequest ) } < / code >
* for history support . < p >
* @ param cms the current cms object
* @ param resource the resource with the XML data to unmarshal
* @ return a container page instance unmarshalled from the provided resource
* @ throws CmsException if something goes wrong */
public static CmsXmlContainerPage unmarshal ( CmsObject cms , CmsResource resource ) throws CmsException { } } | // check the cache
CmsXmlContainerPage content = getCache ( cms , resource , true ) ; if ( content != null ) { return content ; } content = unmarshal ( cms , cms . readFile ( resource ) , true ) ; // set the cache
setCache ( cms , content , true ) ; return content ; |
public class EmptyZipkinFactory { /** * Build a new { @ link HttpTracing } instance for interfacing with Zipkin
* @ param environment Environment
* @ return HttpTracing instance */
@ Override public Optional < HttpTracing > build ( final Environment environment ) { } } | if ( ! isEnabled ( ) ) { LOGGER . warn ( "Zipkin tracing is disabled" ) ; return Optional . empty ( ) ; } LOGGER . info ( "Dropping all collected spans" ) ; return buildTracing ( environment , Reporter . NOOP ) ; |
public class DataEncoder { /** * Encodes the given optional byte array into a variable amount of
* bytes . If the byte array is null , exactly 1 byte is written . Otherwise ,
* the amount written can be determined by calling calculateEncodedLength .
* @ param value byte array value to encode , may be null
* @ param dst destination for encoded bytes
* @ param dstOffset offset into destination array
* @ return amount of bytes written */
public static int encode ( byte [ ] value , byte [ ] dst , int dstOffset ) { } } | if ( value == null ) { dst [ dstOffset ] = NULL_BYTE_HIGH ; return 1 ; } return encode ( value , 0 , value . length , dst , dstOffset ) ; |
public class GitHubLoginFunction { /** * okHttp connector to be used as backend for GitHub client .
* Uses proxy of jenkins
* If cache size > 0 , uses cache
* @ return connector to be used as backend for client */
private OkHttpConnector connector ( GitHubServerConfig config ) { } } | OkHttpClient client = new OkHttpClient ( ) . setProxy ( getProxy ( defaultIfBlank ( config . getApiUrl ( ) , GITHUB_URL ) ) ) ; if ( config . getClientCacheSize ( ) > 0 ) { Cache cache = toCacheDir ( ) . apply ( config ) ; client . setCache ( cache ) ; } return new OkHttpConnector ( new OkUrlFactory ( client ) ) ; |
public class WCOutputStream31 {

    /*
     * (non-Javadoc)
     * @see com.ibm.ws.webcontainer.osgi.response.WCOutputStream#close()
     */
    public void close() throws java.io.IOException {
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
            Tr.debug(tc, "close output");
        }
        // When a non-blocking write listener is registered and close has
        // already completed, return without closing again.
        if (this._listener != null && this.isOutputStreamNBClosed()) {
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                Tr.debug(tc, "output stream close previously completed ...return ");
            }
            return;
        }
        try {
            // Detach the async write callback before closing the underlying stream.
            this._httpOut.setAsyncServletWriteListenerCallBack(null);
            super.close();
        } finally {
            // Mark completion even if super.close() threw, so a subsequent
            // close attempt takes the early-return path above.
            if (this._listener != null)
                this.setOutputStreamNBClosed(true);
        }
    }
}
public class ProviderHelper { /** * Compare two provider group , return add list and remove list
* @ param oldGroup old provider group
* @ param newGroup new provider group
* @ param add provider list need add
* @ param remove provider list need remove */
public static void compareGroup ( ProviderGroup oldGroup , ProviderGroup newGroup , List < ProviderInfo > add , List < ProviderInfo > remove ) { } } | compareProviders ( oldGroup . getProviderInfos ( ) , newGroup . getProviderInfos ( ) , add , remove ) ; |
public class StaticArrayBuffer { /** * - - - - - ARRAY METHODS */
@ Override public byte [ ] getBytes ( int position , int length ) { } } | byte [ ] result = new byte [ length ] ; for ( int i = 0 ; i < length ; i ++ ) { result [ i ] = getByte ( position ) ; position += BYTE_LEN ; } return result ; |
public class FileTree { /** * Iterates over the file tree of a directory . It receives a visitor and will call its methods
* for each file in the directory .
* preVisitDirectory ( directory )
* visitFile ( file )
* - recursively the same for every subdirectory
* postVisitDirectory ( directory )
* @ param directory the directory to iterate
* @ param visitor the visitor that will be invoked for each directory / file in the tree */
public static void walkFileTree ( File directory , FileTreeVisitor visitor ) { } } | visitor . preVisitDirectory ( directory ) ; File [ ] files = directory . listFiles ( ) ; if ( files != null ) { for ( File file : files ) { if ( file . isDirectory ( ) ) { walkFileTree ( file , visitor ) ; } else { visitor . visitFile ( file ) ; } } } visitor . postVisitDirectory ( directory ) ; |
public class GDiscreteFourierTransformOps { /** * Computes the magnitude of the complex image : < br >
* magnitude = sqrt ( real < sup > 2 < / sup > + imaginary < sup > 2 < / sup > )
* @ param transform ( Input ) Complex interleaved image
* @ param magnitude ( Output ) Magnitude of image */
public static void magnitude ( ImageInterleaved transform , GrayF magnitude ) { } } | if ( transform instanceof InterleavedF32 ) { DiscreteFourierTransformOps . magnitude ( ( InterleavedF32 ) transform , ( GrayF32 ) magnitude ) ; } else if ( transform instanceof InterleavedF64 ) { DiscreteFourierTransformOps . magnitude ( ( InterleavedF64 ) transform , ( GrayF64 ) magnitude ) ; } else { throw new IllegalArgumentException ( "Unknown image type" ) ; } |
public class ApiUtilDAODefaultImpl {

    /** GA: add synchronized */
    public synchronized void remove_async_request(final int id) {
        // Try to destroy the Request object (added by PV 7/9/06)
        final AsyncCallObject aco = async_request_table.get(id);
        if (aco != null) {
            // Drop any replies still pending for this request before
            // detaching it from the ORB and removing the table entry.
            removePendingRepliesOfRequest(aco.request);
            ((org.jacorb.orb.ORB) ApiUtil.getOrb()).removeRequest(aco.request);
            async_request_table.remove(id);
        }
    }
}
public class ISID { /** * This method checks , if all fields are valid . In these cases an exception will be thrown .
* @ throws InternetSCSIException If the integrity is violated . */
protected final void checkIntegrity ( ) throws InternetSCSIException { } } | String exceptionMessage = "" ; switch ( t ) { case OUI_FORMAT : break ; case IANA_ENTERPRISE_NUMBER : break ; case RANDOM : // if ( d ! = 0 ) {
// exceptionMessage = " The D field is reserved in this ISID Format . " ;
break ; case RESERVED : if ( a != 0 && b != 0 && c != 0 && d != 0 ) { exceptionMessage = "This ISID is not valid. All" ; } break ; default : exceptionMessage = "This format is not supported." ; } if ( exceptionMessage . length ( ) > 0 ) { throw new InternetSCSIException ( exceptionMessage ) ; } else { // no error occured . . . Nice ! : - )
} |
public class JavaAgent { /** * an agent can be started in a VM as a javaagent allowing it to be embedded with some other main app */
public static void premain ( String args ) { } } | if ( args == null ) { args = "config=config.yaml" ; } try { start ( args . split ( "," ) ) ; } catch ( Exception e ) { System . err . println ( "Hawkular Java Agent failed at startup" ) ; e . printStackTrace ( System . err ) ; } |
public class LegacySpy {

    /**
     * Alias for {@link #expectBetween(int, int, Threads, Query)} with arguments
     * {@code allowedStatements}, {@link Integer#MAX_VALUE}, {@code threads}, {@link Query#ANY}.
     *
     * @param allowedStatements minimum number of statements expected
     * @param threadMatcher     threads the expectation applies to
     * @return the result of registering a min-queries expectation for the given threads
     * @since 2.0
     */
    @Deprecated
    public C expectAtLeast(int allowedStatements, Threads threadMatcher) {
        return expect(SqlQueries.minQueries(allowedStatements).threads(threadMatcher));
    }
}
public class Application { /** * < p class = " changed _ added _ 2_0 " > < span
* class = " changed _ modified _ 2_2 " > Install < / span > the listener instance
* referenced by argument < code > listener < / code > into the application
* as a listener for events of type < code > systemEventClass < / code >
* that originate from objects of type < code > sourceClass < / code > . < / p >
* < div class = " changed _ added _ 2_0 " >
* < p > If argument < code > sourceClass < / code > is non - < code > null < / code > ,
* < code > sourceClass < / code > and < code > systemEventClass < / code > must be
* used to store the argument < code > listener < / code > in the application in
* such a way that the < code > listener < / code > can be quickly looked
* up by the implementation of { @ link # publishEvent } given
* < code > systemEventClass < / code > and an instance of the
* < code > Class < / code > referenced by < code > sourceClass < / code > . If
* argument < code > sourceClass < / code > is < code > null < / code > , the
* < code > listener < / code > must be discoverable by the implementation
* of { @ link # publishEvent } given only < code > systemEventClass < / code > .
* < / div >
* < div class = " changed _ added _ 2_2 " >
* < p > It is valid to call this method < strong > during < / strong > the
* processing of an event which was subscribed to by a previous call
* to this method . < / p >
* < / div >
* @ param systemEventClass the < code > Class < / code > of event for which
* < code > listener < / code > must be fired .
* @ param sourceClass the < code > Class < / code > of the instance which
* causes events of type < code > systemEventClass < / code > to be fired .
* May be < code > null < / code > .
* @ param listener the implementation of { @ link
* javax . faces . event . SystemEventListener } whose { @ link
* javax . faces . event . SystemEventListener # processEvent } method must be called when
* events of type < code > systemEventClass < / code > are fired .
* @ throws < code > NullPointerException < / code > if any combination of
* < code > systemEventClass < / code > , or < code > listener < / code > are
* < code > null < / code > .
* @ since 2.0 */
public void subscribeToEvent ( Class < ? extends SystemEvent > systemEventClass , Class < ? > sourceClass , SystemEventListener listener ) { } } | if ( defaultApplication != null ) { defaultApplication . subscribeToEvent ( systemEventClass , sourceClass , listener ) ; } else { throw new UnsupportedOperationException ( ) ; } |
public class FTPClient { /** * Deletes the remote file . */
public void deleteFile ( String filename ) throws IOException , ServerException { } } | if ( filename == null ) { throw new IllegalArgumentException ( "Required argument missing" ) ; } Command cmd = new Command ( "DELE" , filename ) ; try { controlChannel . execute ( cmd ) ; } catch ( FTPReplyParseException rpe ) { throw ServerException . embedFTPReplyParseException ( rpe ) ; } catch ( UnexpectedReplyCodeException urce ) { throw ServerException . embedUnexpectedReplyCodeException ( urce , "Server refused deleting file" ) ; } |
public class IndexTaskClient {

    /**
     * Sends an HTTP request to the task of the specified {@code taskId} and returns a
     * response if it succeeded. Verifies the task is runnable and has a known location
     * before each attempt; on IO/channel failure it retries per the configured retry
     * policy (indefinitely on a worker/task-ID mismatch) when {@code retry} is true.
     */
    private FullResponseHolder submitRequest(
        String taskId,
        @Nullable String mediaType, // nullable if content is empty
        HttpMethod method,
        String encodedPathSuffix,
        @Nullable String encodedQueryString,
        byte[] content,
        boolean retry
    ) throws IOException, ChannelException, NoTaskLocationException {
        final RetryPolicy retryPolicy = retryPolicyFactory.makeRetryPolicy();
        while (true) {
            String path = StringUtils.format("%s/%s/%s", BASE_PATH, StringUtils.urlEncode(taskId), encodedPathSuffix);
            // Abort immediately if the task is gone or no longer runnable.
            Optional<TaskStatus> status = taskInfoProvider.getTaskStatus(taskId);
            if (!status.isPresent() || !status.get().isRunnable()) {
                throw new TaskNotRunnableException(StringUtils.format("Aborting request because task [%s] is not runnable", taskId));
            }
            final TaskLocation location = taskInfoProvider.getTaskLocation(taskId);
            if (location.equals(TaskLocation.unknown())) {
                throw new NoTaskLocationException(StringUtils.format("No TaskLocation available for task [%s]", taskId));
            }
            final Request request = createRequest(taskId, location, path, encodedQueryString, method, mediaType, content);
            FullResponseHolder response = null;
            try {
                // Netty throws some annoying exceptions if a connection can't be opened, which happens relatively frequently
                // for tasks that happen to still be starting up, so test the connection first to keep the logs clean.
                checkConnection(request.getUrl().getHost(), request.getUrl().getPort());
                response = submitRequest(request);
                int responseCode = response.getStatus().getCode();
                if (responseCode / 100 == 2) {
                    return response;
                } else if (responseCode == 400) {
                    // don't bother retrying if it's a bad request
                    throw new IAE("Received 400 Bad Request with body: %s", response.getContent());
                } else {
                    throw new IOE("Received status [%d] and content [%s]", responseCode, response.getContent());
                }
            } catch (IOException | ChannelException e) {
                // Since workers are free to move tasks around to different ports, there is a chance that a task may have been
                // moved but our view of its location has not been updated yet from ZK. To detect this case, we send a header
                // identifying our expected recipient in the request; if this doesn't correspond to the worker we messaged, the
                // worker will return an HTTP 404 with its ID in the response header. If we get a mismatching task ID, then
                // we will wait for a short period then retry the request indefinitely, expecting the task's location to
                // eventually be updated.
                final Duration delay;
                if (response != null && response.getStatus().equals(HttpResponseStatus.NOT_FOUND)) {
                    String headerId = StringUtils.urlDecode(response.getResponse().headers().get(ChatHandlerResource.TASK_ID_HEADER));
                    if (headerId != null && !headerId.equals(taskId)) {
                        log.warn("Expected worker to have taskId [%s] but has taskId [%s], will retry in [%d]s",
                                 taskId, headerId, TASK_MISMATCH_RETRY_DELAY_SECONDS);
                        delay = Duration.standardSeconds(TASK_MISMATCH_RETRY_DELAY_SECONDS);
                    } else {
                        delay = retryPolicy.getAndIncrementRetryDelay();
                    }
                } else {
                    delay = retryPolicy.getAndIncrementRetryDelay();
                }
                final String urlForLog = request.getUrl().toString();
                if (!retry) {
                    // if retry = false, we probably aren't too concerned if the operation doesn't succeed (i.e. the request was
                    // for informational purposes only) so don't log a scary stack trace
                    log.info("submitRequest failed for [%s], with message [%s]", urlForLog, e.getMessage());
                    throw e;
                } else if (delay == null) {
                    // A null delay means the retry policy has been exhausted.
                    log.warn(e, "Retries exhausted for [%s], last exception:", urlForLog);
                    throw e;
                } else {
                    try {
                        final long sleepTime = delay.getMillis();
                        log.debug("Bad response HTTP [%s] from [%s]; will try again in [%s] (body/exception: [%s])",
                                  (response != null ? response.getStatus().getCode() : "no response"),
                                  urlForLog,
                                  new Duration(sleepTime).toString(),
                                  (response != null ? response.getContent() : e.getMessage()));
                        Thread.sleep(sleepTime);
                    } catch (InterruptedException e2) {
                        // Restore the interrupt flag and surface the original failure.
                        Thread.currentThread().interrupt();
                        e.addSuppressed(e2);
                        throw new RuntimeException(e);
                    }
                }
            } catch (NoTaskLocationException e) {
                log.info("No TaskLocation available for task [%s], this task may not have been assigned to a worker yet or "
                         + "may have already completed", taskId);
                throw e;
            } catch (Exception e) {
                log.warn(e, "Exception while sending request");
                throw e;
            }
        }
    }
}
public class EncodingUtils {

    /**
     * Is the JCE unlimited-strength policy installed?
     * Probes the maximum allowed AES key length to find out.
     *
     * @return true when unlimited-strength cryptography is available,
     *         false otherwise (including when AES cannot be resolved)
     */
    public static boolean isJceInstalled() {
        try {
            // The unlimited policy reports Integer.MAX_VALUE for AES.
            return Cipher.getMaxAllowedKeyLength("AES") == Integer.MAX_VALUE;
        } catch (final NoSuchAlgorithmException e) {
            return false;
        }
    }
}
public class CassandraSchemaManager {

    /**
     * Checks if is counter column type.
     *
     * @param tableInfo
     *            the table info
     * @param defaultValidationClass
     *            the default validation class
     * @return true, if is counter column type
     */
    private boolean isCounterColumnType(TableInfo tableInfo, String defaultValidationClass) {
        // True when the schema metadata marks the table as a counter column,
        // or the validation class names CounterColumnType (simple or fully
        // qualified name), or the table's own type equals the simple name.
        // NOTE(review): due to &&/|| precedence, the tableInfo.getType()
        // comparison is NOT guarded by the defaultValidationClass null
        // check — it is evaluated on its own. Confirm this grouping is
        // intentional before restructuring.
        return (csmd != null && csmd.isCounterColumn(databaseName, tableInfo.getTableName()))
               || (defaultValidationClass != null
                   && (defaultValidationClass.equalsIgnoreCase(CounterColumnType.class.getSimpleName())
                       || defaultValidationClass.equalsIgnoreCase(CounterColumnType.class.getName()))
                   || (tableInfo.getType().equals(CounterColumnType.class.getSimpleName())));
    }
}
public class ServletSupport { /** * Dispatches http - request to url
* @ param url
* @ throws ServletException to abort request handling */
public static void forward ( String url , ServletRequest req , ServletResponse res ) throws ServletException , IOException { } } | RequestDispatcher dispatch = req . getRequestDispatcher ( url ) ; System . out . println ( new LogEntry ( "about to forward to " + url ) ) ; dispatch . forward ( req , res ) ; |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.