signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class IndexPreprocessor { /** * Processes index string and creates nodes with " prefix " in given " namespace _ url " from the parsed index entry text . * @ param theIndexString index string * param contents index contents * @ param theTargetDocument target document to create new nodes * @ param theIndexEntryFoundListener listener to notify that new index entry was found * @ return the array of nodes after processing index string */ private Node [ ] processIndexString ( final String theIndexString , final List < Node > contents , final Document theTargetDocument , final IndexEntryFoundListener theIndexEntryFoundListener ) { } }
final IndexEntry [ ] indexEntries = IndexStringProcessor . processIndexString ( theIndexString , contents ) ; for ( final IndexEntry indexEntrie : indexEntries ) { theIndexEntryFoundListener . foundEntry ( indexEntrie ) ; } return transformToNodes ( indexEntries , theTargetDocument , null ) ;
public class HtmlRewriter { /** * / * Rewrites the file to contain html tags */ public static void rewrite ( SoyFileNode file , IdGenerator nodeIdGen , ErrorReporter errorReporter ) { } }
new Visitor ( nodeIdGen , file . getFilePath ( ) , errorReporter ) . exec ( file ) ;
public class AbstractAmazonSNSAsync { /** * Simplified method form for invoking the SetTopicAttributes operation . * @ see # setTopicAttributesAsync ( SetTopicAttributesRequest ) */ @ Override public java . util . concurrent . Future < SetTopicAttributesResult > setTopicAttributesAsync ( String topicArn , String attributeName , String attributeValue ) { } }
return setTopicAttributesAsync ( new SetTopicAttributesRequest ( ) . withTopicArn ( topicArn ) . withAttributeName ( attributeName ) . withAttributeValue ( attributeValue ) ) ;
public class DeviceProxyDAODefaultImpl {
    /**
     * Reads the configuration of all pipes of the given device.
     * Pipes require IDL version 5 or later on the device.
     *
     * @param deviceProxy the device to query
     * @return info for every pipe of the device; empty when the server does not implement pipes
     * @throws DevFailed if the device uses an IDL version below 5, or the server reports an error
     */
    public List<PipeInfo> getPipeConfig(DeviceProxy deviceProxy) throws DevFailed {
        build_connection(deviceProxy);
        // Pipes only exist from IDL version 5 onwards.
        if (deviceProxy.idl_version < 5)
            Except.throw_exception("TangoApi_NOT_SUPPORTED", "Pipe not supported in IDL " + deviceProxy.idl_version);
        ArrayList<PipeInfo> infoList = new ArrayList<PipeInfo>();
        boolean done = false;
        // One extra attempt when transparent reconnection is enabled.
        final int retries = deviceProxy.transparent_reconnection ? 2 : 1;
        for (int tr = 0; tr < retries && !done; tr++) {
            try {
                // "All pipes" is passed as the pipe-name filter to the IDL-5 call.
                PipeConfig[] configurations = deviceProxy.device_5.get_pipe_config_5(new String[]{"All pipes"});
                for (PipeConfig configuration : configurations) {
                    infoList.add(new PipeInfo(configuration));
                }
                done = true;
            } catch (final DevFailed e) {
                // Device-level failures are propagated unchanged.
                throw e;
            } catch (final Exception e) {
                if (e.toString().startsWith("org.omg.CORBA.UNKNOWN: Server-side")) {
                    // Server does not implement pipe features
                    return infoList;
                }
                // Attempt a reconnection; presumably rethrows once the retry budget
                // is exhausted — confirm against manageExceptionReconnection.
                manageExceptionReconnection(deviceProxy, retries, tr, e, this.getClass() + ".DeviceProxy.getPipeConfig");
            }
        }
        return infoList;
    }
}
public class TransactionManagerImpl { /** * { @ inheritDoc } */ public Transaction suspend ( ) throws SystemException { } }
Transaction tx = registry . getTransaction ( ) ; registry . assignTransaction ( null ) ; return tx ;
public class BackPropagationNet { /** * Creates the weights for the hidden layers and output layer * @ param rand source of randomness */ private void setUp ( Random rand ) { } }
Ws = new ArrayList < > ( npl . length ) ; bs = new ArrayList < > ( npl . length ) ; // First Hiden layer takes input raw DenseMatrix W = new DenseMatrix ( npl [ 0 ] , inputSize ) ; Vec b = new DenseVector ( W . rows ( ) ) ; initializeWeights ( W , rand ) ; initializeWeights ( b , W . cols ( ) , rand ) ; Ws . add ( W ) ; bs . add ( b ) ; // Other Hiden Layers Layers for ( int i = 1 ; i < npl . length ; i ++ ) { W = new DenseMatrix ( npl [ i ] , npl [ i - 1 ] ) ; b = new DenseVector ( W . rows ( ) ) ; initializeWeights ( W , rand ) ; initializeWeights ( b , W . cols ( ) , rand ) ; Ws . add ( W ) ; bs . add ( b ) ; } // Output layer W = new DenseMatrix ( outputSize , npl [ npl . length - 1 ] ) ; b = new DenseVector ( W . rows ( ) ) ; initializeWeights ( W , rand ) ; initializeWeights ( b , W . cols ( ) , rand ) ; Ws . add ( W ) ; bs . add ( b ) ;
public class Ginv { /** * This routine performs the matrix multiplication . The final matrix size is * taken from the rows of the left matrix and the columns of the right * matrix . The timesInner is the minimum of the left columns and the right * rows . * @ param matrix1 * the first matrix * @ param matrix2 * the second matrix * @ param timesInner * number of rows / columns to process * @ return product of the two matrices */ public static double [ ] [ ] times ( double [ ] [ ] matrix1 , double [ ] [ ] matrix2 , int timesInner ) { } }
int timesRows = matrix1 . length ; int timesCols = matrix2 [ 0 ] . length ; double [ ] [ ] response = new double [ timesRows ] [ timesCols ] ; for ( int row = 0 ; row < timesRows ; row ++ ) { for ( int col = 0 ; col < timesCols ; col ++ ) { for ( int inner = 0 ; inner < timesInner ; inner ++ ) { response [ row ] [ col ] = matrix1 [ row ] [ inner ] * matrix2 [ inner ] [ col ] + response [ row ] [ col ] ; } } } return response ;
public class AbstractControllerServer { /** * @ param name * @ return * @ throws NotAvailableException */ protected final Object getDataField ( String name ) throws NotAvailableException { } }
try { MB dataClone = cloneDataBuilder ( ) ; Descriptors . FieldDescriptor findFieldByName = dataClone . getDescriptorForType ( ) . findFieldByName ( name ) ; if ( findFieldByName == null ) { throw new NotAvailableException ( "Field[" + name + "] does not exist for type " + dataClone . getClass ( ) . getName ( ) ) ; } return dataClone . getField ( findFieldByName ) ; } catch ( Exception ex ) { throw new NotAvailableException ( name , this , ex ) ; }
public class CmsTagReplaceSettings { /** * Sets the path under which files will be processed recursively . * @ param workPath the path under which files will be processed recursively . * @ throws CmsIllegalArgumentException if the argument is not valid . */ public void setWorkPath ( String workPath ) throws CmsIllegalArgumentException { } }
if ( CmsStringUtil . isEmptyOrWhitespaceOnly ( workPath ) ) { throw new CmsIllegalArgumentException ( Messages . get ( ) . container ( Messages . GUI_ERR_WIDGETVALUE_EMPTY_0 ) ) ; } // test if it is a valid path : if ( ! m_cms . existsResource ( workPath ) ) { throw new CmsIllegalArgumentException ( Messages . get ( ) . container ( Messages . GUI_ERR_TAGREPLACE_WORKPATH_1 , workPath ) ) ; } m_workPath = workPath ;
public class ObjectFactory {
    /**
     * Create an instance of {@link JAXBElement}{@code <}{@link BigInteger}{@code >}
     * for the "depth" element scoped to {@link GetFolderTree}.
     *
     * @param value the depth value to wrap
     * @return the wrapping JAXBElement
     */
    @XmlElementDecl(namespace = "http://docs.oasis-open.org/ns/cmis/messaging/200908/", name = "depth", scope = GetFolderTree.class)
    public JAXBElement<BigInteger> createGetFolderTreeDepth(BigInteger value) {
        // NOTE(review): reuses _GetDescendantsDepth_QNAME for the GetFolderTree "depth"
        // element — presumably both elements share the same qualified name; confirm
        // against the schema that this is not a copy/paste of the wrong constant.
        return new JAXBElement<BigInteger>(_GetDescendantsDepth_QNAME, BigInteger.class, GetFolderTree.class, value);
    }
}
public class CacheProviderWrapper { /** * This returns the cache entry identified by the specified cache id . * It returns null if not in the cache . * @ param id The cache id for the entry . The id cannot be null . * @ param source The source - local or remote ( No effect on CoreCache ) * @ param ignoreCounting true to ignore the statistics counting ( No effect on CoreCache ) * @ param incrementRefCount true to increment the refCount of the entry ( No effect on CoreCache ) * @ return The entry identified by the cache id . */ @ Override public com . ibm . websphere . cache . CacheEntry getEntry ( Object id , int source , boolean ignoreCounting , boolean incrementRefCount ) { } }
final String methodName = "getEntry()" ; com . ibm . websphere . cache . CacheEntry ce = this . coreCache . get ( id ) ; if ( tc . isDebugEnabled ( ) ) { Tr . debug ( tc , methodName + " cacheName=" + cacheName + " id=" + id + " cacheEntry=" + ce ) ; } return ce ;
public class AbstractBuffer {
    /**
     * Resets the buffer's internal offset and capacity, clears all references,
     * and rewinds the position.
     *
     * @param offset the new base offset
     * @param capacity the new initial capacity
     * @param maxCapacity the new maximum capacity
     * @return this buffer, for chaining
     */
    protected AbstractBuffer reset(long offset, long capacity, long maxCapacity) {
        this.offset = offset;
        // Capacity is zeroed first so the capacity(...) call below performs a
        // fresh grow up to the requested initial capacity.
        this.capacity = 0;
        this.initialCapacity = capacity;
        this.maxCapacity = maxCapacity;
        capacity(initialCapacity);
        // Drop the reference count back to zero.
        references.set(0);
        rewind();
        return this;
    }
}
public class Unchecked { /** * Wrap a { @ link CheckedLongToIntFunction } in a { @ link LongToIntFunction } with a custom handler for checked exceptions . * Example : * < code > < pre > * LongStream . of ( 1L , 2L , 3L ) . mapToInt ( Unchecked . longToIntFunction ( * if ( l & lt ; 0L ) * throw new Exception ( " Only positive numbers allowed " ) ; * return ( int ) l ; * throw new IllegalStateException ( e ) ; * < / pre > < / code > */ public static LongToIntFunction longToIntFunction ( CheckedLongToIntFunction function , Consumer < Throwable > handler ) { } }
return t -> { try { return function . applyAsInt ( t ) ; } catch ( Throwable e ) { handler . accept ( e ) ; throw new IllegalStateException ( "Exception handler must throw a RuntimeException" , e ) ; } } ;
public class AmazonApiGatewayClient { /** * Gets the usage data of a usage plan in a specified time interval . * @ param getUsageRequest * The GET request to get the usage data of a usage plan in a specified time interval . * @ return Result of the GetUsage operation returned by the service . * @ throws BadRequestException * The submitted request is not valid , for example , the input is incomplete or incorrect . See the * accompanying error message for details . * @ throws UnauthorizedException * The request is denied because the caller has insufficient permissions . * @ throws NotFoundException * The requested resource is not found . Make sure that the request URI is correct . * @ throws TooManyRequestsException * The request has reached its throttling limit . Retry after the specified time period . * @ sample AmazonApiGateway . GetUsage */ @ Override public GetUsageResult getUsage ( GetUsageRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeGetUsage ( request ) ;
public class WebAppParser {
    /**
     * Returns the trimmed text content of an element, or null if the element is
     * null or has no text content.
     *
     * @param element the element whose content should be retrieved
     * @return trimmed text content of the element, or null
     */
    private static String getTextContent(final Element element) {
        if (element == null) {
            return null;
        }
        final String content = element.getTextContent();
        return content == null ? content : content.trim();
    }
}
public class GobblinHadoopJob { /** * Factory method that provides IPropertiesValidator based on preset in runtime . * Using factory method pattern as it is expected to grow . * @ return IPropertiesValidator */ private static IPropertiesValidator getValidator ( GobblinPresets preset ) { } }
Objects . requireNonNull ( preset ) ; switch ( preset ) { case MYSQL_TO_HDFS : return new MySqlToHdfsValidator ( ) ; case HDFS_TO_MYSQL : return new HdfsToMySqlValidator ( ) ; default : throw new UnsupportedOperationException ( "Preset " + preset + " is not supported" ) ; }
public class ApiOvhEmaildomain { /** * Alter this object properties * REST : PUT / email / domain / { domain } / mailingList / { name } * @ param body [ required ] New object properties * @ param domain [ required ] Name of your domain name * @ param name [ required ] Name of mailing list */ public void domain_mailingList_name_PUT ( String domain , String name , OvhMailingList body ) throws IOException { } }
String qPath = "/email/domain/{domain}/mailingList/{name}" ; StringBuilder sb = path ( qPath , domain , name ) ; exec ( qPath , "PUT" , sb . toString ( ) , body ) ;
public class ArtistView { /** * Set the { @ link SoundCloudUser } used as model . * @ param artist user used as artist . */ public void setModel ( SoundCloudUser artist ) { } }
mModel = artist ; if ( mModel != null ) { Picasso . with ( getContext ( ) ) . load ( SoundCloudArtworkHelper . getCoverUrl ( mModel , SoundCloudArtworkHelper . XLARGE ) ) . fit ( ) . centerInside ( ) . into ( mAvatar ) ; mArtistName . setText ( mModel . getFullName ( ) ) ; mTracks . setText ( String . format ( getResources ( ) . getString ( R . string . artist_view_track_count ) , mModel . getTrackCount ( ) ) ) ; mDescription . setText ( Html . fromHtml ( mModel . getDescription ( ) ) ) ; this . setVisibility ( VISIBLE ) ; }
public class SShape { /** * Computes the membership function evaluated at ` x ` * @ param x * @ return ` \ begin { cases } 0h & \ mbox { if $ x \ leq s $ } \ cr h ( 2 \ left ( ( x - s ) / * ( e - s ) \ right ) ^ 2 ) & \ mbox { if $ x \ leq 0.5 ( s + e ) $ } \ cr h ( 1 - 2 \ left ( ( x - e ) / * ( e - s ) \ right ) ^ 2 ) & \ mbox { if $ x < e $ } \ cr 1h & \ mbox { otherwise } \ end { cases } ` * where ` h ` is the height of the Term , ` s ` is the start of the * SShape , ` e ` is the end of the SShape . */ @ Override public double membership ( double x ) { } }
if ( Double . isNaN ( x ) ) { return Double . NaN ; } if ( Op . isLE ( x , start ) ) { return height * 0.0 ; } else if ( Op . isLE ( x , 0.5 * ( start + end ) ) ) { return height * 2.0 * Math . pow ( ( x - start ) / ( end - start ) , 2 ) ; } else if ( Op . isLt ( x , end ) ) { return height * ( 1.0 - 2.0 * Math . pow ( ( x - end ) / ( end - start ) , 2 ) ) ; } return height * 1.0 ;
public class Transformation3D {
    /**
     * Calculates the inverse transformation.
     *
     * @param src the input transformation
     * @param result receives the inverse of the input transformation
     * @throws GeometryException ("math singularity") if the inverse cannot be calculated
     */
    public static void inverse(Transformation3D src, Transformation3D result) {
        // Determinant of the 3x3 linear part, expanded along the first row.
        double det = src.xx * (src.yy * src.zz - src.zy * src.yz) - src.yx * (src.xy * src.zz - src.zy * src.xz) + src.zx * (src.xy * src.yz - src.yy * src.xz);
        if (det != 0) {
            double xx, yx, zx;
            double xy, yy, zy;
            double xz, yz, zz;
            double xd, yd, zd;
            double det_1 = 1.0 / det;
            // Inverse of the linear part: adjugate (cofactors) scaled by 1/det.
            xx = (src.yy * src.zz - src.zy * src.yz) * det_1;
            xy = -(src.xy * src.zz - src.zy * src.xz) * det_1;
            xz = (src.xy * src.yz - src.yy * src.xz) * det_1;
            yx = -(src.yx * src.zz - src.yz * src.zx) * det_1;
            yy = (src.xx * src.zz - src.zx * src.xz) * det_1;
            yz = -(src.xx * src.yz - src.yx * src.xz) * det_1;
            zx = (src.yx * src.zy - src.zx * src.yy) * det_1;
            zy = -(src.xx * src.zy - src.zx * src.xy) * det_1;
            zz = (src.xx * src.yy - src.yx * src.xy) * det_1;
            // Inverse translation: -(inverse linear part applied to the translation).
            xd = -(src.xd * xx + src.yd * xy + src.zd * xz);
            yd = -(src.xd * yx + src.yd * yy + src.zd * yz);
            zd = -(src.xd * zx + src.yd * zy + src.zd * zz);
            // All components are computed into locals before any write to 'result',
            // presumably so that src and result may be the same object — confirm.
            result.xx = xx;
            result.yx = yx;
            result.zx = zx;
            result.xy = xy;
            result.yy = yy;
            result.zy = zy;
            result.xz = xz;
            result.yz = yz;
            result.zz = zz;
            result.xd = xd;
            result.yd = yd;
            result.zd = zd;
        } else {
            throw new GeometryException("math singularity");
        }
    }
}
public class ResourceIndexModule { /** * Shutdown the RI module by closing the wrapped ResourceIndex . * @ throws ModuleShutdownException * if any error occurs while closing . */ @ Override public void shutdownModule ( ) throws ModuleShutdownException { } }
if ( _ri != null ) { try { _ri . close ( ) ; } catch ( TrippiException e ) { throw new ModuleShutdownException ( "Error closing RI" , getRole ( ) , e ) ; } }
public class Expressions { /** * Create a new Template expression * @ deprecated Use { @ link # dslTemplate ( Class , Template , List ) } instead . * @ param cl type of expression * @ param template template * @ param args template parameters * @ return template expression */ @ Deprecated public static < T > DslTemplate < T > dslTemplate ( Class < ? extends T > cl , Template template , ImmutableList < ? > args ) { } }
return new DslTemplate < T > ( cl , template , args ) ;
public class AppCacheLinker { /** * Determines whether our not the given should be included in the app cache * manifest . Subclasses may override this method in order to filter out * specific file patterns . * @ param path the path of the resource being considered * @ return true if the file should be included in the manifest */ protected boolean accept ( String path ) { } }
// GWT Development Mode files if ( path . equals ( "hosted.html" ) || path . endsWith ( ".devmode.js" ) ) { return false ; } // Default or welcome file if ( path . equals ( "/" ) ) { return true ; } // Whitelisted file extension int pos = path . lastIndexOf ( '.' ) ; if ( pos != - 1 ) { String extension = path . substring ( pos + 1 ) ; if ( DEFAULT_EXTENSION_WHITELIST . contains ( extension ) ) { return true ; } } // Not included by default return false ;
public class AppServiceEnvironmentsInner { /** * Get metrics for a specific instance of a worker pool of an App Service Environment . * Get metrics for a specific instance of a worker pool of an App Service Environment . * @ param nextPageLink The NextLink from the previous successful call to List operation . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ throws CloudException thrown if the request is rejected by server * @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @ return the PagedList & lt ; ResourceMetricInner & gt ; object if successful . */ public PagedList < ResourceMetricInner > listWorkerPoolInstanceMetricsNext ( final String nextPageLink ) { } }
ServiceResponse < Page < ResourceMetricInner > > response = listWorkerPoolInstanceMetricsNextSinglePageAsync ( nextPageLink ) . toBlocking ( ) . single ( ) ; return new PagedList < ResourceMetricInner > ( response . body ( ) ) { @ Override public Page < ResourceMetricInner > nextPage ( String nextPageLink ) { return listWorkerPoolInstanceMetricsNextSinglePageAsync ( nextPageLink ) . toBlocking ( ) . single ( ) . body ( ) ; } } ;
public class ModelsImpl {
    /**
     * Updates the closed list model.
     *
     * @param appId The application ID.
     * @param versionId The version ID.
     * @param clEntityId The closed list model ID.
     * @param closedListModelUpdateObject The new entity name and words list.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<OperationStatus> updateClosedListAsync(UUID appId, String versionId, UUID clEntityId, ClosedListModelUpdateObject closedListModelUpdateObject, final ServiceCallback<OperationStatus> serviceCallback) {
        // Bridge the Observable-based implementation to a ServiceFuture plus callback.
        return ServiceFuture.fromResponse(updateClosedListWithServiceResponseAsync(appId, versionId, clEntityId, closedListModelUpdateObject), serviceCallback);
    }
}
public class P2sVpnGatewaysInner {
    /**
     * Creates a virtual wan p2s vpn gateway if it doesn't exist, else updates the existing gateway.
     *
     * @param resourceGroupName The resource group name of the P2SVpnGateway.
     * @param gatewayName The name of the gateway.
     * @param p2SVpnGatewayParameters Parameters supplied to create or update a virtual wan p2s vpn gateway.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<P2SVpnGatewayInner> createOrUpdateAsync(String resourceGroupName, String gatewayName, P2SVpnGatewayInner p2SVpnGatewayParameters, final ServiceCallback<P2SVpnGatewayInner> serviceCallback) {
        // Bridge the Observable-based implementation to a ServiceFuture plus callback.
        return ServiceFuture.fromResponse(createOrUpdateWithServiceResponseAsync(resourceGroupName, gatewayName, p2SVpnGatewayParameters), serviceCallback);
    }
}
public class DSLMapWalker {
    /**
     * Parses one DSL mapping entry from the tree:
     * ^( VT_ENTRY scope_section (meta_section)? key_section (value_section)? ).
     * ANTLR-generated from DSLMapWalker.g:34:1 — do not hand-edit the control flow.
     *
     * @return the populated mapping entry (also registered on the enclosing mapping file),
     *         or null when recognition failed and was recovered from
     * @throws RecognitionException if tree recognition fails outside the recovered path
     */
    public final DSLMappingEntry entry() throws RecognitionException {
        // Push a fresh scope holding the entry under construction and its text buffers.
        entry_stack.push(new entry_scope());
        DSLMappingEntry mappingEntry = null;
        entry_stack.peek().retval = new AntlrDSLMappingEntry();
        entry_stack.peek().variables = new HashMap<String, Integer>();
        entry_stack.peek().keybuffer = new StringBuilder();
        entry_stack.peek().valuebuffer = new StringBuilder();
        entry_stack.peek().sentenceKeyBuffer = new StringBuilder();
        entry_stack.peek().sentenceValueBuffer = new StringBuilder();
        try {
            // ^( VT_ENTRY scope_section (meta_section)? key_section (value_section)? )
            {
                match(input, VT_ENTRY, FOLLOW_VT_ENTRY_in_entry130);
                match(input, Token.DOWN, null);
                pushFollow(FOLLOW_scope_section_in_entry132);
                scope_section();
                state._fsp--;
                // Optional meta_section: taken only when the lookahead is VT_META.
                int alt2 = 2;
                int LA2_0 = input.LA(1);
                if ((LA2_0 == VT_META)) {
                    alt2 = 1;
                }
                switch (alt2) {
                    case 1:
                        {
                            pushFollow(FOLLOW_meta_section_in_entry134);
                            meta_section();
                            state._fsp--;
                        }
                        break;
                }
                pushFollow(FOLLOW_key_section_in_entry137);
                key_section();
                state._fsp--;
                // Transfer the collected key-side data into the entry.
                entry_stack.peek().retval.setVariables(entry_stack.peek().variables);
                entry_stack.peek().retval.setMappingKey(entry_stack.peek().sentenceKeyBuffer.toString());
                entry_stack.peek().retval.setKeyPattern(entry_stack.peek().keybuffer.toString());
                // Optional value_section: taken only when the lookahead is VT_ENTRY_VAL.
                int alt3 = 2;
                int LA3_0 = input.LA(1);
                if ((LA3_0 == VT_ENTRY_VAL)) {
                    alt3 = 1;
                }
                switch (alt3) {
                    case 1:
                        {
                            pushFollow(FOLLOW_value_section_in_entry161);
                            value_section();
                            state._fsp--;
                        }
                        break;
                }
                match(input, Token.UP, null);
                // Transfer the collected value-side data and register the finished entry.
                entry_stack.peek().retval.setMappingValue(entry_stack.peek().sentenceValueBuffer.toString());
                entry_stack.peek().retval.setValuePattern(entry_stack.peek().valuebuffer.toString());
                mappingEntry = entry_stack.peek().retval;
                mapping_file_stack.peek().retval.addEntry(mappingEntry);
            }
        } catch (RecognitionException re) {
            reportError(re);
            recover(input, re);
        } finally {
            // do for sure before leaving
            entry_stack.pop();
        }
        return mappingEntry;
    }
}
public class Rollbar { /** * Record an error or message with extra data at the level specified . At least ene of ` error ` or * ` description ` must be non - null . If error is null , ` description ` will be sent as a message . If * error is non - null , description will be sent as the description of the error . Custom data will * be attached to message if the error is null . Custom data will extend whatever { @ link * Config # custom } returns . * @ param error the error ( if any ) . * @ param custom the custom data ( if any ) . * @ param description the description of the error , or the message to send . * @ param level the level to send it at . * @ param isUncaught whether or not this data comes from an uncaught exception . */ public void log ( Throwable error , Map < String , Object > custom , String description , Level level , boolean isUncaught ) { } }
RollbarThrowableWrapper rollbarThrowableWrapper = null ; if ( error != null ) { rollbarThrowableWrapper = new RollbarThrowableWrapper ( error ) ; } this . log ( rollbarThrowableWrapper , custom , description , level , isUncaught ) ;
public class CrcConcat {
    /**
     * Helper function to transform a CRC using a lookup table. Currently used
     * for calculating the CRC after appending zero bytes to a source byte
     * array; a special lookup table is passed in for that transformation.
     *
     * @param crc the CRC to transform
     * @param lookupTable first dimension selects which byte of the CRC it
     *        applies to; second dimension maps that byte to its contribution
     * @return the resulting CRC
     */
    static int transform(int crc, int[][] lookupTable) {
        // XOR together each byte's table contribution, least significant byte first.
        int result = 0;
        int remaining = crc;
        for (int byteIndex = 0; byteIndex < 4; byteIndex++) {
            result ^= lookupTable[byteIndex][remaining & 0xff];
            remaining >>>= 8;
        }
        return result;
    }
}
public class SerialArrayList { /** * Add all int . * @ param data the data * @ return the int */ public synchronized int addAll ( Collection < U > data ) { } }
int startIndex = length ( ) ; putAll ( data , startIndex ) ; return startIndex ;
public class StandardBullhornData {
    /**
     * {@inheritDoc}
     */
    @Override
    public FileWrapper updateFile(Class<? extends FileEntity> type, Integer entityId, FileMeta fileMeta) {
        // NOTE(review): builds the URI variables via getUriVariablesForAddFile and the
        // URL via assembleGetFileUrl even though this performs an update — confirm
        // these factory methods are the intended ones for the update endpoint.
        Map<String, String> uriVariables = restUriVariablesFactory.getUriVariablesForAddFile(BullhornEntityInfo.getTypesRestEntityName(type), entityId, fileMeta);
        String url = restUrlFactory.assembleGetFileUrl();
        CrudResponse response;
        try {
            // Serialize the file metadata and POST the update.
            String jsonString = restJsonConverter.convertEntityToJsonString((BullhornEntity) fileMeta);
            response = this.performPostRequest(url, jsonString, UpdateResponse.class, uriVariables);
        } catch (HttpStatusCodeException error) {
            // Convert HTTP 4xx/5xx failures into an error-bearing response object.
            response = restErrorHandler.handleHttpFourAndFiveHundredErrors(new UpdateResponse(), error, fileMeta.getId());
        }
        // NOTE(review): 'response' is never inspected before re-fetching the file
        // below, so errors captured above are effectively discarded — confirm this
        // is intentional.
        return this.handleGetFileContentWithMetaData(type, entityId, fileMeta.getId());
    }
}
public class ParserUtils {
    /**
     * Reads from the specified position in the line and returns how many
     * characters lie from {@code start} to the next occurrence of the
     * delimiter, counting the delimiter itself.
     *
     * @param line the text to scan
     * @param start the position to begin scanning from
     * @param delimiter the character to look for
     * @return the 1-based distance from {@code start} to the delimiter, or a
     *         negative value when the delimiter is absent
     */
    public static int getDelimiterOffset(final String line, final int start, final char delimiter) {
        final int found = line.indexOf(delimiter, start);
        return found < 0 ? found : found - start + 1;
    }
}
public class JMElasticsearchSearchAndCount {
    /**
     * Search all search response.
     *
     * @param indices the indices
     * @param types the types
     * @param filterQueryBuilder the filter query builder
     * @param aggregationBuilders the aggregation builders
     * @return the search response
     */
    public SearchResponse searchAll(String[] indices, String[] types, QueryBuilder filterQueryBuilder, AggregationBuilder[] aggregationBuilders) {
        // Delegates to the flag-taking overload with the flag hard-coded to false —
        // presumably "no scroll"; confirm against that overload's signature.
        return searchAll(false, indices, types, filterQueryBuilder, aggregationBuilders);
    }
}
public class ScalarizationUtils {
    /**
     * Scalarization values based on angle utility (see "Angle-based Preference
     * Models in Multi-objective Optimization" by Braun et al.). Extreme points
     * are computed from the list of solutions.
     *
     * @param solutionsList a list of solutions
     */
    public static <S extends Solution<?>> void angleUtility(List<S> solutionsList) {
        // Compute the extreme points from the solutions themselves and delegate
        // to the overload that accepts them explicitly.
        angleUtility(solutionsList, getExtremePoints(solutionsList));
    }
}
public class GSFLWImpl {
    /**
     * Resets the given feature to its default value.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void eUnset(int featureID) {
        switch (featureID) {
            case AfplibPackage.GSFLW__MH:
                setMH(MH_EDEFAULT);
                return;
            case AfplibPackage.GSFLW__MFR:
                setMFR(MFR_EDEFAULT);
                return;
        }
        // Any other feature is handled by the superclass.
        super.eUnset(featureID);
    }
}
public class CenteringTransform { /** * Change the coordinate system of { @ code x } from the * " centered " graphical coordinate system to the global document coordinate system . * @ param x the x graphical coordinate to convert . * @ return the x coordinate in the global document coordinate system . */ @ Pure public double toGlobalX ( double x ) { } }
final double adjustedX = x - this . translationX . get ( ) ; return this . invertX . get ( ) ? - adjustedX : adjustedX ;
public class Iterators2 { /** * Divides an iterator into unmodifiable sublists of equivalent elements . The iterator groups elements * in consecutive order , forming a new parition when the value from the provided function changes . For example , * grouping the iterator { @ code [ 1 , 3 , 2 , 4 , 5 ] } with a function grouping even and odd numbers * yields { @ code [ [ 1 , 3 ] , [ 2 , 4 ] , [ 5 ] } all in the original order . * < p > The returned lists implement { @ link java . util . RandomAccess } . */ public static < T > Iterator < List < T > > groupBy ( final Iterator < ? extends T > iterator , final Function < ? super T , ? > groupingFunction ) { } }
requireNonNull ( iterator ) ; requireNonNull ( groupingFunction ) ; return new AbstractIterator < List < T > > ( ) { private final PeekingIterator < T > peekingIterator = peekingIterator ( iterator ) ; @ Override protected List < T > computeNext ( ) { if ( ! peekingIterator . hasNext ( ) ) return endOfData ( ) ; Object key = groupingFunction . apply ( peekingIterator . peek ( ) ) ; List < T > group = new ArrayList < > ( ) ; do { group . add ( peekingIterator . next ( ) ) ; } while ( peekingIterator . hasNext ( ) && Objects . equals ( key , groupingFunction . apply ( peekingIterator . peek ( ) ) ) ) ; return unmodifiableList ( group ) ; } } ;
public class JournalOutputFile {
    /**
     * Write the document trailer, clean up everything and rename the file.
     * Set the flag saying we are closed. Calling close() on an already-closed
     * file is a no-op.
     */
    public void close() throws JournalException {
        synchronized (JournalWriter.SYNCHRONIZER) {
            // Already closed: nothing to do.
            if (!open) {
                return;
            }
            try {
                // Finish the XML document before releasing the writers.
                parent.getDocumentTrailer(xmlWriter);
                xmlWriter.close();
                fileWriter.close();
                timer.cancel();
                /*
                 * java.io.File.renameTo() has a known bug when working across
                 * file-systems, see:
                 * http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=4073756
                 * So instead of this call: tempFile.renameTo(file); we use the
                 * following line, and check for exception...
                 */
                try {
                    FileMovingUtil.move(tempFile, file);
                } catch (IOException e) {
                    throw new JournalException("Failed to rename file from '" + tempFile.getPath() + "' to '" + file.getPath() + "'", e);
                }
                // Only mark closed after everything above succeeded.
                open = false;
            } catch (XMLStreamException e) {
                throw new JournalException(e);
            } catch (IOException e) {
                throw new JournalException(e);
            }
        }
    }
}
public class UpdateBaseRates {
    /**
     * Runs the example.
     *
     * @param adManagerServices the services factory.
     * @param session the session.
     * @param baseRateId the base rate ID to update.
     * @throws ApiException if the API request failed with one or more service errors.
     * @throws RemoteException if the API request failed due to other errors.
     */
    public static void runExample(AdManagerServices adManagerServices, AdManagerSession session, long baseRateId) throws RemoteException {
        // Get the BaseRateService.
        BaseRateServiceInterface baseRateService = adManagerServices.get(session, BaseRateServiceInterface.class);
        // Create a statement to only select a single base rate by ID.
        StatementBuilder statementBuilder = new StatementBuilder().where("id = :id").orderBy("id ASC").limit(1).withBindVariableValue("id", baseRateId);
        // Get the base rate.
        BaseRatePage page = baseRateService.getBaseRatesByStatement(statementBuilder.toStatement());
        // Fails if zero or more than one result comes back for the ID.
        BaseRate baseRate = Iterables.getOnlyElement(Arrays.asList(page.getResults()));
        // Update base rate value to $3 USD (micro amounts: 3,000,000 micros = $3).
        Money newRate = new Money();
        newRate.setCurrencyCode("USD");
        newRate.setMicroAmount(3000000L);
        // Only these two subclasses carry a rate; other subclasses are left unchanged.
        if (baseRate instanceof ProductTemplateBaseRate) {
            ((ProductTemplateBaseRate) baseRate).setRate(newRate);
        } else if (baseRate instanceof ProductBaseRate) {
            ((ProductBaseRate) baseRate).setRate(newRate);
        }
        // Update the base rate on the server.
        BaseRate[] baseRates = baseRateService.updateBaseRates(new BaseRate[]{baseRate});
        for (BaseRate updatedBaseRate : baseRates) {
            System.out.printf("Base rate with ID %d and type '%s'," + " belonging to rate card ID %d was updated.%n", updatedBaseRate.getId(), updatedBaseRate.getClass().getSimpleName(), updatedBaseRate.getRateCardId());
        }
    }
}
public class ComponentPropertiesKeysListResolver { /** * Gets the display values with the full properties keys as a List of { @ link ResourceBundleKey } . * @ return the display values */ public List < ResourceBundleKey > getDisplayValues ( ) { } }
final List < ResourceBundleKey > rbk = new ArrayList < > ( ) ; for ( final ResourceBundleKey key : getValues ( ) ) { final ResourceBundleKey clone = ( ResourceBundleKey ) key . clone ( ) ; clone . setKey ( getPropertiesKey ( key . getKey ( ) ) ) ; rbk . add ( clone ) ; } return rbk ;
public class CloseableIterables { /** * Returns an iterable that applies { @ code function } to each element of { @ code * fromIterable } . */ public static < F , T > CloseableIterable < T > transform ( final CloseableIterable < F > iterable , final Function < ? super F , ? extends T > function ) { } }
return wrap ( Iterables . transform ( iterable , function :: apply ) , iterable ) ;
public class druidGLexer {
    /**
     * $ANTLR start "FROM" — lexer rule matching the keyword as either 'FROM' or
     * 'from'. Generated ANTLR code: do not hand-edit the recognition logic.
     */
    public final void mFROM() throws RecognitionException {
        try {
            int _type = FROM;
            int _channel = DEFAULT_TOKEN_CHANNEL;
            // druidG.g:628:7: ( ( 'FROM' | 'from' ) )
            // druidG.g:628:9: ( 'FROM' | 'from' )
            {
                // druidG.g:628:9: ( 'FROM' | 'from' )
                // Decide the alternative by looking ahead one character.
                int alt17 = 2;
                int LA17_0 = input.LA(1);
                if ((LA17_0 == 'F')) {
                    alt17 = 1;
                } else if ((LA17_0 == 'f')) {
                    alt17 = 2;
                } else {
                    // Neither 'F' nor 'f': no viable alternative for this rule.
                    NoViableAltException nvae = new NoViableAltException("", 17, 0, input);
                    throw nvae;
                }
                switch (alt17) {
                    case 1:
                        // druidG.g:628:10: 'FROM'
                        {
                            match("FROM");
                        }
                        break;
                    case 2:
                        // druidG.g:628:17: 'from'
                        {
                            match("from");
                        }
                        break;
                }
            }
            state.type = _type;
            state.channel = _channel;
        } finally {
            // do for sure before leaving
        }
    }
}
public class VaultsInner {
    /**
     * Gets a deleted Azure key vault by blocking on the async service call and
     * unwrapping the response body.
     *
     * @param vaultName The name of the vault.
     * @param location The location of the deleted vault.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the DeletedVaultInner object if successful.
     */
    public DeletedVaultInner getDeleted(String vaultName, String location) {
        return getDeletedWithServiceResponseAsync(vaultName, location).toBlocking().single().body();
    }
}
public class DirectoryListing { /** * Writable interface */ @ Override public void readFields ( DataInput in ) throws IOException { } }
int numEntries = in . readInt ( ) ; partialListing = new HdfsFileStatus [ numEntries ] ; for ( int i = 0 ; i < numEntries ; i ++ ) { partialListing [ i ] = new HdfsFileStatus ( ) ; partialListing [ i ] . readFields ( in ) ; } remainingEntries = in . readInt ( ) ;
public class ConstructorExpression {
    /**
     * Creates an alias for this expression by wrapping the alias name in a typed
     * path and delegating to the path-based overload.
     *
     * @param alias the alias name
     * @return the aliased expression
     */
    public Expression<T> as(String alias) {
        return as(ExpressionUtils.path(getType(), alias));
    }
}
public class SortedLists {
    /**
     * Searches the specified naturally ordered list for the specified object using
     * the binary search algorithm.
     *
     * <p>Equivalent to {@link #binarySearch(List, Function, Object, Comparator,
     * KeyPresentBehavior, KeyAbsentBehavior)} using {@link Ordering#natural}.
     */
    public static <E extends Comparable> int binarySearch(List<? extends E> list, E e,
            KeyPresentBehavior presentBehavior, KeyAbsentBehavior absentBehavior) {
        // The key must be non-null; the comparator-based overload does the real work.
        checkNotNull(e);
        return binarySearch(list, e, Ordering.natural(), presentBehavior, absentBehavior);
    }
}
public class InteractiveElement { /** * ( non - Javadoc ) * @ see * qc . automation . framework . widget . IInteractiveElement # dragAndDropByOffset ( IElement element ) */ public void dragAndDropByOffset ( int xOffset , int yOffset ) throws WidgetException { } }
try { Actions builder = new Actions ( getGUIDriver ( ) . getWrappedDriver ( ) ) ; synchronized ( InteractiveElement . class ) { getGUIDriver ( ) . focus ( ) ; builder . dragAndDropBy ( getWebElement ( ) , xOffset , yOffset ) . build ( ) . perform ( ) ; } } catch ( Exception e ) { throw new WidgetException ( "Error while performing drag and drop from " + getByLocator ( ) + " offset by X: " + xOffset + " Y: " + yOffset , getByLocator ( ) , e ) ; }
import java.lang.Math;

public class AreaOfRhombus {
    /**
     * Computes the area of a rhombus from the lengths of its two diagonals:
     * the area is half the product of the diagonals.
     *
     * Examples:
     *   areaOfRhombus(10, 20) == 100.0
     *   areaOfRhombus(10, 5)  == 25.0
     *   areaOfRhombus(4, 2)   == 4.0
     *
     * @param diagonal1 length of the first diagonal
     * @param diagonal2 length of the second diagonal
     * @return the area of the rhombus
     */
    public static double areaOfRhombus(double diagonal1, double diagonal2) {
        // Multiplying by 0.5 is exactly equivalent to dividing by 2.0 for doubles.
        return 0.5 * (diagonal1 * diagonal2);
    }
}
public class BarcodeInter25 { /** * Creates the bars for the barcode . * @ param text the text . It can contain non numeric characters * @ return the barcode */ public static byte [ ] getBarsInter25 ( String text ) { } }
text = keepNumbers ( text ) ; if ( ( text . length ( ) & 1 ) != 0 ) throw new IllegalArgumentException ( "The text length must be even." ) ; byte bars [ ] = new byte [ text . length ( ) * 5 + 7 ] ; int pb = 0 ; bars [ pb ++ ] = 0 ; bars [ pb ++ ] = 0 ; bars [ pb ++ ] = 0 ; bars [ pb ++ ] = 0 ; int len = text . length ( ) / 2 ; for ( int k = 0 ; k < len ; ++ k ) { int c1 = text . charAt ( k * 2 ) - '0' ; int c2 = text . charAt ( k * 2 + 1 ) - '0' ; byte b1 [ ] = BARS [ c1 ] ; byte b2 [ ] = BARS [ c2 ] ; for ( int j = 0 ; j < 5 ; ++ j ) { bars [ pb ++ ] = b1 [ j ] ; bars [ pb ++ ] = b2 [ j ] ; } } bars [ pb ++ ] = 1 ; bars [ pb ++ ] = 0 ; bars [ pb ++ ] = 0 ; return bars ;
public class DriverFactory {
    /**
     * Adds or replaces an instance in the current instance map. A copy of the
     * current map is built and swapped in atomically so readers never observe a
     * partially updated map.
     *
     * @param instanceName the name of the provider
     * @param instance the instance
     */
    public static void put(String instanceName, Driver instance) {
        final Map<String, Driver> copy = new HashMap<>();
        // Snapshot the current instances under the lock.
        synchronized (instances) {
            copy.putAll(instances.getInstances());
        }
        // Add the new instance and publish the replacement holder.
        copy.put(instanceName, instance);
        instances = new IndexedInstances(copy);
    }
}
public class AnalyticFormulas {
    /**
     * This static method calculates the theta of a call option under a
     * Black-Scholes model. (The previous wording said "vega"; the quantity
     * computed below is the theta, matching the method name.)
     * NOTE(review): the value is returned with a positive sign — confirm the sign
     * convention expected by callers.
     *
     * @param initialStockValue The initial value of the underlying, i.e., the spot.
     * @param riskFreeRate The risk free rate of the bank account numerarie.
     * @param volatility The Black-Scholes volatility.
     * @param optionMaturity The option maturity T.
     * @param optionStrike The option strike.
     * @return The theta of the option
     */
    public static double blackScholesOptionTheta(double initialStockValue, double riskFreeRate,
            double volatility, double optionMaturity, double optionStrike) {
        if (optionStrike <= 0.0 || optionMaturity <= 0.0) {
            // The Black-Scholes model does not consider it being an option
            return 0.0;
        } else {
            // Calculate theta: first d+ and d- of the Black-Scholes formula.
            double dPlus = (Math.log(initialStockValue / optionStrike)
                    + (riskFreeRate + 0.5 * volatility * volatility) * optionMaturity)
                    / (volatility * Math.sqrt(optionMaturity));
            double dMinus = dPlus - volatility * Math.sqrt(optionMaturity);
            // S * sigma * phi(d+) / (2 sqrt(T)) + r * K * exp(-r T) * N(d-)
            double theta = volatility * Math.exp(-0.5 * dPlus * dPlus) / Math.sqrt(2.0 * Math.PI)
                    / Math.sqrt(optionMaturity) / 2 * initialStockValue
                    + riskFreeRate * optionStrike * Math.exp(-riskFreeRate * optionMaturity)
                    * NormalDistribution.cumulativeDistribution(dMinus);
            return theta;
        }
    }
}
public class PageTableModel { /** * Moves to specific page and fire a data changed ( all rows ) . * @ return true if we can move to the page . */ public boolean setPage ( int p ) { } }
if ( p >= 0 && p < getPageCount ( ) ) { page = p ; fireTableDataChanged ( ) ; return true ; } return false ;
public class CommandFactory { /** * Goalie special command . Tries to catch the ball in a given direction * relative to its body direction . If the catch is successful the ball will * be in the goalies hand untill kicked away . * @ param direction The direction in which to catch , relative to its body . */ public void addCatchCommand ( int direction ) { } }
StringBuilder buf = new StringBuilder ( ) ; buf . append ( "(catch " ) ; buf . append ( direction ) ; buf . append ( ')' ) ; fifo . add ( fifo . size ( ) , buf . toString ( ) ) ;
public class ImageUtil { /** * Create a reduced image ( thumb ) of an image from the given input stream . * Possible output formats are " jpg " or " png " ( no " gif " ; it is possible to * read " gif " files , but not to write ) . The resulting thumb is written to * the output stream . The both streams will be explicitly closed by this * method . This method keeps the ratio width / height of the initial image . If * both dimensions of the initial image is less than the specified * boundaries it is not re - scaled ; it is written to the output stream as is . * @ param input the input stream containing the image to reduce * @ param output the output stream where the resulting reduced image is * written * @ param thumbWidth the maximal width of the reduced image * @ param thumbHeight the maximal height of the reduced image * @ param format the output format of the reduced image ; it can be " jpg " , * " png " , . . . */ public static void createThumb ( InputStream input , OutputStream output , int thumbWidth , int thumbHeight , String format ) throws IOException { } }
try { try { ImageInputStream imageInput = ImageIO . createImageInputStream ( input ) ; BufferedImage image = ImageIO . read ( imageInput ) ; BufferedImage thumbImage = createThumb ( image , thumbWidth , thumbHeight ) ; ImageIO . write ( thumbImage , format , output ) ; } finally { output . close ( ) ; } } finally { input . close ( ) ; }
public class AnnotationManager { /** * Update annotations on the map . * @ param annotationList list of annotation to be updated */ @ UiThread public void update ( List < T > annotationList ) { } }
for ( T annotation : annotationList ) { annotations . put ( annotation . getId ( ) , annotation ) ; } updateSource ( ) ;
public class AfplibPackageImpl {
    /**
     * Returns the EIM EClass, resolving it lazily from the registered AFP library
     * package metamodel (classifier index 248).
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     * @generated — do not hand-edit the lookup logic.
     */
    public EClass getEIM() {
        if (eimEClass == null) {
            eimEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(AfplibPackage.eNS_URI)
                    .getEClassifiers().get(248);
        }
        return eimEClass;
    }
}
public class ReferNotifySender {
    /**
     * Creates a NOTIFY message from the given parameters and sends it to the
     * subscriber, delegating directly to the superclass implementation (which
     * resends the request if challenged). Use this method only after
     * processRefer() or processSubscribe(); use sendStatefulNotify() instead if
     * the response to the NOTIFY must be checked.
     *
     * @param subscriptionState the subscription state; overridden by ssHdr
     * @param termReason used only when subscriptionState is TERMINATED; overridden by ssHdr
     * @param body NOTIFY body to put in the message
     * @param timeLeft expiry in seconds (used only when subscriptionState is ACTIVE or PENDING); overridden by ssHdr
     * @param eventHdr if not null, use this event header in the NOTIFY message
     * @param ssHdr if not null, use this subscription state header instead of building one
     * @param accHdr if not null, use this accept header; otherwise the default package one is built
     * @param ctHdr if not null, use this content type header; otherwise the default package one is built
     * @param viaProxy if true, send the message to the proxy (a Route header is added); else send as is
     * @return true if successful, false otherwise (call getErrorMessage() for details)
     */
    public boolean sendNotify(String subscriptionState, String termReason, String body, int timeLeft,
            EventHeader eventHdr, SubscriptionStateHeader ssHdr, AcceptHeader accHdr,
            ContentTypeHeader ctHdr, boolean viaProxy) {
        return super.sendNotify(subscriptionState, termReason, body, timeLeft, eventHdr, ssHdr,
                accHdr, ctHdr, viaProxy);
    }
}
public class Viterbi {
    /**
     * Computes the most likely sequence of hidden states for the given observation
     * sequence using the Viterbi dynamic-programming algorithm.
     *
     * @param obs observation sequence (indices into the emission columns)
     * @param states hidden state indices (assumed 0..n-1, non-empty)
     * @param startProb start probability per hidden state
     * @param transProb transition probability matrix [from][to]
     * @param emitProb emission probability matrix [state][observation]
     * @return the most probable hidden state sequence, one state per observation
     */
    public static int[] compute(int[] obs, int[] states, double[] startProb,
            double[][] transProb, double[][] emitProb) {
        // bestProb[t][y]: probability of the best path ending in state y at time t.
        final double[][] bestProb = new double[obs.length][states.length];
        // bestPath[y]: best state sequence so far that ends in state y.
        int[][] bestPath = new int[states.length][obs.length];

        // Initialization from the start distribution.
        for (int y : states) {
            bestProb[0][y] = startProb[y] * emitProb[y][obs[0]];
            bestPath[y][0] = y;
        }

        // Recursion: extend every path by one observation at a time.
        for (int t = 1; t < obs.length; ++t) {
            final int[][] extendedPath = new int[states.length][obs.length];
            for (int y : states) {
                double best = -1;
                int bestPrev = -1;
                for (int y0 : states) {
                    final double candidate =
                            bestProb[t - 1][y0] * transProb[y0][y] * emitProb[y][obs[t]];
                    if (candidate > best) {
                        best = candidate;
                        bestPrev = y0;
                    }
                }
                // Record the winning predecessor's path extended by the current state.
                bestProb[t][y] = best;
                System.arraycopy(bestPath[bestPrev], 0, extendedPath[y], 0, t);
                extendedPath[y][t] = y;
            }
            bestPath = extendedPath;
        }

        // Termination: pick the state with the highest final probability.
        double best = -1;
        int bestState = 0;
        for (int y : states) {
            if (bestProb[obs.length - 1][y] > best) {
                best = bestProb[obs.length - 1][y];
                bestState = y;
            }
        }
        return bestPath[bestState];
    }
}
public class AsyncContextImpl { /** * PI43752 start */ public void setDispatchURI ( String uri ) { } }
if ( com . ibm . ejs . ras . TraceComponent . isAnyTracingEnabled ( ) && logger . isLoggable ( Level . FINEST ) ) logger . logp ( Level . FINEST , CLASS_NAME , "setDispatchURI" , "uri -> " + uri ) ; this . dispatchURI = uri ;
public class ZipFileEntry {
    /**
     * Answers the enclosing container of this entry. Since this entry is a zip
     * file type entry, the enclosing container is either a root zip type container
     * or a nested directory zip type container.
     *
     * @return the enclosing container of this entry
     */
    @Override
    public ArtifactContainer getEnclosingContainer() {
        // The enclosing container is usually supplied when the entry is created
        // (the entry iterators always supply it); it is only null for entries
        // obtained via ZipFileContainer.getEntry(String, boolean), in which case it
        // is resolved lazily here and cached. Resolution goes through the cached
        // enclosing entry so repeated calls share the same interpreted container.
        if (enclosingContainer == null) {
            // Double-checked locking on the entry itself: a dedicated lock object
            // was deliberately avoided to keep the per-entry footprint small.
            // NOTE(review): this is only safe if 'enclosingContainer' is declared
            // volatile (or the race is benign) — confirm the field declaration.
            synchronized (this) {
                if (enclosingContainer == null) {
                    String a_enclosingPath = PathUtils.getParent(a_path);
                    int parentLen = a_enclosingPath.length();
                    if (parentLen == 1) {
                        // a_enclosingPath == "/": the parent is the root zip container.
                        enclosingContainer = rootContainer;
                    } else {
                        // Strip the leading '/' and take the last segment as the
                        // name of the enclosing entry.
                        String r_enclosingPath = a_enclosingPath.substring(1);
                        int lastSlash = r_enclosingPath.lastIndexOf('/');
                        String enclosingName;
                        if (lastSlash == -1) {
                            enclosingName = r_enclosingPath; // r_enclosingPath = "name"
                        } else {
                            // r_enclosingPath = "parent/child/name"
                            enclosingName = r_enclosingPath.substring(lastSlash + 1);
                        }
                        ZipFileEntry entryInEnclosingContainer =
                                rootContainer.createEntry(enclosingName, a_enclosingPath);
                        enclosingContainer = entryInEnclosingContainer.convertToLocalContainer();
                    }
                }
            }
        }
        return enclosingContainer;
    }
}
public class ComponentTag { /** * get the content type class object */ protected Class < ? extends SlingBean > getComponentType ( ) throws ClassNotFoundException { } }
if ( componentType == null ) { String type = getType ( ) ; if ( StringUtils . isNotBlank ( type ) ) { componentType = ( Class < ? extends SlingBean > ) context . getType ( type ) ; } } return componentType ;
public class Led { /** * Sets the color that will be used to calculate the custom led color * @ param COLOR */ public void setCustomLedColor ( final Color COLOR ) { } }
if ( customLedColor . COLOR . equals ( COLOR ) ) { return ; } customLedColor = new CustomLedColor ( COLOR ) ; final boolean LED_WAS_ON = currentLedImage . equals ( ledImageOn ) ? true : false ; flushImages ( ) ; ledImageOff = create_LED_Image ( getWidth ( ) , 0 , ledColor , ledType ) ; ledImageOn = create_LED_Image ( getWidth ( ) , 1 , ledColor , ledType ) ; currentLedImage = LED_WAS_ON == true ? ledImageOn : ledImageOff ; repaint ( ) ;
public class ExpectationMaximizationGmm_F64 {
    /**
     * M-step: recomputes each Gaussian's mean, covariance, and weight from the
     * points' responsibility (weight) information, maximizing the likelihood of
     * the mixture.
     */
    protected void maximization() {
        // Discard previous parameters by zeroing.
        for (int i = 0; i < mixture.size; i++) {
            mixture.get(i).zero();
        }
        // Accumulate responsibility-weighted sums for the new means.
        for (int i = 0; i < info.size; i++) {
            PointInfo p = info.get(i);
            for (int j = 0; j < mixture.size; j++) {
                mixture.get(j).addMean(p.point, p.weights.get(j));
            }
        }
        // Normalize each mean by its accumulated weight (skip empty components).
        for (int i = 0; i < mixture.size; i++) {
            GaussianGmm_F64 g = mixture.get(i);
            if (g.weight > 0)
                CommonOps_DDRM.divide(g.mean, g.weight);
        }
        // Accumulate weighted deviations around the new means for the covariances.
        // 'dx' is a reusable scratch buffer for the point-minus-mean vector.
        for (int i = 0; i < info.size; i++) {
            PointInfo pp = info.get(i);
            double[] p = pp.point;
            for (int j = 0; j < mixture.size; j++) {
                GaussianGmm_F64 g = mixture.get(j);
                for (int k = 0; k < p.length; k++) {
                    dx[k] = p[k] - g.mean.data[k];
                }
                mixture.get(j).addCovariance(dx, pp.weights.get(j));
            }
        }
        // Normalize covariances and total the component weights.
        double totalMixtureWeight = 0;
        for (int i = 0; i < mixture.size; i++) {
            GaussianGmm_F64 g = mixture.get(i);
            if (g.weight > 0) {
                CommonOps_DDRM.divide(g.covariance, g.weight);
                totalMixtureWeight += g.weight;
            }
        }
        // Renormalize the component weights so they sum to one.
        for (int i = 0; i < mixture.size; i++) {
            mixture.get(i).weight /= totalMixtureWeight;
        }
    }
}
public class MySQLStorage {
    /**
     * Returns the lazily created {@link Schema}, deferring datasource I/O until
     * first use.
     * NOTE(review): this is double-checked locking; it is only safe if the
     * {@code schema} field is declared volatile — confirm the field declaration.
     */
    Schema schema() {
        if (schema == null) {
            synchronized (this) {
                if (schema == null) {
                    schema = new Schema(datasource, context, strictTraceId);
                }
            }
        }
        return schema;
    }
}
public class LocalDocumentStore {
    /**
     * Stores the supplied document at the given key by delegating to the backing
     * database. Must be called within an active transaction.
     *
     * @param key the key or identifier for the document
     * @param document the document that is to be stored
     * @see SchematicDb#put(String, Document)
     */
    @RequiresTransaction
    public void put(String key, Document document) {
        database.put(key, document);
    }
}
public class AstCompatGeneratingVisitor {
    /**
     * Appends a NAMED_EXPRESSION token for the named-expression rule, then lets
     * the default visitor handle the children.
     * Grammar action: {@code -> ^(NAMED_EXPRESSION namedExprTokens)}
     */
    @Override
    public T visitNamedExpr(NamedExprContext ctx) {
        appendAst(ctx, tok(NAMED_EXPRESSION));
        return super.visitNamedExpr(ctx);
    }
}
public class Util {
    /**
     * Null-safe string equality: two nulls are equal, a null never equals a
     * non-null, otherwise defers to {@link String#equals(Object)}.
     *
     * @param thisStr the first string, may be null
     * @param thatStr the second string, may be null
     * @return true when both are null or both are equal strings
     */
    public static boolean equalsString(final String thisStr, final String thatStr) {
        if (thisStr == null) {
            return thatStr == null;
        }
        return thisStr.equals(thatStr);
    }
}
public class AuthorizeSecurityGroupIngressRequest { /** * The sets of IP permissions . * @ return The sets of IP permissions . */ public java . util . List < IpPermission > getIpPermissions ( ) { } }
if ( ipPermissions == null ) { ipPermissions = new com . amazonaws . internal . SdkInternalList < IpPermission > ( ) ; } return ipPermissions ;
public class ContentSpec { /** * Sets the list of additional files needed by the book . * @ param files The list of additional Files . */ public void setFiles ( final List < File > files ) { } }
if ( files == null && this . files == null ) { return ; } else if ( files == null ) { removeChild ( this . files ) ; this . files = null ; } else if ( this . files == null ) { this . files = new FileList ( CommonConstants . CS_FILE_TITLE , files ) ; appendChild ( this . files , false ) ; } else { this . files . setValue ( files ) ; }
public class JobConfigurationUtils { /** * Get a new { @ link Properties } instance by combining a given system configuration { @ link Properties } * object ( first ) and a job configuration { @ link Properties } object ( second ) . * @ param sysProps the given system configuration { @ link Properties } object * @ param jobProps the given job configuration { @ link Properties } object * @ return a new { @ link Properties } instance */ public static Properties combineSysAndJobProperties ( Properties sysProps , Properties jobProps ) { } }
Properties combinedJobProps = new Properties ( ) ; combinedJobProps . putAll ( sysProps ) ; combinedJobProps . putAll ( jobProps ) ; return combinedJobProps ;
public class OSMTablesFactory { /** * Drop the existing OSM tables used to store the imported OSM data * @ param connection * @ param isH2 * @ param tablePrefix * @ throws SQLException */ public static void dropOSMTables ( Connection connection , boolean isH2 , String tablePrefix ) throws SQLException { } }
TableLocation requestedTable = TableLocation . parse ( tablePrefix , isH2 ) ; String osmTableName = requestedTable . getTable ( ) ; String [ ] omsTables = new String [ ] { TAG , NODE , NODE_TAG , WAY , WAY_NODE , WAY_TAG , RELATION , RELATION_TAG , NODE_MEMBER , WAY_MEMBER , RELATION_MEMBER } ; StringBuilder sb = new StringBuilder ( "drop table if exists " ) ; String omsTableSuffix = omsTables [ 0 ] ; String osmTable = TableUtilities . caseIdentifier ( requestedTable , osmTableName + omsTableSuffix , isH2 ) ; sb . append ( osmTable ) ; for ( int i = 1 ; i < omsTables . length ; i ++ ) { omsTableSuffix = omsTables [ i ] ; osmTable = TableUtilities . caseIdentifier ( requestedTable , osmTableName + omsTableSuffix , isH2 ) ; sb . append ( "," ) . append ( osmTable ) ; } try ( Statement stmt = connection . createStatement ( ) ) { stmt . execute ( sb . toString ( ) ) ; }
public class TimedDiffCalculator { /** * Transmits the DiffTask at the end of the RevisionTask processing . * @ param task * Reference to the RevisionTask * @ param result * Reference to the DiffTask * @ throws TimeoutException * if a timeout occurred */ protected void transmitAtEndOfTask ( final Task < Revision > task , final Task < Diff > result ) throws TimeoutException { } }
this . processingTimeDiff += System . currentTimeMillis ( ) - startTime ; if ( task . getTaskType ( ) == TaskTypes . TASK_FULL || task . getTaskType ( ) == TaskTypes . TASK_PARTIAL_LAST ) { diffedSize += result . byteSize ( ) ; ArticleInformation info = result . getHeader ( ) ; info . setRevisionCounter ( revisionCounter ) ; info . setIgnoredRevisionsCounter ( ignoredRevisionsCounter ) ; info . setDiffedSize ( diffedSize ) ; info . setDiffPartCounter ( diffPartCounter ) ; info . setProcessingTimeRead ( task . getHeader ( ) . getProcessingTimeRead ( ) ) ; info . setProcessingTimeDiff ( processingTimeDiff ) ; } super . transmitAtEndOfTask ( task , result ) ;
public class DefaultTemplateEngineProvider { /** * Configure settings from the struts . xml or struts . properties , using * sensible defaults if values are not provided . */ public void configure ( ) { } }
ServletContext servletContext = ServletActionContext . getServletContext ( ) ; ServletContextTemplateResolver templateResolver = new ServletContextTemplateResolver ( servletContext ) ; templateResolver . setTemplateMode ( templateMode ) ; templateResolver . setCharacterEncoding ( characterEncoding ) ; templateResolver . setPrefix ( prefix ) ; templateResolver . setSuffix ( suffix ) ; templateResolver . setCacheable ( cacheable ) ; templateResolver . setCacheTTLMs ( cacheTtlMillis ) ; templateEngine . setTemplateResolver ( templateResolver ) ; StrutsMessageResolver messageResolver = new StrutsMessageResolver ( ) ; templateEngine . setMessageResolver ( new StrutsMessageResolver ( ) ) ; if ( templateEngine instanceof SpringTemplateEngine ) { ( ( SpringTemplateEngine ) templateEngine ) . setMessageSource ( messageResolver . getMessageSource ( ) ) ; } // extension diarects . FieldDialect fieldDialect = new FieldDialect ( TemplateMode . HTML , "sth" ) ; templateEngine . addDialect ( fieldDialect ) ;
public class ADT { /** * Splits a leaf node using the local { @ link LeafSplitter } . * @ param nodeToSplit * the leaf node to split * @ param distinguishingSuffix * the input sequence that splits the hypothesis state of the leaf to split and the new node * @ param oldOutput * the hypothesis output of the node to split given the distinguishing suffix * @ param newOutput * the hypothesis output of the new leaf given the distinguishing suffix * @ return the new leaf node */ public ADTNode < S , I , O > splitLeaf ( final ADTNode < S , I , O > nodeToSplit , final Word < I > distinguishingSuffix , final Word < O > oldOutput , final Word < O > newOutput ) { } }
if ( ! ADTUtil . isLeafNode ( nodeToSplit ) ) { throw new IllegalArgumentException ( "Node to split is not a final node" ) ; } if ( ! ( distinguishingSuffix . length ( ) == oldOutput . length ( ) && oldOutput . length ( ) == newOutput . length ( ) ) ) { throw new IllegalArgumentException ( "Distinguishing suffixes and outputs differ in length" ) ; } if ( oldOutput . equals ( newOutput ) ) { throw new IllegalArgumentException ( "Old and new output are equal" ) ; } final boolean wasRoot = this . root . equals ( nodeToSplit ) ; final ADTNode < S , I , O > result = this . leafSplitter . split ( nodeToSplit , distinguishingSuffix , oldOutput , newOutput ) ; if ( wasRoot ) { this . root = ADTUtil . getStartOfADS ( nodeToSplit ) ; } return result ;
public class EnumLiteralDeclarationImpl {
    /**
     * Returns the enum literal, resolving the reference first when it is still a
     * proxy and notifying adapters of the resolution.
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     * @generated — do not hand-edit the proxy-resolution logic.
     */
    public EEnumLiteral getEnumLiteral() {
        if (enumLiteral != null && enumLiteral.eIsProxy()) {
            InternalEObject oldEnumLiteral = (InternalEObject) enumLiteral;
            enumLiteral = (EEnumLiteral) eResolveProxy(oldEnumLiteral);
            if (enumLiteral != oldEnumLiteral) {
                if (eNotificationRequired())
                    eNotify(new ENotificationImpl(this, Notification.RESOLVE,
                            XtextPackage.ENUM_LITERAL_DECLARATION__ENUM_LITERAL,
                            oldEnumLiteral, enumLiteral));
            }
        }
        return enumLiteral;
    }
}
public class Concurrency {
    /**
     * Returns a Concurrency object wrapping the given executor, choosing the most
     * specific adapter available (ForkJoinPool, then ExecutorService, then plain
     * Executor).
     * NOTE(review): the {@code SERIAL_EXECUTOR} identity branch is only reachable
     * if SERIAL_EXECUTOR is neither a ForkJoinPool nor an ExecutorService —
     * confirm against its declaration.
     *
     * @param executor the underlying Executor
     * @return a Concurrency object for the executor
     */
    public static Concurrency with(final Executor executor) {
        if (executor instanceof ForkJoinPool) {
            return new ForkJoinPoolConcurrency((ForkJoinPool) executor);
        } else if (executor instanceof ExecutorService) {
            return new ExecutorServiceConcurrency((ExecutorService) executor);
        } else if (executor == SERIAL_EXECUTOR) {
            return SERIAL_EXECUTOR;
        } else {
            return new ExecutorConcurrency(executor);
        }
    }
}
public class HectorObjectMapper { /** * Persists the object collection using a single batch mutate . It is up to the * client to partition large collections appropriately so not to cause timeout * or buffer overflow issues . The objects can be heterogenous ( mapping to * multiple column families , etc . ) * @ param keyspace * @ param objColl * @ return */ public Collection < ? > saveObjCollection ( Keyspace keyspace , Collection < ? > objColl ) { } }
Mutator < byte [ ] > m = HFactory . createMutator ( keyspace , BytesArraySerializer . get ( ) ) ; Collection < ? > retColl = saveObjCollection ( keyspace , objColl , m ) ; m . execute ( ) ; return retColl ;
public class StockIcon { /** * Get a particular stock icon . * @ param name Icon name * @ return Icon */ public static Icon getStockIcon ( String name ) { } }
SoftReference < Icon > ref = iconcache . get ( name ) ; if ( ref != null ) { Icon icon = ref . get ( ) ; if ( icon != null ) { return icon ; } } java . net . URL imgURL = StockIcon . class . getResource ( name + ".png" ) ; if ( imgURL != null ) { Icon icon = new ImageIcon ( imgURL ) ; iconcache . put ( name , new SoftReference < > ( icon ) ) ; return icon ; } LoggingUtil . warning ( "Could not find stock icon: " + name ) ; return null ;
public class JaxWsDDHelper {
    /**
     * Gets the PortComponent whose service-impl-bean servlet-link matches the
     * given link, delegating to the generic high-level-element lookup with the
     * SERVLET link type.
     *
     * @param servletLink the servlet-link value to match
     * @param containerToAdapt the container to adapt
     * @return the matching PortComponent, as produced by the delegate lookup
     * @throws UnableToAdaptException if the container cannot be adapted
     */
    static PortComponent getPortComponentByServletLink(String servletLink, Adaptable containerToAdapt)
            throws UnableToAdaptException {
        return getHighLevelElementByServiceImplBean(servletLink, containerToAdapt,
                PortComponent.class, LinkType.SERVLET);
    }
}
public class NodeAction {
    /**
     * Edits an existing Node. (Original doc: "修改Node")
     *
     * <p>Populates a {@code Node} and its {@code NodeParameter} from the two submitted
     * form groups, defaults the auxiliary ports relative to the node's main port when
     * they were not supplied, optionally attaches a ZooKeeper cluster, and persists
     * the change. On a duplicate-configuration conflict a form error is set and the
     * method returns; on success the browser is redirected back to the node list with
     * paging and search state preserved.
     *
     * @param nodeInfo          form group carrying the node's own properties
     * @param nodeParameterInfo form group carrying the node's parameter properties
     * @param pageIndex         current page of the node list, echoed in the redirect
     * @param searchKey         current search filter, echoed (URL-encoded) in the redirect
     * @param err               receiver for the field-level error message on conflict
     * @param nav               navigator used to issue the redirect
     * @throws Exception propagated from form binding, lookup, or persistence
     */
    public void doEdit(@FormGroup("nodeInfo") Group nodeInfo, @FormGroup("nodeParameterInfo") Group nodeParameterInfo, @Param("pageIndex") int pageIndex, @Param("searchKey") String searchKey, @FormField(name = "formNodeError", group = "nodeInfo") CustomErrors err, Navigator nav) throws Exception {
        Node node = new Node();
        NodeParameter parameter = new NodeParameter();
        nodeInfo.setProperties(node);
        nodeParameterInfo.setProperties(parameter);
        // Default ports are derived from the main port: download = port+1, mbean = port+2.
        if (parameter.getDownloadPort() == null || parameter.getDownloadPort() == 0) {
            parameter.setDownloadPort(node.getPort().intValue() + 1);
        }
        if (parameter.getMbeanPort() == null || parameter.getMbeanPort() == 0) {
            parameter.setMbeanPort(node.getPort().intValue() + 2);
        }
        // A positive cluster id selects the ZooKeeper cluster this node should use.
        Long autoKeeperclusterId = nodeParameterInfo.getField("autoKeeperclusterId").getLongValue();
        if (autoKeeperclusterId != null && autoKeeperclusterId > 0) {
            AutoKeeperCluster autoKeeperCluster = autoKeeperClusterService.findAutoKeeperClusterById(autoKeeperclusterId);
            parameter.setZkCluster(autoKeeperCluster);
        }
        node.setParameters(parameter);
        try {
            nodeService.modify(node);
        } catch (RepeatConfigureException rce) {
            // Duplicate configuration: surface a form error instead of failing the request.
            err.setMessage("invalidNode");
            return;
        }
        nav.redirectToLocation("nodeList.htm?pageIndex=" + pageIndex + "&searchKey=" + urlEncode(searchKey));
    }
}
public class BitmapIterationBenchmark { /** * Benchmark of cumulative cost of bitmap union with subsequent iteration over the result . This is a pattern from * query processing on historical nodes , when filters like { @ link org . apache . druid . segment . filter . DimensionPredicateFilter } , * { @ link org . apache . druid . query . filter . RegexDimFilter } , { @ link org . apache . druid . query . filter . SearchQueryDimFilter } and similar are * used . */ @ Benchmark public int unionAndIter ( BitmapsForUnion state ) { } }
ImmutableBitmap intersection = factory . union ( Arrays . asList ( state . bitmaps ) ) ; return iter ( intersection ) ;
public class Element {
    /**
     * Associates this element with its containing page. This is set when the element is
     * added to the page.
     *
     * @param page the page that now contains this element
     * @throws IllegalStateException if the element already has a page (the association
     *         may be made at most once), or whatever {@code checkNotFrozen()} throws
     *         when the element is frozen
     * @see Page#addElement(com.aoindustries.docs.Element)
     */
    void setPage(Page page) {
        synchronized (lock) {
            // Frozen elements are no longer mutable.
            checkNotFrozen();
            // One-time assignment: reassigning the page is a programming error.
            if (this.page != null) throw new IllegalStateException("element already has a page: " + this);
            this.page = page;
        }
        // Consistency check deliberately runs outside the lock; active only with -ea.
        assert checkPageAndParentElement();
    }
}
public class VertxResourceAdapter { /** * This is called when a message endpoint is deactivated . * @ param endpointFactory * A message endpoint factory instance . * @ param spec * An activation spec JavaBean instance . */ public void endpointDeactivation ( MessageEndpointFactory endpointFactory , ActivationSpec spec ) { } }
VertxActivation activation = activations . remove ( spec ) ; if ( activation != null ) activation . stop ( ) ; log . finest ( "endpointDeactivation()" ) ;
public class TCPInputPoller { /** * Pop the oldest command from our list and return it . * @ return the oldest unhandled command in our list */ public CommandAndIPAddress getCommandAndIPAddress ( ) { } }
CommandAndIPAddress command = null ; synchronized ( this ) { if ( commandQueue . size ( ) > 0 ) { command = commandQueue . remove ( 0 ) ; } } return command ;
public class ULocale {
    /**
     * Returns the display name of {@code locale}'s country/region, localized for
     * {@code displayLocale}. displayLocaleID is canonical; localeID need not be,
     * since parsing will fix this.
     *
     * @param locale        the locale whose country code is looked up
     * @param displayLocale the locale in which to render the display name
     * @return the localized region display name
     */
    private static String getDisplayCountryInternal(ULocale locale, ULocale displayLocale) {
        return LocaleDisplayNames.getInstance(displayLocale).regionDisplayName(locale.getCountry());
    }
}
public class StringContext {
    /**
     * Finds the index of each occurrence of the given search string in the source
     * string, scanning forward from the given index.
     *
     * <p>Occurrences are non-overlapping: after each match the scan resumes just past
     * the matched text. A {@code null} source or search string, or an empty search
     * string, yields an empty result — an empty search would otherwise match at every
     * position without advancing and loop forever.
     *
     * @param str       the source string (may be {@code null})
     * @param search    the string to search for (may be {@code null} or empty)
     * @param fromIndex index to start the find
     * @return the match indices in ascending order; empty if none were found
     */
    public int[] find(String str, String search, int fromIndex) {
        // Fix: guard against the empty search string. indexOf("", i) returns i and
        // search.length() == 0 never advances the cursor, so the original loop spun
        // forever, doubling the result array until it exhausted memory.
        if (str == null || search == null || search.isEmpty()) {
            return new int[0];
        }
        int[] indices = new int[10];
        int size = 0;
        int index = fromIndex;
        while ((index = str.indexOf(search, index)) >= 0) {
            if (size == indices.length) {
                // Expand capacity.
                indices = Arrays.copyOf(indices, indices.length * 2);
            }
            indices[size++] = index;
            index += search.length(); // resume past the match (non-overlapping)
        }
        // Trim to the exact number of matches.
        return size == indices.length ? indices : Arrays.copyOf(indices, size);
    }
}
public class ArbitrarySqlTask {
    /**
     * Restricts this task so it only executes if the given column exists on the given
     * table. May be called multiple times; each call adds another existence
     * precondition to the list.
     *
     * @param theTableName  table that must contain the column
     * @param theColumnName column whose presence gates execution of the task
     */
    public void addExecuteOnlyIfColumnExists(String theTableName, String theColumnName) {
        myConditionalOnExistenceOf.add(new TableAndColumn(theTableName, theColumnName));
    }
}
public class RestTemplateBuilder {
    /**
     * Set the {@code Supplier} of {@link ClientHttpRequestFactory} that should be called
     * each time we {@link #build()} a new {@link RestTemplate} instance.
     *
     * @param requestFactorySupplier the supplier for the request factory; must not be
     *        {@code null}
     * @return a new builder instance (the builder is immutable: every field of this
     *         instance is carried over unchanged except the request-factory supplier)
     * @since 2.0.0
     */
    public RestTemplateBuilder requestFactory(Supplier<ClientHttpRequestFactory> requestFactorySupplier) {
        Assert.notNull(requestFactorySupplier, "RequestFactory Supplier must not be null");
        // Copy-on-write: only requestFactorySupplier differs from this instance's state.
        return new RestTemplateBuilder(this.detectRequestFactory, this.rootUri, this.messageConverters, requestFactorySupplier, this.uriTemplateHandler, this.errorHandler, this.basicAuthentication, this.restTemplateCustomizers, this.requestFactoryCustomizer, this.interceptors);
    }
}
public class BatchingEntityLoaderBuilder { /** * Builds a batch - fetch capable loader based on the given persister , lock - options , etc . * @ param persister The entity persister * @ param batchSize The maximum number of ids to batch - fetch at once * @ param lockOptions The lock options * @ param factory The SessionFactory * @ param influencers Any influencers that should affect the built query * @ param innerEntityLoaderBuilder Builder of the entity loader receiving the subset of batches * @ return The loader . */ public UniqueEntityLoader buildLoader ( OuterJoinLoadable persister , int batchSize , LockOptions lockOptions , SessionFactoryImplementor factory , LoadQueryInfluencers influencers , BatchableEntityLoaderBuilder innerEntityLoaderBuilder ) { } }
if ( batchSize <= 1 ) { // no batching return buildNonBatchingLoader ( persister , lockOptions , factory , influencers , innerEntityLoaderBuilder ) ; } return buildBatchingLoader ( persister , batchSize , lockOptions , factory , influencers , innerEntityLoaderBuilder ) ;
public class BlockInlineChecksumReader {
    /**
     * Implements a scatter/gather read. Since checksum and data are stored interleaved in
     * the same file, we go over the data file twice: the first pass streams the checksums,
     * the second pass streams the data. Its speed is therefore not necessarily better than
     * a normal read() and is likely to be slower; the method exists for backward
     * compatibility.
     *
     * <p>The position arithmetic below implies an on-disk layout of
     * [chunk data (bytesPerChecksum)][checksum (checksumSize)] repeated per chunk — the
     * per-chunk stride is {@code bytesPerChecksum + checksumSize}.
     *
     * @param s           socket back to the requesting client
     * @param replica     replica metadata (checksum type and chunk size)
     * @param dataFile    the block file containing interleaved data and checksums
     * @param block       the block being served (used in log/error messages)
     * @param startOffset requested start offset into the block data
     * @param length      requested number of data bytes
     * @param datanode    owning datanode (socket timeout, metrics, identity)
     * @return the chunk-aligned number of data bytes served, or -1 if the client
     *         closed the connection (SocketException)
     * @throws IOException if the file cannot be read fully or the transfer fails
     */
    static long readBlockAccelerator(Socket s, ReplicaToRead replica, File dataFile, Block block, long startOffset, long length, DataNode datanode) throws IOException {
        FileInputStream datain = new FileInputStream(dataFile);
        FileChannel dch = datain.getChannel();
        int type = replica.getChecksumType();
        int bytesPerChecksum = replica.getBytesPerChecksum();
        long checksumSize = DataChecksum.getChecksumSizeByType(type);
        DataChecksum checksum = DataChecksum.newDataChecksum(type, bytesPerChecksum);
        // Align the startOffset down to the previous bytesPerChecksum boundary.
        long delta = startOffset % bytesPerChecksum;
        startOffset -= delta;
        length += delta;
        // Align the length up to encompass the entire last checksum chunk.
        delta = length % bytesPerChecksum;
        if (delta != 0) {
            delta = bytesPerChecksum - delta;
            length += delta;
        }
        // Chunk-index bookkeeping for the aligned range.
        long startChunkNumber = startOffset / bytesPerChecksum;
        long numChunks = length / bytesPerChecksum;
        // Get a connection back to the client.
        SocketOutputStream out = new SocketOutputStream(s, datanode.socketWriteTimeout);
        try {
            // Write checksum header information first.
            checksum.writeHeader(new DataOutputStream(out));
            // Pass 1: transfer the checksum of every chunk in the range.
            int remain = (int) length;
            long pos = startChunkNumber * (bytesPerChecksum + checksumSize);
            for (int i = 0; i < numChunks; i++) {
                assert remain > 0;
                int lenToRead = (remain > bytesPerChecksum) ? bytesPerChecksum : remain;
                // Skip over the chunk's data to reach its trailing checksum.
                pos += lenToRead;
                // NOTE(review): transferTo takes an explicit position, so this
                // position() call looks redundant — kept as-is for behavior parity.
                dch.position(pos);
                long val = dch.transferTo(pos, checksumSize, out);
                if (val != checksumSize) {
                    String msg = "readBlockAccelerator for block " + block + " at offset " + pos + " Cannot read the full checksum.";
                    LOG.warn(msg);
                    throw new IOException(msg);
                }
                pos += checksumSize;
                remain -= lenToRead;
            }
            // Pass 2: transfer the data of every chunk in the range.
            remain = (int) length;
            pos = startChunkNumber * (bytesPerChecksum + checksumSize);
            for (int i = 0; i < numChunks; i++) {
                assert remain > 0;
                dch.position(pos);
                int lenToRead = (remain > bytesPerChecksum) ? bytesPerChecksum : remain;
                long val = dch.transferTo(pos, lenToRead, out);
                if (val != lenToRead) {
                    String msg = "readBlockAccelerator for block " + block + " at offset " + pos + " Cannot read a full chunk.";
                    LOG.warn(msg);
                    throw new IOException(msg);
                }
                // Step over this chunk's data plus its trailing checksum.
                pos += lenToRead + checksumSize;
                remain -= lenToRead;
            }
            return length;
        } catch (SocketException ignored) {
            // It's ok for the remote side to close the connection at any time.
            datanode.myMetrics.blocksRead.inc();
            return -1;
        } catch (IOException ioe) {
            /* What exactly should we do here?
             * Earlier versions shutdown() the datanode on disk error. */
            LOG.warn(datanode.getDatanodeInfo() + ":readBlockAccelerator:Got exception while serving " + block + " to " + s.getInetAddress() + ":\n" + StringUtils.stringifyException(ioe));
            throw ioe;
        } finally {
            IOUtils.closeStream(out);
            IOUtils.closeStream(datain);
        }
    }
}
public class ManagedServerBootCmdFactory {
    /**
     * Adds the absolute path for a server directory as a {@code -D} system property to the
     * boot command, resolving it according to the directory-grouping policy.
     *
     * <p>When the property is not already set, the path is derived from the domain-level
     * {@code typeDir} (by-type) or the server's {@code serverDir} (by-server, also the
     * default) and recorded back into {@code properties}. When the property IS already set,
     * its value is treated as a base directory and the grouping-specific suffix is appended
     * — note that in this case the resolved value is NOT written back into the map.
     *
     * <p>NOTE(review): {@code serverName} is an enclosing-instance field, not a parameter —
     * both BY_TYPE branches and the preset BY_SERVER branch append it.
     *
     * @param command           the command to add the argument to
     * @param typeName          the type of directory
     * @param propertyName      the name of the system property
     * @param properties        the properties where the path may already be defined
     * @param directoryGrouping the directory-grouping type
     * @param typeDir           the domain-level directory for the given directory type;
     *                          used for by-type grouping
     * @param serverDir         the root directory for the server, used for by-server grouping
     * @return the absolute path that was added
     */
    private String addPathProperty(final List<String> command, final String typeName, final String propertyName, final Map<String, String> properties, final DirectoryGrouping directoryGrouping, final File typeDir, File serverDir) {
        final String result;
        final String value = properties.get(propertyName);
        if (value == null) {
            // No preset value: derive the path from the grouping policy.
            switch (directoryGrouping) {
                case BY_TYPE:
                    result = getAbsolutePath(typeDir, "servers", serverName);
                    break;
                case BY_SERVER:
                default:
                    result = getAbsolutePath(serverDir, typeName);
                    break;
            }
            // Record the derived path so later lookups see it.
            properties.put(propertyName, result);
        } else {
            // Preset value acts as the base directory for the grouping-specific suffix.
            final File dir = new File(value);
            switch (directoryGrouping) {
                case BY_TYPE:
                    result = getAbsolutePath(dir, "servers", serverName);
                    break;
                case BY_SERVER:
                default:
                    result = getAbsolutePath(dir, serverName);
                    break;
            }
        }
        command.add(String.format("-D%s=%s", propertyName, result));
        return result;
    }
}
public class JBaseScreen { /** * Return the FieldList describing the screen ' s fields . * Note : This is used so you can override this method to use more that one field list . * @ return The fieldlist for this screen ( null if you are past the end of the list ) . */ public FieldList getFieldList ( String strFileName ) { } }
if ( m_vFieldListList == null ) return null ; for ( int i = 0 ; ; i ++ ) { // Step 1 - Disconnect the controls from the fields FieldList fieldInList = this . getFieldList ( i ) ; if ( fieldInList == null ) break ; if ( strFileName . equalsIgnoreCase ( fieldInList . getTableNames ( false ) ) ) return fieldInList ; // Found , already there . } return null ;
public class Bytes { /** * Copy a subsequence of Bytes to specific byte array . Uses the specified offset in the dest byte * array to start the copy . * @ param start index of subsequence start ( inclusive ) * @ param end index of subsequence end ( exclusive ) * @ param dest destination array * @ param destPos starting position in the destination data . * @ exception IndexOutOfBoundsException if copying would cause access of data outside array * bounds . * @ exception NullPointerException if either < code > src < / code > or < code > dest < / code > is * < code > null < / code > . * @ since 1.1.0 */ public void copyTo ( int start , int end , byte [ ] dest , int destPos ) { } }
// this . subSequence ( start , end ) . copyTo ( dest , destPos ) would allocate another Bytes object arraycopy ( start , dest , destPos , end - start ) ;
public class StringUtils {
    /**
     * Appends the values to {@code buf}, separated by the specified separator.
     * Nothing is appended for an empty iterable.
     *
     * @param buf       builder to append into
     * @param values    values to join (each appended via {@code StringBuilder.append(Object)})
     * @param separator text placed between consecutive values
     */
    public static void join(StringBuilder buf, Iterable<?> values, String separator) {
        boolean first = true;
        for (Object value : values) {
            if (!first) {
                buf.append(separator);
            }
            buf.append(value);
            first = false;
        }
    }
}
public class VdmDropAdapterAssistent { /** * Adds the given status to the list of problems . Discards OK statuses . If * the status is a multi - status , only its children are added . */ private void mergeStatus ( MultiStatus status , IStatus toMerge ) { } }
if ( ! toMerge . isOK ( ) ) { status . merge ( toMerge ) ; }
public class Cipher {
    /**
     * Initializes this cipher with a key, a set of algorithm parameters, and a source
     * of randomness.
     *
     * <p>The cipher is initialized for one of four operations — encryption, decryption,
     * key wrapping, or key unwrapping — depending on {@code opmode}. If this cipher
     * requires algorithm parameters and {@code params} is null, the underlying
     * implementation is expected to generate them itself (for encryption/wrapping) or
     * raise an {@code InvalidAlgorithmParameterException} (for decryption/unwrapping);
     * generated parameters can be retrieved via {@link #getParameters()} or
     * {@link #getIV()}. Any random bytes needed (e.g., for parameter generation) come
     * from {@code random}.
     *
     * <p>Note that initializing a Cipher discards all previously acquired state: it is
     * equivalent to creating a new instance and initializing it.
     *
     * @param opmode the operation mode of this cipher ({@code ENCRYPT_MODE},
     *        {@code DECRYPT_MODE}, {@code WRAP_MODE} or {@code UNWRAP_MODE})
     * @param key the encryption key
     * @param params the algorithm parameters
     * @param random the source of randomness
     * @exception InvalidKeyException if the given key is inappropriate for initializing
     *            this cipher, or its keysize exceeds the maximum allowable keysize (as
     *            determined from the configured jurisdiction policy files)
     * @exception InvalidAlgorithmParameterException if the given algorithm parameters
     *            are inappropriate for this cipher, or this cipher requires algorithm
     *            parameters and {@code params} is null, or the given parameters imply a
     *            cryptographic strength exceeding the legal limits
     */
    public final void init(int opmode, Key key, AlgorithmParameterSpec params, SecureRandom random) throws InvalidKeyException, InvalidAlgorithmParameterException {
        // Clear the flag first so a failure below leaves the cipher uninitialized.
        initialized = false;
        checkOpmode(opmode);
        // Select and initialize the provider implementation for this init variant.
        chooseProvider(InitType.ALGORITHM_PARAM_SPEC, opmode, key, params, null, random);
        // Only mark initialized once provider selection/initialization succeeded.
        initialized = true;
        this.opmode = opmode;
    }
}
public class DebugWsDelegate {
    /**
     * Runs a diagnostic for a given instance by invoking the REST debug API
     * ({@code GET <DEBUG>/diagnose-instance}) with the application name and instance
     * path as query parameters.
     *
     * @param applicationName name of the application owning the instance
     * @param instancePath    path of the instance to diagnose
     * @return the diagnostic returned by the server
     * @throws DebugWsException if the server responds with a non-successful HTTP status
     *         (the exception carries the status code and the response body)
     */
    public Diagnostic diagnoseInstance(String applicationName, String instancePath) throws DebugWsException {
        this.logger.finer("Diagnosing instance " + instancePath + " in application " + applicationName);
        // Build GET <DEBUG>/diagnose-instance?application-name=...&instance-path=...
        WebResource path = this.resource.path(UrlConstants.DEBUG).path("diagnose-instance");
        path = path.queryParam("application-name", applicationName);
        path = path.queryParam("instance-path", instancePath);
        ClientResponse response = this.wsClient.createBuilder(path).accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
        // Any non-2xx family is an error: log and wrap the body in a DebugWsException.
        if (Family.SUCCESSFUL != response.getStatusInfo().getFamily()) {
            String value = response.getEntity(String.class);
            this.logger.finer(response.getStatusInfo() + ": " + value);
            throw new DebugWsException(response.getStatusInfo().getStatusCode(), value);
        }
        this.logger.finer(String.valueOf(response.getStatusInfo()));
        return response.getEntity(Diagnostic.class);
    }
}
public class Solo { /** * Returns a Button matching the specified index . * @ param index the index of the { @ link Button } . { @ code 0 } if only one is available * @ return a { @ link Button } matching the specified index */ public Button getButton ( int index ) { } }
if ( config . commandLogging ) { Log . d ( config . commandLoggingTag , "getButton(" + index + ")" ) ; } return getter . getView ( Button . class , index ) ;
public class ArtefactHandlerAdapter { /** * < p > Creates new GrailsClass derived object using the type supplied in constructor . May not perform * optimally but is a convenience . < / p > * @ param artefactClass Creates a new artefact for the given class * @ return An instance of the GrailsClass interface representing the artefact */ public GrailsClass newArtefactClass ( @ SuppressWarnings ( "rawtypes" ) Class artefactClass ) { } }
try { Constructor < ? > c = grailsClassImpl . getDeclaredConstructor ( new Class [ ] { Class . class } ) ; // TODO GRAILS - 720 plugin class instance created here first return ( GrailsClass ) c . newInstance ( new Object [ ] { artefactClass } ) ; } catch ( NoSuchMethodException e ) { throw new GrailsRuntimeException ( "Unable to locate constructor with Class parameter for " + artefactClass , e ) ; } catch ( IllegalAccessException e ) { throw new GrailsRuntimeException ( "Unable to locate constructor with Class parameter for " + artefactClass , e ) ; } catch ( InvocationTargetException e ) { throw new GrailsRuntimeException ( "Error instantiated artefact class [" + artefactClass + "] of type [" + grailsClassImpl + "]: " + ( e . getMessage ( ) != null ? e . getMessage ( ) : e . getClass ( ) . getSimpleName ( ) ) , e ) ; } catch ( InstantiationException e ) { throw new GrailsRuntimeException ( "Error instantiated artefact class [" + artefactClass + "] of type [" + grailsClassImpl + "]: " + ( e . getMessage ( ) != null ? e . getMessage ( ) : e . getClass ( ) . getSimpleName ( ) ) , e ) ; }
public class LittleEndianRandomAccessFile { /** * Writes a string to the underlying output stream as a sequence of * bytes . Each character is written to the data output stream as * if by the { @ code writeByte ( ) } method . * @ param pString the { @ code String } value to be written . * @ throws IOException if the underlying stream throws an IOException . * @ see # writeByte ( int ) * @ see # file */ public void writeBytes ( String pString ) throws IOException { } }
int length = pString . length ( ) ; for ( int i = 0 ; i < length ; i ++ ) { file . write ( ( byte ) pString . charAt ( i ) ) ; }