signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class SimpleParserImpl { /** * object = " { " [ member { " , " member } ] " } " . */ public Object parseObject ( ) { } }
Map map = new SimpleBindings ( ) ; advance ( ) ; if ( T . getType ( ) == TokenType . STRING ) { parseMember ( map ) ; while ( T . getType ( ) == TokenType . COMMA ) { advance ( ) ; parseMember ( map ) ; } } checkAndSkip ( TokenType . RCURLY , "}" ) ; return map ;
public class IO {
    /**
     * Reads a VDM expression from the file denoted by {@code fval},
     * type-checks it and evaluates it in the given context.
     *
     * @param fval value naming the file to read (resolved via getFile)
     * @param ctxt evaluation context for the parsed expression
     * @return a 2-tuple: (true, evaluated value) on success, or
     *         (false, nil) on any failure, with the error text stored
     *         in {@code lastError}
     */
    public static Value freadval(Value fval, Context ctxt) {
        ValueList result = new ValueList();
        try {
            File file = getFile(fval);
            // Lex/parse the file contents as a single VDM-PP expression.
            LexTokenReader ltr = new LexTokenReader(file, Dialect.VDM_PP, VDMJ.filecharset);
            ExpressionReader reader = new ExpressionReader(ltr);
            reader.setCurrentModule("IO");
            PExp exp = reader.readExpression();
            // Type-check against the global environment before evaluating.
            Interpreter ip = Interpreter.getInstance();
            ip.typeCheck(exp, ip.getGlobalEnvironment());
            result.add(new BooleanValue(true));
            result.add(exp.apply(VdmRuntime.getExpressionEvaluator(), ctxt));
        } catch (Exception e) {
            // Best-effort contract: never throw; report failure in the tuple
            // and remember the message for the caller to query.
            lastError = e.toString();
            result = new ValueList();
            result.add(new BooleanValue(false));
            result.add(new NilValue());
        }
        return new TupleValue(result);
    }
}
public class DeploymentScannerService {
    /** {@inheritDoc} */
    @Override
    public synchronized void stop(StopContext context) {
        // Capture the scanner before nulling the field so we can still stop it
        // after unregistering the notification handler.
        final DeploymentScanner scanner = this.scanner;
        notificationRegistryValue.getValue().unregisterNotificationHandler(ANY_ADDRESS, this.scanner, DEPLOYMENT_FILTER);
        this.scanner = null;
        scanner.stopScanner();
        // Stop accepting new scan tasks; in-flight tasks may still complete.
        scheduledExecutorValue.getValue().shutdown();
        if (callbackHandle != null) {
            callbackHandle.remove();
        }
    }
}
public class StatisticalTagger { /** * Produce postags from a tokenized sentence . * @ param tokens * the sentence * @ return a list containing the postags */ public final List < String > posAnnotate ( final String [ ] tokens ) { } }
final String [ ] annotatedText = this . posTagger . tag ( tokens ) ; final List < String > posTags = new ArrayList < String > ( Arrays . asList ( annotatedText ) ) ; return posTags ;
public class SameDiff { /** * Calculate the regularization ( L1 , L2 and / or WeightDecay ) component of the loss function for the current parameters . . * Note that the training configuration must be set ( via { @ link # setTrainingConfig ( TrainingConfig ) } ) before this * method can be called * @ return The regularization component of the score / loss function */ public double calcRegularizationScore ( ) { } }
Preconditions . checkState ( trainingConfig != null , "No training configuration has been set. A training configuration must " + "be set before calculating the L2 loss. Use setTrainingConfig(TrainingConfig)" ) ; if ( trainingConfig . getRegularization ( ) == null || trainingConfig . getRegularization ( ) . isEmpty ( ) ) { return 0.0 ; } if ( trainingConfig . getTrainableParams ( ) == null || trainingConfig . getTrainableParams ( ) . isEmpty ( ) ) initializeTraining ( ) ; List < Regularization > l = trainingConfig . getRegularization ( ) ; double loss = 0.0 ; for ( String s : trainingConfig . getTrainableParams ( ) ) { for ( Regularization r : l ) { INDArray arr = getVariable ( s ) . getArr ( ) ; loss += r . score ( arr , trainingConfig . getIterationCount ( ) , trainingConfig . getEpochCount ( ) ) ; } } return loss ;
public class Choice5 {
    /**
     * Static factory method for wrapping a value of type <code>D</code> in a {@link Choice5}.
     *
     * @param d   the value
     * @param <A> the first possible type
     * @param <B> the second possible type
     * @param <C> the third possible type
     * @param <D> the fourth possible type
     * @param <E> the fifth possible type
     * @return the wrapped value as a {@link Choice5}&lt;A, B, C, D, E&gt;
     */
    public static <A, B, C, D, E> Choice5<A, B, C, D, E> d(D d) {
        // _D is the private subtype tagging the fourth alternative.
        return new _D<>(d);
    }
}
public class FallbackDecorator {
    /**
     * Calls the fallback if the invocationCall throws an {@link Exception}
     * matching the configured filter; non-matching exceptions are rethrown.
     *
     * @throws IllegalArgumentException if the fallback object does not have a
     *         corresponding fallback method
     */
    @Override
    public CheckedFunction1<Object[], Object> decorate(CheckedFunction1<Object[], Object> invocationCall,
                                                       Method method,
                                                       MethodHandler methodHandler,
                                                       Target<?> target) {
        final Method fallbackMethod;
        // Resolve the fallback method eagerly so configuration errors surface
        // at decoration time, not on the first failing call.
        validateFallback(method);
        fallbackMethod = getFallbackMethod(method);
        return args -> {
            try {
                return invocationCall.apply(args);
            } catch (final Exception exception) {
                // Only delegate to the fallback for exceptions the filter accepts.
                if (filter.test(exception)) {
                    return fallbackMethod.invoke(fallback, args);
                }
                throw exception;
            }
        };
    }
}
public class IPv6AddressPool {
    /**
     * Give a network back to the pool (de-allocate). Adjacent free ranges are
     * merged with the returned network to avoid fragmenting the free list.
     *
     * @param toDeAllocate network to de-allocate
     * @return a new pool (this type is immutable) with the network free again
     * @throws IllegalArgumentException if the network is not contained in this pool
     */
    public IPv6AddressPool deAllocate(final IPv6Network toDeAllocate) {
        if (!contains(toDeAllocate)) {
            throw new IllegalArgumentException(
                    "Network to de-allocate[" + toDeAllocate + "] is not contained in this allocatable range [" + this + "]");
        }

        // find ranges just in front or after the network to deallocate. These are
        // the ranges to merge with to prevent fragmentation.
        final IPv6AddressRange freeRangeBeforeNetwork = findFreeRangeBefore(toDeAllocate);
        final IPv6AddressRange freeRangeAfterNetwork = findFreeRangeAfter(toDeAllocate);

        // Work on a copy of the free-range set; the current pool stays untouched.
        final TreeSet<IPv6AddressRange> newFreeRanges = new TreeSet<IPv6AddressRange>(this.freeRanges);

        if ((freeRangeBeforeNetwork == null) && (freeRangeAfterNetwork == null)) {
            // nothing to "defragment": the network becomes an isolated free range
            newFreeRanges.add(toDeAllocate);
        } else {
            if ((freeRangeBeforeNetwork != null) && (freeRangeAfterNetwork != null)) {
                // merge two existing ranges into one spanning before..after
                newFreeRanges.remove(freeRangeBeforeNetwork);
                newFreeRanges.remove(freeRangeAfterNetwork);
                newFreeRanges.add(IPv6AddressRange.fromFirstAndLast(freeRangeBeforeNetwork.getFirst(),
                        freeRangeAfterNetwork.getLast()));
            } else if (freeRangeBeforeNetwork != null) {
                // append the network onto the free range just before it
                newFreeRanges.remove(freeRangeBeforeNetwork);
                newFreeRanges.add(IPv6AddressRange.fromFirstAndLast(freeRangeBeforeNetwork.getFirst(),
                        toDeAllocate.getLast()));
            } else /* if (freeRangeAfterNetwork != null) */ {
                // prepend the network onto the free range just after it
                newFreeRanges.remove(freeRangeAfterNetwork);
                newFreeRanges.add(IPv6AddressRange.fromFirstAndLast(toDeAllocate.getFirst(),
                        freeRangeAfterNetwork.getLast()));
            }
        }

        return new IPv6AddressPool(underlyingRange, allocationSubnetSize, newFreeRanges, getLastAllocated());
    }
}
public class LayerUtil { /** * Upper left tile for an area . * @ param boundingBox the area boundingBox * @ param zoomLevel the zoom level . * @ param tileSize the tile size . * @ return the tile at the upper left of the bbox . */ public static Tile getUpperLeft ( BoundingBox boundingBox , byte zoomLevel , int tileSize ) { } }
int tileLeft = MercatorProjection . longitudeToTileX ( boundingBox . minLongitude , zoomLevel ) ; int tileTop = MercatorProjection . latitudeToTileY ( boundingBox . maxLatitude , zoomLevel ) ; return new Tile ( tileLeft , tileTop , zoomLevel , tileSize ) ;
public class LineBasedFrameDecoder {
    /**
     * Returns the index in the buffer of the end of line found.
     * Returns -1 if no end of line was found in the buffer.
     *
     * Uses the instance field {@code offset} to remember how many bytes were
     * already scanned in a previous call, so repeated calls on a growing
     * buffer do not rescan old data.
     */
    private int findEndOfLine(final ByteBuf buffer) {
        int totalLength = buffer.readableBytes();
        // Resume scanning after the portion examined last time.
        int i = buffer.forEachByte(buffer.readerIndex() + offset, totalLength - offset, ByteProcessor.FIND_LF);
        if (i >= 0) {
            // Found a line: reset the scan offset for the next frame.
            offset = 0;
            // Treat CRLF as a single terminator: point at the '\r' if present.
            if (i > 0 && buffer.getByte(i - 1) == '\r') {
                i--;
            }
        } else {
            // No LF yet: everything readable has been scanned; skip it next time.
            offset = totalLength;
        }
        return i;
    }
}
public class XMPPBOSHConnection { /** * Send a HTTP request to the connection manager with the provided body element . * @ param body the body which will be sent . * @ throws BOSHException */ protected void send ( ComposableBody body ) throws BOSHException { } }
if ( ! connected ) { throw new IllegalStateException ( "Not connected to a server!" ) ; } if ( body == null ) { throw new NullPointerException ( "Body mustn't be null!" ) ; } if ( sessionID != null ) { body = body . rebuild ( ) . setAttribute ( BodyQName . create ( BOSH_URI , "sid" ) , sessionID ) . build ( ) ; } client . send ( body ) ;
public class OtpOutputStream { /** * Write an old style Erlang ref to the stream . * @ param node * the nodename . * @ param id * an arbitrary number . Only the low order 18 bits will be used . * @ param creation * another arbitrary number . */ public void write_ref ( final String node , final int id , final int creation ) { } }
/* Always encode as an extended reference ; all participating parties are now expected to be able to decode extended references . */ int ids [ ] = new int [ 1 ] ; ids [ 0 ] = id ; write_ref ( node , ids , creation ) ;
public class OrientedBox3d { /** * Set the second axis of the box . * The third axis is updated to be perpendicular to the two other axis . * @ param vector * @ param extent * @ param system */ public void setFirstAxisProperties ( Vector3d vector , DoubleProperty extent , CoordinateSystem3D system ) { } }
this . axis1 . setProperties ( vector . xProperty , vector . yProperty , vector . zProperty ) ; assert ( this . axis1 . isUnitVector ( ) ) ; if ( system . isLeftHanded ( ) ) { this . axis3 . set ( this . axis1 . crossLeftHand ( this . axis2 ) ) ; } else { this . axis3 . set ( this . axis3 . crossRightHand ( this . axis2 ) ) ; } this . extent1Property = extent ;
public class IntListUtil { /** * Normalizes an array of integers from the bounding [ min , max ] to * [ 0.0 , 1.0 ] . If min = = max , all elements in the returned array * will be 1f . */ public static float [ ] normalize ( int [ ] values ) { } }
// Allocate storage for the normalized array float [ ] normalized = new float [ values . length ] ; // Determine the minimum and maximum int min = getMinValue ( values ) ; int max = getMaxValue ( values ) ; int spread = max - min ; // If there is no spread , return a flat normalization if ( spread == 0 ) { Arrays . fill ( normalized , 1f ) ; return normalized ; } // Normalize each value in the input array for ( int i = 0 ; i < values . length ; i ++ ) { normalized [ i ] = ( values [ i ] - min ) / ( float ) spread ; } return normalized ;
public class StringUtils { /** * < p > Checks if all of the CharSequences are empty ( " " ) or null . < / p > * < pre > * StringUtils . isAllEmpty ( null ) = true * StringUtils . isAllEmpty ( null , " " ) = true * StringUtils . isAllEmpty ( new String [ ] { } ) = true * StringUtils . isAllEmpty ( null , " foo " ) = false * StringUtils . isAllEmpty ( " " , " bar " ) = false * StringUtils . isAllEmpty ( " bob " , " " ) = false * StringUtils . isAllEmpty ( " bob " , null ) = false * StringUtils . isAllEmpty ( " " , " bar " ) = false * StringUtils . isAllEmpty ( " foo " , " bar " ) = false * < / pre > * @ param css the CharSequences to check , may be null or empty * @ return { @ code true } if all of the CharSequences are empty or null * @ since 3.6 */ public static boolean isAllEmpty ( final CharSequence ... css ) { } }
if ( ArrayUtils . isEmpty ( css ) ) { return true ; } for ( final CharSequence cs : css ) { if ( isNotEmpty ( cs ) ) { return false ; } } return true ;
public class TermUtil {
    /**
     * Tries to obtain the terminal dimensions by running 'resize' and parsing
     * its "COLUMNS=..."/"LINES=..." style output.
     *
     * @return the terminal size, or null if 'resize' is unavailable, its
     *         output could not be parsed, or any error occurred
     */
    protected static Dimension getSizeViaResize() {
        BufferedReader bin = null;
        try {
            Process proc = Runtime.getRuntime().exec("resize");
            InputStream in = proc.getInputStream();
            bin = new BufferedReader(new InputStreamReader(in));
            // First run of digits on a line is taken as the value.
            Pattern regex = Pattern.compile("([0-9]+)");
            String line;
            int columns = -1, lines = -1;
            while ((line = bin.readLine()) != null) {
                if (line.indexOf("COLUMNS") != -1) {
                    Matcher match = regex.matcher(line);
                    if (match.find()) {
                        columns = safeToInt(match.group());
                    }
                } else if (line.indexOf("LINES") != -1) {
                    Matcher match = regex.matcher(line);
                    if (match.find()) {
                        lines = safeToInt(match.group());
                    }
                }
            }
            // Only report a size when both dimensions were found.
            if (columns != -1 && lines != -1) {
                return new Dimension(columns, lines);
            }
            return null;
        } catch (PatternSyntaxException pse) {
            return null; // logging a warning here may be annoying
        } catch (SecurityException se) {
            return null; // logging a warning here may be annoying
        } catch (IOException ioe) {
            return null; // logging a warning here may be annoying
        } finally {
            StreamUtil.close(bin);
        }
    }
}
public class CSVLoader {
    /**
     * Create the application from the configured schema file name, open a
     * session to it, and kick off table loading.
     *
     * The schema content type is inferred from the file extension
     * (.json or .xml); anything else is a fatal configuration error.
     */
    private void createApplication() {
        String schema = getSchema();
        ContentType contentType = null;
        if (m_config.schema.toLowerCase().endsWith(".json")) {
            contentType = ContentType.APPLICATION_JSON;
        } else if (m_config.schema.toLowerCase().endsWith(".xml")) {
            contentType = ContentType.TEXT_XML;
        } else {
            // logErrorThrow aborts; contentType cannot remain null below.
            logErrorThrow("Unknown file type for schema: {}", m_config.schema);
        }
        try {
            m_logger.info("Creating application '{}' with schema: {}", m_config.app, m_config.schema);
            m_client.createApplication(schema, contentType);
        } catch (Exception e) {
            logErrorThrow("Error creating schema: {}", e);
        }
        // Re-open the application to verify the creation actually took effect.
        try {
            m_session = m_client.openApplication(m_config.app);
        } catch (RuntimeException e) {
            logErrorThrow("Application '{}' not found after creation: {}.", m_config.app, e.toString());
        }
        // OLAP-backed applications get special handling during loading.
        String ss = m_session.getAppDef().getStorageService();
        if (!Utils.isEmpty(ss) && ss.startsWith("OLAP")) {
            m_bOLAPApp = true;
        }
        loadTables();
    }
}
public class DeviceProxyDAODefaultImpl {
    /**
     * Starts polling the named command on the device at the given period.
     * Thin wrapper over {@code poll_object} with the "command" object type.
     *
     * @param deviceProxy proxy of the device to poll
     * @param cmdname name of the command to poll
     * @param period polling period
     * @throws DevFailed if the underlying poll request fails
     */
    public void poll_command(final DeviceProxy deviceProxy, final String cmdname, final int period) throws DevFailed {
        poll_object(deviceProxy, cmdname, "command", period);
    }
}
public class DashboardDto { /** * Converts list of dashboard entity objects to list of dashboardDto objects . * @ param dashboards List of dashboard entities . Cannot be null . * @ return List of dashboard objects . * @ throws WebApplicationException If an error occurs . */ public static List < DashboardDto > transformToDto ( List < Dashboard > dashboards ) { } }
if ( dashboards == null ) { throw new WebApplicationException ( "Null entity object cannot be converted to Dto object." , Status . INTERNAL_SERVER_ERROR ) ; } List < DashboardDto > result = new ArrayList < DashboardDto > ( ) ; for ( Dashboard dashboard : dashboards ) { result . add ( transformToDto ( dashboard ) ) ; } return result ;
public class JobDataMap { /** * Retrieve the identified < code > String < / code > value from the < code > JobDataMap < / code > . * @ throws ClassCastException if the identified object is not a String . */ public String getString ( String key ) { } }
Object obj = get ( key ) ; try { return ( String ) obj ; } catch ( Exception e ) { throw new ClassCastException ( "Identified object is not a String." ) ; }
public class EncodingContext {
    /**
     * Serialize the given value into an array of bytes. This uses the standard
     * Java-based serializers ({@link ObjectOutputStream}) to serialize the data.
     *
     * @param value the value to serialize
     * @return the array of bytes representing the serialized form of the value
     * @throws IOException if an error occurs during serialization
     * @see #deserialize(byte[])
     * @see #serializeBase64(Serializable)
     */
    public byte[] serialize(Serializable value) throws IOException {
        // create buffer and underlying output stream
        ByteArrayOutputStream buffer = new ByteArrayOutputStream(1024);
        ObjectOutputStream output = new ObjectOutputStream(buffer);

        // write the serialized value
        try {
            output.writeObject(value);
        } finally {
            // BUG FIX: the ObjectOutputStream must be closed (which flushes its
            // internal block-data buffer) before reading the bytes; the old code
            // closed only the ByteArrayOutputStream, whose close() is a no-op,
            // so buffered object data could be missing from the result.
            output.close();
        }

        // return the underlying bytes
        return buffer.toByteArray();
    }
}
public class BitmapUtils {
    /**
     * Decodes a bitmap from the stream, optionally downsampled to roughly the
     * requested width/height.
     *
     * When the stream supports mark/reset, a first bounds-only decode pass is
     * used to compute an inSampleSize, then the stream is rewound for the real
     * decode. Returns null if BitmapFactory fails to decode the InputStream.
     */
    public synchronized static Bitmap load(InputStream is, int width, int height) {
        BitmapFactory.Options opt = null;
        try {
            opt = new BitmapFactory.Options();
            if (width > 0 && height > 0) {
                if (is.markSupported()) {
                    // Pass 1: decode bounds only to pick a sample size,
                    // then rewind the stream for the actual decode.
                    is.mark(is.available());
                    opt.inJustDecodeBounds = true;
                    BitmapFactory.decodeStream(is, null, opt);
                    opt.inSampleSize = calculateInSampleSize(opt, width, height);
                    is.reset();
                }
            }
            opt.inJustDecodeBounds = false;
            opt.inPurgeable = true;
            opt.inInputShareable = true;
            // FlushedInputStream works around a known skip() bug in older
            // Android HTTP streams — presumably; confirm against its source.
            return BitmapFactory.decodeStream(new FlushedInputStream(is), null, opt);
        } catch (Exception e) {
            Log.e(TAG, "", e);
            return null;
        } finally {
            if (is != null) {
                try {
                    is.close();
                } catch (IOException e) {
                    System.out.print(e.toString());
                }
            }
            opt = null;
        }
    }
}
public class PageSnapshot { /** * Highlights WebElement within the page with provided color * and line width . * @ param element WebElement to be highlighted * @ param color color of the line * @ param lineWidth width of the line * @ return instance of type PageSnapshot */ public PageSnapshot highlight ( WebElement element , Color color , int lineWidth ) { } }
try { image = ImageProcessor . highlight ( image , new Coordinates ( element , devicePixelRatio ) , color , lineWidth ) ; } catch ( RasterFormatException rfe ) { throw new ElementOutsideViewportException ( ELEMENT_OUT_OF_VIEWPORT_EX_MESSAGE , rfe ) ; } return this ;
public class NetworkDispatcher { /** * Checks if a response message contains a body . * @ param requestMethod request method * @ param responseCode response status code * @ return whether the response has a body * @ see < a href = " https : / / tools . ietf . org / html / rfc7230 # section - 3.3 " > RFC 7230 section 3.3 < / a > */ public static boolean hasResponseBody ( String requestMethod , int responseCode ) { } }
return requestMethod != Request . Method . HEAD && ! ( 100 <= responseCode && responseCode < HttpURLConnection . HTTP_OK ) && responseCode != HttpURLConnection . HTTP_NO_CONTENT && responseCode != HttpURLConnection . HTTP_RESET && responseCode != HttpURLConnection . HTTP_NOT_MODIFIED ;
public class ClassReader {
    /**
     * Completion for classes to be loaded. Before a class is loaded
     * we make sure its enclosing class (if any) is loaded.
     */
    private void complete(Symbol sym) throws CompletionFailure {
        if (sym.kind == TYP) {
            ClassSymbol c = (ClassSymbol) sym;
            c.members_field = new Scope.ErrorScope(c); // make sure it's always defined
            annotate.enterStart();
            try {
                // Owners and the enclosing class must be completed first so
                // that fillIn sees a fully-resolved lexical context.
                completeOwners(c.owner);
                completeEnclosing(c);
            } finally {
                // The flush needs to happen only after annotations
                // are filled in.
                annotate.enterDoneWithoutFlush();
            }
            fillIn(c);
        } else if (sym.kind == PCK) {
            PackageSymbol p = (PackageSymbol) sym;
            try {
                fillIn(p);
            } catch (IOException ex) {
                // Wrap I/O failures as CompletionFailure, preserving the cause.
                throw new CompletionFailure(sym, ex.getLocalizedMessage()).initCause(ex);
            }
        }
        // Only flush when not re-entrantly completing (filling) another symbol.
        if (!filling)
            annotate.flush(); // finish attaching annotations
    }
}
public class AbstractInstallPlanJob {
    /**
     * Install provided extension.
     *
     * @param extensionId the identifier of the extension to install
     * @param namespace the namespace where to install the extension
     * @param parentBranch the children of the parent {@link DefaultExtensionPlanNode}
     * @throws InstallException error when trying to install provided extension
     */
    protected void installExtension(ExtensionId extensionId, String namespace, DefaultExtensionPlanTree parentBranch)
        throws InstallException {
        // Delegate to the full overload; false presumably means "not a
        // dependency install" — confirm against the overload's signature.
        installExtension(extensionId, false, namespace, parentBranch);
    }
}
public class Hud {
    /**
     * Create menus from actions.
     *
     * @param parents the parents menu
     * @param actions the actions to create as menu
     */
    private void createMenus(Collection<ActionRef> parents, Collection<ActionRef> actions) {
        for (final ActionRef action : actions) {
            final Featurable menu = createMenu(action);
            if (!action.getRefs().isEmpty()) {
                // Action with children: recurse into a sub-menu.
                generateSubMenu(actions, action, menu);
            } else if (action.hasCancel()) {
                // Leaf with a cancel: remember its parents so cancel can navigate back.
                previous.put(action, parents);
                generateCancel(action, menu);
            }
        }
        // Register every created menu (accumulated in the `menus` field) with the handler.
        for (final Featurable current : menus) {
            handler.add(current);
        }
    }
}
public class DefaultDecomposer { /** * ( non - Javadoc ) * @ see org . jsmpp . util . PDUDecomposer # enquireLink ( byte [ ] ) */ public EnquireLink enquireLink ( byte [ ] b ) { } }
EnquireLink req = new EnquireLink ( ) ; assignHeader ( req , b ) ; return req ;
public class Model { /** * Gets attribute value as < code > Float < / code > . * If there is a { @ link Converter } registered for the attribute that converts from Class < code > S < / code > to Class * < code > java . lang . Float < / code > , given the attribute value is an instance of < code > S < / code > , then it will be used , * otherwise performs a conversion using { @ link Convert # toFloat ( Object ) } . * @ param attributeName name of attribute to convert * @ return value converted to < code > Float < / code > */ public Float getFloat ( String attributeName ) { } }
Object value = getRaw ( attributeName ) ; Converter < Object , Float > converter = modelRegistryLocal . converterForValue ( attributeName , value , Float . class ) ; return converter != null ? converter . convert ( value ) : Convert . toFloat ( value ) ;
public class AbstractEngine {
    /**
     * Loads a resource from the filesystem into a string, with caching.
     *
     * Checks whether the cached copy is still fresh (file not modified since
     * caching); serves from cache when possible, otherwise (re)loads the file
     * into the cache and then answers from it.
     *
     * @param filename file to read
     * @param handler async handler receiving the file content or an error
     */
    public void read(final String filename, final AsyncResultHandler<String> handler) {
        isFresh(filename, new Handler<Boolean>() {
            @Override
            public void handle(Boolean fresh) {
                if (fresh) {
                    // Fast path: cached copy is up to date.
                    String cachedValue = getFileFromCache(filename);
                    if (cachedValue != null) {
                        handler.handle(new YokeAsyncResult<>(null, cachedValue));
                        return;
                    }
                }
                // either fresh is false or cachedValue is null: (re)load from disk.
                loadToCache(filename, new Handler<Throwable>() {
                    @Override
                    public void handle(Throwable error) {
                        if (error != null) {
                            handler.handle(new YokeAsyncResult<String>(error, null));
                            return;
                        }
                        // no error: the cache now holds the fresh content.
                        handler.handle(new YokeAsyncResult<>(null, getFileFromCache(filename)));
                    }
                });
            }
        });
    }
}
public class RemoteMongoDatabaseImpl {
    /**
     * Gets a collection.
     *
     * @param collectionName the name of the collection to return
     * @return the collection, with documents typed as {@link Document}
     */
    public RemoteMongoCollection<Document> getCollection(final String collectionName) {
        // Wrap the proxy's collection, sharing this database's dispatcher.
        return new RemoteMongoCollectionImpl<>(proxy.getCollection(collectionName), dispatcher);
    }
}
public class RoaringBitmap {
    /**
     * Checks whether the two bitmaps intersect. This can be much faster than calling "and" and
     * checking the cardinality of the result.
     *
     * Walks both container arrays with two cursors, comparing 16-bit keys;
     * only containers with equal keys can share values.
     *
     * @param x1 first bitmap
     * @param x2 other bitmap
     * @return true if they intersect
     */
    public static boolean intersects(final RoaringBitmap x1, final RoaringBitmap x2) {
        final int length1 = x1.highLowContainer.size(), length2 = x2.highLowContainer.size();
        int pos1 = 0, pos2 = 0;

        while (pos1 < length1 && pos2 < length2) {
            final short s1 = x1.highLowContainer.getKeyAtIndex(pos1);
            final short s2 = x2.highLowContainer.getKeyAtIndex(pos2);
            if (s1 == s2) {
                // Same high bits: intersection exists iff the containers overlap.
                final Container c1 = x1.highLowContainer.getContainerAtIndex(pos1);
                final Container c2 = x2.highLowContainer.getContainerAtIndex(pos2);
                if (c1.intersects(c2)) {
                    return true;
                }
                ++pos1;
                ++pos2;
            } else if (Util.compareUnsigned(s1, s2) < 0) { // s1 < s2
                // Skip ahead in x1 to the first key >= s2 (galloping search).
                pos1 = x1.highLowContainer.advanceUntil(s2, pos1);
            } else { // s1 > s2
                pos2 = x2.highLowContainer.advanceUntil(s1, pos2);
            }
        }
        return false;
    }
}
public class FLACEncoder {
    /**
     * Tell encoder how many threads to use for encoding. More threads than this
     * will exist, but only the given amount should be in a running state at
     * any moment (the other threads are simply manager threads, waiting for
     * encoding-threads to end). A special case is setting "count" to zero; this
     * will tell the encoder not to use internal threads at all, and all
     * encoding will be done with the main thread. Otherwise, any encode methods
     * will return while the encode actually takes place in a separate thread.
     *
     * @param count Number of encoding threads to use. Count &gt; 0 means use that
     * many independent encoding threads, count == 0 means encode in main thread,
     * count &lt; 0 is ignored.
     * @return boolean value represents whether requested count was applied or
     * not. This may be false if a FLAC stream is currently opened.
     */
    public boolean setThreadCount(int count) {
        boolean result = false;
        // Cheap unlocked pre-check; the authoritative check is re-done under the lock.
        if (count < 0 || flacStreamIsOpen)
            return false;
        streamLock.lock();
        try {
            // Re-check under the lock: a stream may have been opened meanwhile.
            if (flacStreamIsOpen)
                result = false;
            else {
                MAX_THREADED_FRAMES = count;
                prepareThreadManager(streamConfig);
                result = true;
            }
        } finally {
            streamLock.unlock();
        }
        return result;
    }
}
public class QueryExecutorImpl {
    /**
     * Sends data during a live COPY IN operation. Only unlocks the connection if server suddenly
     * returns CommandComplete, which should not happen.
     *
     * @param op the CopyIn operation presumably currently holding lock on this connection
     * @param data bytes to send
     * @param off index of first byte to send (usually 0)
     * @param siz number of bytes to send (usually data.length)
     * @throws SQLException on failure
     */
    public synchronized void writeToCopy(CopyOperationImpl op, byte[] data, int off, int siz) throws SQLException {
        if (!hasLock(op)) {
            throw new PSQLException(GT.tr("Tried to write to an inactive copy operation"),
                    PSQLState.OBJECT_NOT_IN_STATE);
        }
        LOGGER.log(Level.FINEST, " FE=> CopyData({0})", siz);
        try {
            // CopyData message: tag 'd', length (payload + 4 length bytes), payload.
            pgStream.sendChar('d');
            pgStream.sendInteger4(siz + 4);
            pgStream.send(data, off, siz);
            processCopyResults(op, false); // collect any pending notifications without blocking
        } catch (IOException ioe) {
            throw new PSQLException(GT.tr("Database connection failed when writing to copy"),
                    PSQLState.CONNECTION_FAILURE, ioe);
        }
    }
}
public class Util {
    /**
     * Splits a string into an array using provided delimiter. Empty (but not blank) split chunks are omitted.
     * The split chunks are trimmed.
     *
     * @param input string to split
     * @param delimiter delimiter
     * @return a string split into an array using a provided delimiter
     */
    public static String[] split(String input, char delimiter) {
        // Delegate to the String-delimiter overload, which holds the real logic.
        return split(input, String.valueOf(delimiter));
    }
}
public class CharacterBasedGenerativeModel {
    /**
     * Computes the log-probability of the current character/tag given the two
     * preceding ones, using linear interpolation of unigram, bigram and
     * trigram frequencies (weights l1, l2, l3).
     *
     * @param s1 the character two positions back
     * @param i1 index of the tag two positions back
     * @param s2 the previous character
     * @param i2 index of the previous tag
     * @param s3 the current character
     * @param i3 index of the current tag
     * @return the log of the interpolated probability, or the sentinel field
     *         {@code inf} when the tag transition is impossible or all terms are zero
     */
    double log_prob(char s1, int i1, char s2, int i2, char s3, int i3) {
        // Impossible tag transition: bail out with the sentinel immediately.
        if (transMatrix[i1][i2][i3] == 0) return inf;
        char t1 = id2tag[i1];
        char t2 = id2tag[i2];
        char t3 = id2tag[i3];
        // Interpolated smoothing: weighted unigram, bigram and trigram terms.
        double uni = l1 * tf.freq(s3, t3);
        double bi = div(l2 * tf.get(s2, t2, s3, t3), tf.get(s2, t2));
        double tri = div(l3 * tf.get(s1, t1, s2, t2, s3, t3), tf.get(s1, t1, s2, t2));
        // All terms zero: log would be -infinity; return the sentinel instead.
        if (uni + bi + tri == 0) return inf;
        return Math.log(uni + bi + tri);
    }
}
public class BackupWorkloadItemsInner {
    /**
     * Provides a pageable list of workload items of a specific container according to the query filter and the pagination parameters.
     *
     * @param vaultName The name of the recovery services vault.
     * @param resourceGroupName The name of the resource group where the recovery services vault is present.
     * @param fabricName Fabric name associated with the container.
     * @param containerName Name of the container.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the PagedList&lt;WorkloadItemResourceInner&gt; object
     */
    public Observable<Page<WorkloadItemResourceInner>> listAsync(final String vaultName, final String resourceGroupName, final String fabricName, final String containerName) {
        // Unwrap the ServiceResponse envelope, emitting only the page body.
        return listWithServiceResponseAsync(vaultName, resourceGroupName, fabricName, containerName)
                .map(new Func1<ServiceResponse<Page<WorkloadItemResourceInner>>, Page<WorkloadItemResourceInner>>() {
                    @Override
                    public Page<WorkloadItemResourceInner> call(ServiceResponse<Page<WorkloadItemResourceInner>> response) {
                        return response.body();
                    }
                });
    }
}
public class DefaultBatchManager {
    /**
     * Parks the currently active batch under the given name so a new batch can
     * be started; fails if there is no active batch or the name is taken.
     *
     * @see org.jboss.as.cli.batch.BatchManager#holdbackActiveBatch(java.lang.String)
     * @return true if the batch was held back, false otherwise
     */
    @Override
    public boolean holdbackActiveBatch(String name) {
        if (activeBatch == null) {
            return false;
        }

        if (batches.containsKey(name)) {
            return false;
        }

        // `batches` presumably starts as an immutable empty map; swap in a
        // mutable map on first use — confirm against the field initializer.
        if (batches.isEmpty()) {
            batches = new HashMap<String, DefaultBatch>();
        }
        batches.put(name, activeBatch);
        activeBatch = null;
        return true;
    }
}
public class ConfigurationPropertyName { /** * Returns { @ code true } if this element is an ancestor ( immediate or nested parent ) of * the specified name . * @ param name the name to check * @ return { @ code true } if this name is an ancestor */ public boolean isAncestorOf ( ConfigurationPropertyName name ) { } }
Assert . notNull ( name , "Name must not be null" ) ; if ( this . getNumberOfElements ( ) >= name . getNumberOfElements ( ) ) { return false ; } return elementsEqual ( name ) ;
public class BaseProviderRule {
    /**
     * Scans all methods of the test target for the @Pact annotation and executes
     * them to build pacts, if not already initialized (results are cached in the
     * {@code pacts} field).
     *
     * @param fragment fragment name used to select matching pact methods
     * @return map of provider name to the pact built for it
     */
    protected Map<String, RequestResponsePact> getPacts(String fragment) {
        if (pacts == null) {
            pacts = new HashMap<>();
            for (Method m : target.getClass().getMethods()) {
                // Only consider methods with the right signature and fragment.
                if (JUnitTestSupport.conformsToSignature(m) && methodMatchesFragment(m, fragment)) {
                    Pact pactAnnotation = m.getAnnotation(Pact.class);
                    // An empty provider on the annotation means "any provider".
                    if (StringUtils.isEmpty(pactAnnotation.provider()) || provider.equals(pactAnnotation.provider())) {
                        PactDslWithProvider dslBuilder = ConsumerPactBuilder.consumer(pactAnnotation.consumer())
                                .hasPactWith(provider);
                        updateAnyDefaultValues(dslBuilder);
                        try {
                            // The pact method receives the DSL builder and returns the pact.
                            RequestResponsePact pact = (RequestResponsePact) m.invoke(target, dslBuilder);
                            pacts.put(provider, pact);
                        } catch (Exception e) {
                            throw new RuntimeException("Failed to invoke pact method", e);
                        }
                    }
                }
            }
        }
        return pacts;
    }
}
public class SVGPlot { /** * Save document into a SVG file . * References PNG images from the temporary files will be inlined * automatically . * @ param file Output filename * @ throws IOException On write errors * @ throws TransformerFactoryConfigurationError Transformation error * @ throws TransformerException Transformation error */ public void saveAsSVG ( File file ) throws IOException , TransformerFactoryConfigurationError , TransformerException { } }
OutputStream out = new BufferedOutputStream ( new FileOutputStream ( file ) ) ; // TODO embed linked images . javax . xml . transform . Result result = new StreamResult ( out ) ; SVGDocument doc = cloneDocument ( ) ; // Use a transformer for pretty printing Transformer xformer = TransformerFactory . newInstance ( ) . newTransformer ( ) ; xformer . setOutputProperty ( OutputKeys . INDENT , "yes" ) ; xformer . transform ( new DOMSource ( doc ) , result ) ; out . flush ( ) ; out . close ( ) ;
public class CmsTextArea { /** * Updates the content of the internal textarea . < p > * @ param value the new content */ private void updateTextArea ( String value ) { } }
String oldValue = m_textArea . getValue ( ) ; if ( ! oldValue . equals ( value ) ) { int l1 = oldValue . split ( "\n" ) . length ; int l2 = value . split ( "\n" ) . length ; m_textArea . setValue ( value ) ; if ( l1 != l2 ) { scheduleResize ( ) ; } }
public class BusinessUtils { /** * Returns the identifier class for an aggregate root class . */ @ SuppressWarnings ( "unchecked" ) public static < A extends AggregateRoot < I > , I > Class < I > resolveAggregateIdClass ( Class < A > aggregateRootClass ) { } }
checkNotNull ( aggregateRootClass , "aggregateRootClass should not be null" ) ; return ( Class < I > ) resolveGenerics ( AggregateRoot . class , aggregateRootClass ) [ 0 ] ;
public class AnalysisCacheToRepositoryAdapter { /** * ( non - Javadoc ) * @ see org . apache . bcel . util . Repository # findClass ( java . lang . String ) */ @ Override public JavaClass findClass ( String className ) { } }
// Normalize the (possibly dotted) name to slashed form, build its descriptor,
// and probe the global analysis cache for an existing JavaClass analysis.
@SlashedClassName
String slashed = ClassName.toSlashedClassName(className);
ClassDescriptor descriptor = DescriptorFactory.instance().getClassDescriptor(slashed);
return Global.getAnalysisCache().probeClassAnalysis(JavaClass.class, descriptor);
public class MemoryMapArchiveBase { /** * { @ inheritDoc } * @ see org . jboss . shrinkwrap . api . Archive # getContent ( ) */ @ Override public Map < ArchivePath , Node > getContent ( ) { } }
// Snapshot all content entries except the synthetic root node and return an
// unmodifiable view, preserving insertion order.
Map<ArchivePath, Node> ret = new LinkedHashMap<ArchivePath, Node>();
// Hoisted: the original allocated a new BasicPath("/") on every iteration.
final ArchivePath rootPath = new BasicPath("/");
for (Map.Entry<ArchivePath, NodeImpl> item : content.entrySet()) {
    // The root node is an implementation detail; callers only see real content.
    if (!item.getKey().equals(rootPath)) {
        ret.put(item.getKey(), item.getValue());
    }
}
return Collections.unmodifiableMap(ret);
public class OptionalWeak { /** * Shortcut for as ( Map . class ) , with malleable generic type * @ see OptionalWeak # as ( Class ) */ public < K , V > Optional < Map < K , V > > asMap ( ) { } }
// Delegate to the wrapped Optional: keep the value only when it is a map,
// then view it as a Map. The key/value type parameters are caller-chosen.
return inner
        .filter(value -> value.isMap())
        .map(value -> value.asMap());
public class TransactionLogger { /** * Write ' properties ' map to given log in given level - with pipe separator between each entry * Write exception stack trace to ' logger ' in ' error ' level , if not empty * @ param logger * @ param level - of logging */ protected void writePropertiesToLog ( Logger logger , Level level ) { } }
// Emit the collected properties (separator-joined) at the requested level.
writeToLog(logger, level, getMapAsString(this.properties, separator), null);
if (this.exception != null) {
    // NOTE(review): the stack trace goes to the instance's own 'this.logger',
    // not the 'logger' parameter used above — confirm this asymmetry is
    // intentional and not a copy/paste slip.
    writeToLog(this.logger, Level.ERROR, "Error:", this.exception);
}
public class CmsLocaleGroupService { /** * Smarter method to connect a resource to a locale group . < p > * Exactly one of the resources given as an argument must represent a locale group , while the other should * be the locale that you wish to attach to the locale group . < p > * @ param first a resource * @ param second a resource * @ throws CmsException if something goes wrong */ public void attachLocaleGroupIndirect ( CmsResource first , CmsResource second ) throws CmsException { } }
// Resolve folders to their default files; both sides must resolve.
CmsResource firstResourceCorrected = getDefaultFileOrSelf(first);
CmsResource secondResourceCorrected = getDefaultFileOrSelf(second);
if ((firstResourceCorrected == null) || (secondResourceCorrected == null)) {
    throw new IllegalArgumentException("no default file");
}
CmsLocaleGroup group1 = readLocaleGroup(firstResourceCorrected);
CmsLocaleGroup group2 = readLocaleGroup(secondResourceCorrected);
// Exactly one side must be (or be able to become) a group head; the other
// side is the locale being attached.
int numberOfRealGroups = (group1.isRealGroupOrPotentialGroupHead() ? 1 : 0)
        + (group2.isRealGroupOrPotentialGroupHead() ? 1 : 0);
if (numberOfRealGroups != 1) {
    // Fixed message: the old text claimed "more than one real groups", but
    // this branch is also reached when NEITHER resource is a group head.
    throw new IllegalArgumentException(
        "exactly one of the resources must be a locale group (or potential group head), found " + numberOfRealGroups);
}
CmsResource main = null;
CmsResource secondary = null;
if (group1.isRealGroupOrPotentialGroupHead()) {
    main = group1.getPrimaryResource();
    secondary = group2.getPrimaryResource();
} else if (group2.isRealGroupOrPotentialGroupHead()) {
    main = group2.getPrimaryResource();
    secondary = group1.getPrimaryResource();
}
attachLocaleGroup(secondary, main);
public class ConfigArgP { /** * Applies the properties from the named source to the main configuration * @ param config the main configuration to apply to * @ param source the name of the source to apply properties from */ protected static void loadConfigSource ( ConfigArgP config , String source ) { } }
// Load the named property source and apply each key to the main config.
Properties p = loadConfig(source);
Config c = config.getConfig();
for (String key : p.stringPropertyNames()) {
    String value = p.getProperty(key);
    // Hoisted: the original called processConfigValue(value) twice per key.
    // Assumes processConfigValue is a pure string transformation — it is
    // applied identically to both sinks below.
    String processed = processConfigValue(value);
    ConfigurationItem ci = config.getConfigurationItem(key);
    if (ci != null) {
        // if we recognize the key, validate it
        ci.setValue(processed);
    }
    // Unrecognized keys are still passed through as raw overrides.
    c.overrideConfig(key, processed);
}
public class CmsDependencyIconActionType { /** * Parses an string into an element of this enumeration . < p > * @ param value the id to parse * @ return the enumeration element * @ throws CmsIllegalArgumentException if the given value could not be matched against a * < code > { @ link CmsDependencyIconActionType } < / code > type . */ public static CmsDependencyIconActionType valueOf ( String value ) throws CmsIllegalArgumentException { } }
// Linear scan over the registered instances; ids are unique, so the first
// match is the only match. Unknown ids raise the standard enum-parse error.
for (CmsDependencyIconActionType candidate : VALUES) {
    if (value.equals(candidate.getId())) {
        return candidate;
    }
}
throw new CmsIllegalArgumentException(
    org.opencms.db.Messages.get().container(
        org.opencms.db.Messages.ERR_MODE_ENUM_PARSE_2,
        value,
        CmsDependencyIconActionType.class.getName()));
public class FlinkKafkaProducerBase { /** * Used for testing only . */ @ VisibleForTesting protected < K , V > KafkaProducer < K , V > getKafkaProducer ( Properties props ) { } }
// Test hook: build the producer directly from the supplied properties so
// tests can substitute or capture the configuration.
return new KafkaProducer<>(props);
public class FrameworkManager { /** * Create and start a new instance of an OSGi framework using the provided * properties as framework properties . */ protected Framework startFramework ( BootstrapConfig config ) throws BundleException { } }
// Set the default startlevel of the framework . We want the framework to // start at our bootstrap level ( i . e . Framework bundle itself will start , and // it will pre - load and re - start any previously known bundles in the // bootstrap start level ) . config . put ( org . osgi . framework . Constants . FRAMEWORK_BEGINNING_STARTLEVEL , Integer . toString ( KernelStartLevel . OSGI_INIT . getLevel ( ) ) ) ; fwkClassloader = config . getFrameworkClassloader ( ) ; FrameworkFactory fwkFactory = FrameworkConfigurator . getFrameworkFactory ( fwkClassloader ) ; // Initialize the framework to create a valid system bundle context // Start the shutdown monitor ( before we start any bundles ) // This exception will have a translated message stating that an unknown exception occurred . // This is so bizarre a case that it should never happen . try { Framework fwk = fwkFactory . newFramework ( config . getFrameworkProperties ( ) ) ; if ( fwk == null ) return null ; fwk . start ( ) ; return fwk ; } catch ( BundleException ex ) { throw ex ; } catch ( RuntimeException ex ) { // Try to diagnose this exception . If it ' s something we know about , we will log an error and // return null here ( which will result in a general " Failed to start the framework " error message // higher up . ) Otherwise , just throw the exception if ( ! handleEquinoxRuntimeException ( ex ) ) throw ex ; return null ; }
public class WaitConditionBuilder { /** * The total length of milliseconds to wait on the condition to be satisfied * @ param milliseconds * @ return */ public S ms ( Long milliseconds ) { } }
builder . milliseconds ( String . valueOf ( milliseconds ) ) ; return self ;
public class MBeanHelper { /** * toObnString takes in any string value and replaces any of the reserved ObjectName * chars with periods ( . ) . The 4 reserved ObjectName chars are : < br > * @ param s The string to be converted to an ObjectName - safe string . */ public static String toObnString ( String s ) { } }
if ( s != null && ! s . isEmpty ( ) ) return s . replace ( ':' , '.' ) . replace ( '=' , '.' ) . replace ( ',' , '.' ) . replace ( '"' , '.' ) ; return s ;
public class MemorySession { /** * Check if attribute is swappable as defined by J2EE */ protected boolean isSwappableData ( Object obj ) { } }
if ( obj != null && ( obj instanceof Serializable || obj instanceof Externalizable ) ) { return true ; } return false ;
public class PrcWebstorePage { /** * < p > Set filters / orders / has goods / services . . . * for all sub - catalogs same as main - catalog . < / p > * @ param pMainCatalog main catalog * @ throws Exception an Exception */ public final void propagateCatalogSettings ( final TradingCatalog pMainCatalog ) throws Exception { } }
// Depth-first walk of the sub-catalog tree: copy every filter/specifics and
// goods/services flag of the main catalog onto each descendant.
for (TradingCatalog subCatalog : pMainCatalog.getSubcatalogs()) {
    // copy filters/specifics:
    subCatalog.getCatalog().setUseAvailableFilter(pMainCatalog.getCatalog().getUseAvailableFilter());
    subCatalog.getCatalog().setUseFilterSpecifics(pMainCatalog.getCatalog().getUseFilterSpecifics());
    subCatalog.getCatalog().setUseFilterSubcatalog(pMainCatalog.getCatalog().getUseFilterSubcatalog());
    subCatalog.getCatalog().setUsePickupPlaceFilter(pMainCatalog.getCatalog().getUsePickupPlaceFilter());
    subCatalog.getCatalog().setUsedSpecifics(pMainCatalog.getCatalog().getUsedSpecifics());
    // copy has-goods/services flags:
    subCatalog.getCatalog().setHasGoods(pMainCatalog.getCatalog().getHasGoods());
    subCatalog.getCatalog().setHasServices(pMainCatalog.getCatalog().getHasServices());
    subCatalog.getCatalog().setHasSeGoods(pMainCatalog.getCatalog().getHasSeGoods());
    subCatalog.getCatalog().setHasSeServices(pMainCatalog.getCatalog().getHasSeServices());
    if (subCatalog.getSubcatalogs().size() > 0) {
        // descend recursively
        propagateCatalogSettings(subCatalog);
    }
}
public class CharsetDetector { /** * Autodetect the charset of an inputStream , and return a Java Reader * to access the converted input data . * This is a convenience method that is equivalent to * < code > this . setDeclaredEncoding ( declaredEncoding ) . setText ( in ) . detect ( ) . getReader ( ) ; < / code > * For the input stream that supplies the character data , markSupported ( ) * must be true ; the charset detection will read a small amount of data , * then return the stream to its original position via * the InputStream . reset ( ) operation . The exact amount that will * be read depends on the characteristics of the data itself . * Raise an exception if no charsets appear to match the input data . * @ param in The source of the byte data in the unknown charset . * @ param declaredEncoding A declared encoding for the data , if available , * or null or an empty string if none is available . */ public Reader getReader ( InputStream in , String declaredEncoding ) { } }
// Remember the caller-declared encoding hint, run detection on the stream,
// and hand back a Reader for the best match (null when nothing matched or
// the stream could not be read).
fDeclaredEncoding = declaredEncoding;
try {
    setText(in);
    CharsetMatch bestMatch = detect();
    return (bestMatch == null) ? null : bestMatch.getReader();
} catch (IOException e) {
    // Detection reads from the stream; treat I/O failure as "no match".
    return null;
}
public class AbstractCalculator { /** * Append operator and number to expression * @ param operator * @ param value * @ return */ protected final CALC operator ( Class < ? extends Operator > operator , Object value ) { } }
// Normalize the operand to a Num, then append the operator (resolved through
// the cache) followed by the operand in infix order; return this for chaining.
final Num operand = (value instanceof Num) ? (Num) value : new Num(value);
infix.add(CacheExtension.getOperator(operator));
infix.add(operand);
return getThis();
public class InternalSimpleAntlrParser { /** * InternalSimpleAntlr . g : 1022:1 : ruleAndExpression returns [ EObject current = null ] : ( this _ NotExpression _ 0 = ruleNotExpression ( ( ) otherlv _ 2 = ' & & ' ( ( lv _ right _ 3_0 = ruleNotExpression ) ) ) * ) ; */ public final EObject ruleAndExpression ( ) throws RecognitionException { } }
EObject current = null ; Token otherlv_2 = null ; EObject this_NotExpression_0 = null ; EObject lv_right_3_0 = null ; enterRule ( ) ; try { // InternalSimpleAntlr . g : 1025:28 : ( ( this _ NotExpression _ 0 = ruleNotExpression ( ( ) otherlv _ 2 = ' & & ' ( ( lv _ right _ 3_0 = ruleNotExpression ) ) ) * ) ) // InternalSimpleAntlr . g : 1026:1 : ( this _ NotExpression _ 0 = ruleNotExpression ( ( ) otherlv _ 2 = ' & & ' ( ( lv _ right _ 3_0 = ruleNotExpression ) ) ) * ) { // InternalSimpleAntlr . g : 1026:1 : ( this _ NotExpression _ 0 = ruleNotExpression ( ( ) otherlv _ 2 = ' & & ' ( ( lv _ right _ 3_0 = ruleNotExpression ) ) ) * ) // InternalSimpleAntlr . g : 1027:2 : this _ NotExpression _ 0 = ruleNotExpression ( ( ) otherlv _ 2 = ' & & ' ( ( lv _ right _ 3_0 = ruleNotExpression ) ) ) * { if ( state . backtracking == 0 ) { } if ( state . backtracking == 0 ) { newCompositeNode ( grammarAccess . getAndExpressionAccess ( ) . getNotExpressionParserRuleCall_0 ( ) ) ; } pushFollow ( FOLLOW_26 ) ; this_NotExpression_0 = ruleNotExpression ( ) ; state . _fsp -- ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { current = this_NotExpression_0 ; afterParserOrEnumRuleCall ( ) ; } // InternalSimpleAntlr . g : 1038:1 : ( ( ) otherlv _ 2 = ' & & ' ( ( lv _ right _ 3_0 = ruleNotExpression ) ) ) * loop20 : do { int alt20 = 2 ; int LA20_0 = input . LA ( 1 ) ; if ( ( LA20_0 == 32 ) ) { alt20 = 1 ; } switch ( alt20 ) { case 1 : // InternalSimpleAntlr . g : 1038:2 : ( ) otherlv _ 2 = ' & & ' ( ( lv _ right _ 3_0 = ruleNotExpression ) ) { // InternalSimpleAntlr . g : 1038:2 : ( ) // InternalSimpleAntlr . g : 1039:2: { if ( state . backtracking == 0 ) { } if ( state . backtracking == 0 ) { current = forceCreateModelElementAndSet ( grammarAccess . getAndExpressionAccess ( ) . getAndExpressionLeftAction_1_0 ( ) , current ) ; } } otherlv_2 = ( Token ) match ( input , 32 , FOLLOW_20 ) ; if ( state . failed ) return current ; if ( state . 
backtracking == 0 ) { newLeafNode ( otherlv_2 , grammarAccess . getAndExpressionAccess ( ) . getAmpersandAmpersandKeyword_1_1 ( ) ) ; } // InternalSimpleAntlr . g : 1051:1 : ( ( lv _ right _ 3_0 = ruleNotExpression ) ) // InternalSimpleAntlr . g : 1052:1 : ( lv _ right _ 3_0 = ruleNotExpression ) { // InternalSimpleAntlr . g : 1052:1 : ( lv _ right _ 3_0 = ruleNotExpression ) // InternalSimpleAntlr . g : 1053:3 : lv _ right _ 3_0 = ruleNotExpression { if ( state . backtracking == 0 ) { newCompositeNode ( grammarAccess . getAndExpressionAccess ( ) . getRightNotExpressionParserRuleCall_1_2_0 ( ) ) ; } pushFollow ( FOLLOW_26 ) ; lv_right_3_0 = ruleNotExpression ( ) ; state . _fsp -- ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { if ( current == null ) { current = createModelElementForParent ( grammarAccess . getAndExpressionRule ( ) ) ; } set ( current , "right" , lv_right_3_0 , "org.eclipse.xtext.generator.parser.antlr.debug.SimpleAntlr.NotExpression" ) ; afterParserOrEnumRuleCall ( ) ; } } } } break ; default : break loop20 ; } } while ( true ) ; } } if ( state . backtracking == 0 ) { leaveRule ( ) ; } } catch ( RecognitionException re ) { recover ( input , re ) ; appendSkippedTokens ( ) ; } finally { } return current ;
public class AopUtils { /** * Gets declared method from specified type by mame and parameters types . * @ param type the type * @ param methodName the name of the method * @ param parameterTypes the parameter array * @ return a { @ link Method } object or null if method doesn ' t exist */ public static Method getDeclaredMethod ( Class < ? > type , String methodName , Class < ? > ... parameterTypes ) { } }
Method method = null;
try {
    method = type.getDeclaredMethod(methodName, parameterTypes);
    if (method.isBridge()) {
        // Compiler-generated bridge method: resolve the original generic
        // method it delegates to.
        method = MethodProvider.getInstance().unbride(method, type);
    }
} catch (NoSuchMethodException e) {
    // Not declared on this type — walk up the superclass chain.
    // NOTE(review): interfaces are not searched; confirm that is intended.
    Class<?> superclass = type.getSuperclass();
    if (superclass != null) {
        method = getDeclaredMethod(superclass, methodName, parameterTypes);
    }
} catch (ClassNotFoundException e) {
    // Guava's Throwables.propagate rethrows, so control never falls through
    // from these two handlers to the return below.
    Throwables.propagate(e);
} catch (IOException e) {
    Throwables.propagate(e);
}
return method;
public class Widget { /** * Gets Widget bounds height * @ return height */ public float getBoundsHeight ( ) { } }
// Height of the widget's bounding volume (Y extent); 0 when no scene object
// has been attached yet.
if (mSceneObject == null) {
    return 0f;
}
GVRSceneObject.BoundingVolume volume = mSceneObject.getBoundingVolume();
return volume.maxCorner.y - volume.minCorner.y;
public class ParagraphBuilder { /** * Create a span in the current paragraph . * @ param text the text * @ return this for fluent style */ public ParagraphBuilder span ( final String text ) { } }
// Wrap the text in a Span element, append it to the paragraph under
// construction, and return this builder for fluent chaining.
this.paragraphElements.add(new Span(text));
return this;
public class MultipartRequestInputStream { /** * 跳过边界表示 * @ return 跳过的字节数 */ public int skipToBoundary ( ) throws IOException { } }
// Consume bytes one at a time until the multipart boundary marker is seen,
// returning how many bytes were consumed (including the boundary byte).
int count = 0;
while (true) {
    byte b = readByte();
    count++;
    if (isBoundary(b)) {
        break;
    }
}
// NOTE(review): termination relies on readByte() throwing IOException at end
// of stream; a malformed body with no boundary would otherwise loop forever
// — confirm readByte's EOF behavior.
return count;
public class AES256JNCryptorInputStream { /** * Updates the HMAC value and handles the end of stream . * @ param b * the result of a read operation * @ return the value { @ code b } * @ throws IOException * @ throws StreamIntegrityException */ private int completeRead ( int b ) throws IOException , StreamIntegrityException { } }
if (b == END_OF_STREAM) {
    // Underlying stream exhausted: run end-of-stream handling (integrity check).
    handleEndOfStream();
} else {
    // Peek one byte ahead so end-of-stream handling runs as soon as the last
    // real byte has been returned, not on the following read.
    int c = pushbackInputStream.read();
    if (c == END_OF_STREAM) {
        handleEndOfStream();
    } else {
        // Not at the end — push the peeked byte back for the next read.
        pushbackInputStream.unread(c);
    }
}
return b;
public class JaxbHelper { /** * Creates an instance by reading the XML from a reader . * @ param reader * Reader to use . * @ param jaxbContext * Context to use . * @ return New instance . * @ throws UnmarshalObjectException * Error deserializing the object . * @ param < TYPE > * Type of the created object . */ @ SuppressWarnings ( "unchecked" ) @ NotNull public < TYPE > TYPE create ( @ NotNull final Reader reader , @ NotNull final JAXBContext jaxbContext ) throws UnmarshalObjectException { } }
Contract.requireArgNotNull("reader", reader);
Contract.requireArgNotNull("jaxbContext", jaxbContext);
try {
    // Unmarshallers are cheap to create and not thread-safe, so a fresh one
    // is built for every call.
    return (TYPE) jaxbContext.createUnmarshaller().unmarshal(reader);
} catch (final JAXBException ex) {
    throw new UnmarshalObjectException("Unable to parse XML from reader", ex);
}
public class ErrorLogger { /** * Write an info message to the logs * @ param message The information message */ public void info ( final String message ) { } }
// Record the message in the in-memory list (for later retrieval) and mirror
// it to the backing logger.
messages.add(new LogMessage(message, LogMessage.Type.INFO));
log.info(message);
public class PackratParser { /** * This is the actual parser start . After running the parse , a check is applied * to check for full parsing or partial parsing . If partial parsing is found , an * exception is thrown . * @ param sourceCode * is the source code to be parser . * @ param production * is the name of the production to be used as root production for * the parse process . * @ return A { @ link ParseTreeNode } is returned with the parser result . * @ throws ParserException * is thrown in case the parser could not parse the source . */ public ParseTreeNode parse ( SourceCode sourceCode , String production ) throws ParserException { } }
try {
    // Reset parser state for the new source.
    initialize(sourceCode);
    // Apply the root production starting at position 0.
    MemoEntry progress = applyRule(production, 0, 1);
    // A partial parse (input left over) counts as failure.
    if (progress.getDeltaPosition() != text.length()) {
        throw new ParserException(getParserErrorMessage());
    }
    Object answer = progress.getAnswer();
    if (answer instanceof Status) {
        Status status = (Status) answer;
        switch (status) {
        case FAILED:
            throw new ParserException("Parser returned status 'FAILED'.");
        default:
            // Any other status leaking out of applyRule is an internal error.
            throw new RuntimeException("A status '" + status.toString() + "' is not expected here.");
        }
    }
    ParseTreeNode parserTree = (ParseTreeNode) answer;
    // Fix up parent links before handing the tree to the caller.
    normalizeParents(parserTree);
    return parserTree;
} catch (TreeException e) {
    throw new ParserException(e);
}
public class Futures { /** * Same as exceptionallyExpecting ( ) , except that it allows executing / returning a Future as a result in case of an * expected exception . * If such an exception is caught , the given exceptionFutureSupplier is invoked and its result is then returned . * All other Exceptions will be re - thrown . * @ param future The original CompletableFuture to attach to . * @ param isExpected A Predicate that can check whether an Exception is expected or not . * @ param exceptionFutureSupplier A Supplier that returns a CompletableFuture which will be invoked in case the thrown * Exception if of type exceptionClass . * @ param < T > The Type of the Future ' s result . * @ return A new CompletableFuture that will complete either : * - With the same result as the original Future if that one completed normally * - With exceptionValue if the original Future completed with an expected exception . * - Exceptionally with the original Future ' s exception if none of the above are true . */ public static < T > CompletableFuture < T > exceptionallyComposeExpecting ( CompletableFuture < T > future , Predicate < Throwable > isExpected , Supplier < CompletableFuture < T > > exceptionFutureSupplier ) { } }
return exceptionallyCompose ( future , ex -> { if ( isExpected . test ( Exceptions . unwrap ( ex ) ) ) { return exceptionFutureSupplier . get ( ) ; } else { return Futures . failedFuture ( ex ) ; } } ) ;
public class SHA1 { /** * 串接arr参数 , 生成sha1 digest * @ param arr * @ return */ public static String gen ( String ... arr ) throws NoSuchAlgorithmException { } }
// Sort the inputs so the digest is independent of argument order, then hash
// the concatenation. NOTE: sorting mutates the caller-supplied array.
Arrays.sort(arr);
StringBuilder joined = new StringBuilder();
for (int i = 0; i < arr.length; i++) {
    joined.append(arr[i]);
}
return DigestUtils.sha1Hex(joined.toString());
public class WTemplate { /** * Set a predefined template engine . If null then the default engine is used . * @ param templateEngine the provided template engine or null to use the default engine */ public void setEngineName ( final TemplateRendererFactory . TemplateEngine templateEngine ) { } }
// Null engine means "use the default engine": forward null unchanged,
// otherwise unwrap the engine's registered name. The String cast keeps the
// overload resolution unambiguous.
if (templateEngine == null) {
    setEngineName((String) null);
} else {
    setEngineName(templateEngine.getEngineName());
}
public class ListMembersRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( ListMembersRequest listMembersRequest , ProtocolMarshaller protocolMarshaller ) { } }
if (listMembersRequest == null) {
    throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
    // Emit each request field through its pre-built marshalling binding.
    protocolMarshaller.marshall(listMembersRequest.getDetectorId(), DETECTORID_BINDING);
    protocolMarshaller.marshall(listMembersRequest.getMaxResults(), MAXRESULTS_BINDING);
    protocolMarshaller.marshall(listMembersRequest.getNextToken(), NEXTTOKEN_BINDING);
    protocolMarshaller.marshall(listMembersRequest.getOnlyAssociated(), ONLYASSOCIATED_BINDING);
} catch (Exception e) {
    // Wrap anything thrown by the protocol layer in the SDK client exception.
    throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
public class ConfigWebUtil { /** * touch a file object by the string definition * @ param config * @ param directory * @ param path * @ param type * @ return matching file */ public static Resource getFile ( Config config , Resource directory , String path , short type ) { } }
// Expand placeholders first; then try the path relative to the given
// directory and finally as a config resource. Null when neither resolves.
path = replacePlaceholder(path, config);
if (StringUtil.isEmpty(path, true)) {
    return null;
}
Resource file = getFile(directory.getRealResource(path), type);
if (file == null) {
    file = getFile(config.getResource(path), type);
}
return file;
public class Ifc4FactoryImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public String convertIfcStructuralSurfaceActivityTypeEnumToString ( EDataType eDataType , Object instanceValue ) { } }
// Generated EMF converter: enum literals stringify via toString(); null
// passes through unchanged.
if (instanceValue == null) {
    return null;
}
return instanceValue.toString();
public class AbstractExecutableMemberWriter { /** * For backward compatibility , include an anchor using the erasures of the * parameters . NOTE : We won ' t need this method anymore after we fix * see tags so that they use the type instead of the erasure . * @ param executableElement the ExecutableElement to anchor to . * @ return the 1.4 . x style anchor for the executable element . */ protected String getErasureAnchor ( ExecutableElement executableElement ) { } }
// Build "<name>(<erased-param-types>)" while tracking whether any parameter
// actually required erasure; only then is the legacy anchor emitted.
final StringBuilder buf = new StringBuilder(name(executableElement) + "(");
List<? extends VariableElement> parameters = executableElement.getParameters();
boolean foundTypeVariable = false;
for (int i = 0; i < parameters.size(); i++) {
    if (i > 0) {
        buf.append(",");
    }
    TypeMirror t = parameters.get(i).asType();
    SimpleTypeVisitor9<Boolean, Void> stv = new SimpleTypeVisitor9<Boolean, Void>() {
        // Shadows the enclosing local of the same name on purpose: tracks
        // whether a type variable was seen while visiting nested component
        // types (e.g. T[]); the outer loop reads the visitor's return value.
        boolean foundTypeVariable = false;

        @Override
        public Boolean visitArray(ArrayType t, Void p) {
            // Erase the component type first, then append the [] dimensions.
            visit(t.getComponentType());
            buf.append(utils.getDimension(t));
            return foundTypeVariable;
        }

        @Override
        public Boolean visitTypeVariable(TypeVariable t, Void p) {
            // Erasure of a type variable: the qualified name of its bound.
            buf.append(utils.asTypeElement(t).getQualifiedName());
            foundTypeVariable = true;
            return foundTypeVariable;
        }

        @Override
        public Boolean visitDeclared(DeclaredType t, Void p) {
            buf.append(utils.getQualifiedTypeName(t));
            return foundTypeVariable;
        }

        @Override
        protected Boolean defaultAction(TypeMirror e, Void p) {
            buf.append(e);
            return foundTypeVariable;
        }
    };
    boolean isTypeVariable = stv.visit(t);
    if (!foundTypeVariable) {
        foundTypeVariable = isTypeVariable;
    }
}
buf.append(")");
// No erasure happened => no 1.4.x-style anchor is needed; return null.
return foundTypeVariable ? writer.getName(buf.toString()) : null;
public class Connection { /** * { @ inheritDoc } */ public Statement createStatement ( final int resultSetType , final int resultSetConcurrency ) throws SQLException { } }
// Only the forward-only / read-only combination is supported; reject any
// other request before delegating to the no-argument factory.
if (resultSetType != ResultSet.TYPE_FORWARD_ONLY) {
    throw new SQLFeatureNotSupportedException("Unsupported result set type");
}
if (resultSetConcurrency != ResultSet.CONCUR_READ_ONLY) {
    throw new SQLFeatureNotSupportedException("Unsupported result set concurrency");
}
return createStatement();
public class LinearSolverChol_ZDRM { /** * Sets the matrix to the inverse using a lower triangular matrix . */ public void setToInverseL ( double a [ ] ) { } }
// the more direct method which takes full advantage of the sparsity of the data structures proved to // be difficult to get right due to the conjugates and reordering . // See comparable real number code for an example . for ( int col = 0 ; col < n ; col ++ ) { Arrays . fill ( vv , 0 ) ; vv [ col * 2 ] = 1 ; TriangularSolver_ZDRM . solveL_diagReal ( t , vv , n ) ; TriangularSolver_ZDRM . solveConjTranL_diagReal ( t , vv , n ) ; for ( int i = 0 ; i < n ; i ++ ) { a [ ( i * numCols + col ) * 2 ] = vv [ i * 2 ] ; a [ ( i * numCols + col ) * 2 + 1 ] = vv [ i * 2 + 1 ] ; } } // NOTE : If you want to make inverse faster take advantage of the sparsity
public class JMMap { /** * Build entry stream stream . * @ param < K > the type parameter * @ param < V > the type parameter * @ param map the map * @ return the stream */ public static < K , V > Stream < Entry < K , V > > buildEntryStream ( Map < K , V > map ) { } }
// Thin adapter: expose the map's entry set as a Stream.
return map.entrySet().stream();
public class TrieNode { /** * Gets depth . * @ return the depth */ public short getDepth ( ) { } }
// The root node (index 0) is always depth 0.
if (0 == index)
    return 0;
// depth is computed lazily; -1 marks "not yet computed".
// NOTE(review): this is double-checked locking — confirm 'depth' is declared
// volatile; otherwise the unsynchronized first read is racy (at worst a
// redundant recomputation for a short, but worth verifying).
if (-1 == depth) {
    synchronized (this) {
        if (-1 == depth) {
            TrieNode parent = getParent();
            // Parents always have strictly smaller indices, so the recursive
            // getDepth() below terminates at the root.
            assert (null == parent || parent.index < index);
            depth = (short) (null == parent ? 0 : (parent.getDepth() + 1));
        }
    }
}
return depth;
public class SessionUtil { /** * Helper function to parse a JsonNode from a GS response * containing CommonParameters , emitting an EnumMap of parameters * @ param paramsNode parameters in JSON form * @ return map object including key and value pairs */ public static Map < String , Object > getCommonParams ( JsonNode paramsNode ) { } }
Map<String, Object> parameters = new HashMap<>();
for (JsonNode child : paramsNode) {
    // If there isn't a name then the response from GS must be erroneous.
    if (!child.hasNonNull("name")) {
        logger.error("Common Parameter JsonNode encountered with " + "no parameter name!");
        continue;
    }
    // Look up the parameter based on the "name" attribute of the node.
    String paramName = child.path("name").asText();
    // What type of value is it and what's the value?
    if (!child.hasNonNull("value")) {
        logger.debug("No value found for Common Parameter {}", child.path("name").asText());
        continue;
    }
    // Coerce the value according to which per-type whitelist the (upper-cased)
    // name appears on; names on none of the lists are logged and dropped.
    if (STRING_PARAMS.contains(paramName.toUpperCase())) {
        parameters.put(paramName, child.path("value").asText());
    } else if (INT_PARAMS.contains(paramName.toUpperCase())) {
        parameters.put(paramName, child.path("value").asInt());
    } else if (BOOLEAN_PARAMS.contains(paramName.toUpperCase())) {
        parameters.put(paramName, child.path("value").asBoolean());
    } else {
        logger.debug("Unknown Common Parameter: {}", paramName);
    }
    logger.debug("Parameter {}: {}", paramName, child.path("value").asText());
}
return parameters;
public class JDBCTableSiteIdentifier { /** * Insert a new site into the site table and into this mapping . */ public Site insertNewSite ( String siteString ) throws PersistenceException { } }
// Already known — nothing to insert; signal that with null.
if (_sitesByString.containsKey(siteString)) {
    return null;
}
// add it to the db
Site site = new Site();
site.siteString = siteString;
_repo.insertNewSite(site);
// add it to our two mapping tables, taking care to avoid causing enumerateSites() to choke:
// the maps are replaced wholesale (copy-on-write) so concurrent readers never
// observe a map while it is being mutated.
@SuppressWarnings("unchecked")
HashMap<String, Site> newStrings = (HashMap<String, Site>) _sitesByString.clone();
HashIntMap<Site> newIds = _sitesById.clone();
newIds.put(site.siteId, site);
newStrings.put(site.siteString, site);
_sitesByString = newStrings;
_sitesById = newIds;
return site;
public class Output { /** * { @ inheritDoc } */ @ Override public void writeBoolean ( Boolean bol ) { } }
// Emit the AMF3 marker first, then the dedicated true/false type byte.
// NOTE(review): 'bol' is a boxed Boolean and is unboxed here — a null
// argument throws NullPointerException; confirm callers never pass null.
writeAMF3();
buf.put(bol.booleanValue() ? AMF3.TYPE_BOOLEAN_TRUE : AMF3.TYPE_BOOLEAN_FALSE);
public class JDBCBlobClient { /** * Writes all or part of the given < code > byte < / code > array to the * < code > BLOB < / code > value that this < code > Blob < / code > object represents * and returns the number of bytes written . * @ param pos the position in the < code > BLOB < / code > object at which to * start writing * @ param bytes the array of bytes to be written to this * < code > BLOB < / code > object * @ param offset the offset into the array < code > bytes < / code > at which * to start reading the bytes to be set * @ param len the number of bytes to be written to the < code > BLOB < / code > * value from the array of bytes < code > bytes < / code > * @ return the number of bytes written * @ throws SQLException if there is an error accessing the * < code > BLOB < / code > value */ public synchronized int setBytes ( long pos , byte [ ] bytes , int offset , int len ) throws SQLException { } }
// Validate the requested window against the source array first.
if (!isInLimits(bytes.length, offset, len)) {
    throw Util.outOfRangeArgument();
}
// Copy out the requested region only when the caller didn't hand us the
// exact array already ('offset' is an int, so no cast is needed).
if (offset != 0 || len != bytes.length) {
    byte[] window = new byte[len];
    System.arraycopy(bytes, offset, window, 0, len);
    bytes = window;
}
return setBytes(pos, bytes);
public class MicronautConsole { /** * Indicates progress by number . * @ param number The number */ @ Override public void indicateProgress ( int number ) { } }
verifySystemOut();
progressIndicatorActive = true;
// Preserve the status-line text; the finally block restores it so the
// progress suffix is never baked into lastMessage.
String currMsg = lastMessage;
try {
    if (isAnsiEnabled()) {
        // ANSI terminal: rewrite the status line in place with the number appended.
        updateStatus(currMsg + ' ' + number);
    } else {
        // Dumb terminal: append "..<number>" as plain output.
        out.print("..");
        out.print(number);
    }
} finally {
    lastMessage = currMsg;
}
public class FXBinder { /** * Start point of the fluent API to create a binding . * @ param list the JavaFX list * @ param < T > the data type of the list * @ return a binder that can be used by the fluent API to create binding . */ public static < T > JavaFXListBinder < T > bind ( ObservableList < T > list ) { } }
requireNonNull ( list , "list" ) ; return new DefaultJavaFXListBinder ( list ) ;
public class CWSTagger { /** * 得到分词结果 String [ ] , 不进行断句 * @ param src 字符串 * @ return String [ ] 词数组 , 每个元素为一个词 */ public String [ ] tag2Array ( String src ) { } }
ArrayList < String > words = tag2List ( src ) ; return ( String [ ] ) words . toArray ( new String [ words . size ( ) ] ) ;
public class Ifc4PackageImpl {
    /**
     * Returns the {@code IfcBeam} EClass, resolving it lazily from the
     * globally registered Ifc4 package on first access.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public EClass getIfcBeam() {
        if (ifcBeamEClass == null) {
            // Classifier index 38 is the IfcBeam EClass in the generated
            // package metadata (index is fixed by the EMF code generator).
            ifcBeamEClass = (EClass) EPackage.Registry.INSTANCE
                    .getEPackage(Ifc4Package.eNS_URI).getEClassifiers().get(38);
        }
        return ifcBeamEClass;
    }
}
public class AbstractServer {
    /**
     * Stops the service. If a timeout is given and the service has still not
     * gracefully been stopped after timeout ms the service is stopped by force.
     *
     * @param millis wait timeout in ms; {@code 0} means wait indefinitely
     */
    @Override
    public final synchronized void stopService(long millis) {
        running = false;
        try {
            if (keepRunning) {
                // Signal the worker loop to exit, then wake it if blocked.
                keepRunning = false;
                interrupt();
                quit();
                // 0 means "no timeout": block until the thread terminates.
                if (0L == millis) {
                    join();
                } else {
                    join(millis);
                }
            }
        } catch (InterruptedException e) {
            // It's possible that the thread exits between the lines
            // keepRunning = false and interrupt() above.
            log.warn("Got interrupted while stopping {}", this, e);
            // Restore the interrupt status for callers further up the stack.
            Thread.currentThread().interrupt();
        }
    }
}
public class PersistableImpl {
    /**
     * Add this Persistable and its raw data managed object to the provided
     * transaction. Delegates to the three-argument overload, passing this
     * instance as the persistable to store.
     *
     * @param tran  The transaction under which the add of this Persistable to
     *              the ObjectStore takes place.
     * @param store The ObjectStore to add this Persistable to.
     * @exception ObjectManagerException
     * @exception PersistenceException
     * @exception SevereMessageStoreException
     */
    public void addToStore(Transaction tran, ObjectStore store)
            throws PersistenceException, ObjectManagerException, SevereMessageStoreException {
        addToStore(tran, store, this);
    }
}
public class WavefrontStrings {
    /**
     * Creates a Wavefront-compatible string representation of the metric
     * value. NaN and infinite values are not representable in Wavefront, so
     * they (and empty values) yield an empty optional.
     *
     * @param mv the metric value to render
     * @return the rendered value, or empty if absent or non-finite
     */
    public static Optional<String> wavefrontValue(MetricValue mv) {
        // Wavefront cannot ingest NaN or +/-Inf; drop those outright.
        if (mv.isInfiniteOrNaN()) {
            return Optional.empty();
        }
        return mv.value().map(v -> v.toString());
    }
}
public class IntegrationAccountsInner {
    /**
     * Gets the integration account's Key Vault keys.
     *
     * @param resourceGroupName The resource group name.
     * @param integrationAccountName The integration account name.
     * @param listKeyVaultKeys The key vault parameters.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the List&lt;KeyVaultKeyInner&gt; object
     */
    public Observable<ServiceResponse<List<KeyVaultKeyInner>>> listKeyVaultKeysWithServiceResponseAsync(String resourceGroupName, String integrationAccountName, ListKeyVaultKeysDefinition listKeyVaultKeys) {
        // Fail fast on every required parameter before issuing the request.
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        if (resourceGroupName == null) {
            throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
        }
        if (integrationAccountName == null) {
            throw new IllegalArgumentException("Parameter integrationAccountName is required and cannot be null.");
        }
        if (this.client.apiVersion() == null) {
            throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
        }
        if (listKeyVaultKeys == null) {
            throw new IllegalArgumentException("Parameter listKeyVaultKeys is required and cannot be null.");
        }
        // Structural validation of the request payload (generated validator).
        Validator.validate(listKeyVaultKeys);
        return service.listKeyVaultKeys(this.client.subscriptionId(), resourceGroupName, integrationAccountName, this.client.apiVersion(), listKeyVaultKeys, this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<List<KeyVaultKeyInner>>>>() {
                @Override
                public Observable<ServiceResponse<List<KeyVaultKeyInner>>> call(Response<ResponseBody> response) {
                    try {
                        // Deserialize the paged wire format, then unwrap the
                        // page so callers receive a plain list.
                        ServiceResponse<PageImpl2<KeyVaultKeyInner>> result = listKeyVaultKeysDelegate(response);
                        List<KeyVaultKeyInner> items = null;
                        if (result.body() != null) {
                            items = result.body().items();
                        }
                        ServiceResponse<List<KeyVaultKeyInner>> clientResponse = new ServiceResponse<List<KeyVaultKeyInner>>(items, result.response());
                        return Observable.just(clientResponse);
                    } catch (Throwable t) {
                        // Surface deserialization/service errors through the
                        // observable rather than throwing synchronously.
                        return Observable.error(t);
                    }
                }
            });
    }
}
public class HdfsState { /** * Reads the last txn record from index file if it exists , if not * from . tmp file if exists . * @ param indexFilePath the index file path * @ return the txn record from the index file or a default initial record . * @ throws IOException */ private TxnRecord getTxnRecord ( Path indexFilePath ) throws IOException { } }
Path tmpPath = tmpFilePath ( indexFilePath . toString ( ) ) ; if ( this . options . fs . exists ( indexFilePath ) ) { return readTxnRecord ( indexFilePath ) ; } else if ( this . options . fs . exists ( tmpPath ) ) { return readTxnRecord ( tmpPath ) ; } return new TxnRecord ( 0 , options . currentFile . toString ( ) , 0 ) ;
public class Mirror {
    /**
     * Convenience method for {@link Mirror#proxify(Class...)} that proxies a
     * single interface and narrows the handler to that interface's type.
     *
     * @see {@link Mirror#on(Class)}
     * @see {@link Mirror#proxify(Class...)}
     */
    // Cast is safe: the varargs overload builds a proxy implementing clazz,
    // so the handler's runtime type matches T.
    @SuppressWarnings("unchecked")
    public <T> ProxyHandler<T> proxify(final Class<T> clazz) {
        return (ProxyHandler<T>) proxify(new Class[] { clazz });
    }
}
public class ConnectionFactoryImpl { /** * { @ inheritDoc } */ public boolean activate ( ) throws Exception { } }
if ( ! activated ) { jndiStrategy . bind ( jndiName , cf ) ; ( ( org . ironjacamar . core . connectionmanager . pool . Pool ) pool . getPool ( ) ) . prefill ( ) ; if ( recovery != null ) recovery . activate ( ) ; activated = true ; return true ; } return false ;
public class InodeTreePersistentState {
    /**
     * Sets an ACL for an inode, applying the change to the in-memory state
     * and appending it to the journal.
     *
     * @param context journal context supplier
     * @param entry set acl entry
     */
    public void applyAndJournal(Supplier<JournalContext> context, SetAclEntry entry) {
        try {
            // Apply in-memory first, then journal; any failure in either step
            // leaves state and journal potentially divergent, which is fatal.
            applySetAcl(entry);
            context.get().append(JournalEntry.newBuilder().setSetAcl(entry).build());
        } catch (Throwable t) {
            ProcessUtils.fatalError(LOG, t, "Failed to apply %s", entry);
            throw t; // fatalError will usually System.exit
        }
    }
}
public class BuilderFactory {
    /**
     * Return the builder that builds the package summary.
     *
     * @param pkg the package being documented.
     * @param prevPkg the previous package being documented.
     * @param nextPkg the next package being documented.
     * @return the builder that builds the package summary.
     */
    public AbstractBuilder getPackageSummaryBuilder(PackageDoc pkg, PackageDoc prevPkg, PackageDoc nextPkg) throws Exception {
        // Delegate writer creation to the factory so the doclet's output
        // format (HTML, etc.) stays decoupled from the builder.
        return PackageSummaryBuilder.getInstance(context, pkg,
                writerFactory.getPackageSummaryWriter(pkg, prevPkg, nextPkg));
    }
}
public class CoverageDataPng { /** * Get the pixel value as a 16 bit unsigned integer value * @ param image * tile image * @ param x * x coordinate * @ param y * y coordinate * @ return unsigned integer pixel value */ public int getUnsignedPixelValue ( BufferedImage image , int x , int y ) { } }
short pixelValue = getPixelValue ( image , x , y ) ; int unsignedPixelValue = getUnsignedPixelValue ( pixelValue ) ; return unsignedPixelValue ;
public class Maybe {
    /**
     * {@inheritDoc}
     */
    // Reuses the default Monad implementation, then coerces the result's
    // applicative witness type back to Maybe<B>.
    @Override
    public final <B> Maybe<B> zip(Applicative<Function<? super A, ? extends B>, Maybe<?>> appFn) {
        return Monad.super.zip(appFn).coerce();
    }
}
public class AbstractDatabaseEngine { /** * Executes the given update . * @ param query The update to execute . * @ throws DatabaseEngineException If something goes wrong executing the update . */ @ Override public synchronized int executeUpdate ( final Expression query ) throws DatabaseEngineException { } }
/* * Reconnection is already assured by " void executeUpdate ( final String query ) " . */ final String trans = translate ( query ) ; logger . trace ( trans ) ; return executeUpdate ( trans ) ;
public class Random { /** * Generate a uniform random number in the range [ lo , hi ) * @ param lo lower limit of range * @ param hi upper limit of range * @ return a uniform random real in the range [ lo , hi ) */ public double nextDouble ( double lo , double hi ) { } }
if ( lo < 0 ) { if ( nextInt ( 2 ) == 0 ) return - nextDouble ( 0 , - lo ) ; else return nextDouble ( 0 , hi ) ; } else { return ( lo + ( hi - lo ) * nextDouble ( ) ) ; }
public class SmoothieMap {
    /**
     * Returns the value to which the specified key is mapped, or {@code defaultValue} if this map
     * contains no mapping for the key.
     *
     * @param key the key whose associated value is to be returned
     * @param defaultValue the default mapping of the key
     * @return the value to which the specified key is mapped, or {@code defaultValue} if this map
     * contains no mapping for the key
     */
    @Override
    public final V getOrDefault(Object key, V defaultValue) {
        long hash, allocIndex;
        Segment<K, V> segment;
        // Single-expression hot path: hash the key, locate its segment, and
        // probe it. find() appears to use a non-positive allocIndex to signal
        // "absent" — presumably 0 or negative; confirm against Segment.find.
        return (allocIndex = (segment = segment(segmentIndex(hash = keyHashCode(key))))
                .find(this, hash, key)) > 0 ? segment.readValue(allocIndex) : defaultValue;
    }
}